YYWen0o0/python-frame-django | django/forms/fields.py | 1 | 47569 | """
Field classes.
"""
from __future__ import unicode_literals
import copy
import datetime
import os
import re
import sys
import warnings
from decimal import Decimal, DecimalException
from io import BytesIO
from django.core import validators
from django.core.exceptions import ValidationError
from django.forms.utils import from_current_timezone, to_current_timezone
from django.forms.widgets import (
TextInput, NumberInput, EmailInput, URLInput, HiddenInput,
MultipleHiddenInput, ClearableFileInput, CheckboxInput, Select,
NullBooleanSelect, SelectMultiple, DateInput, DateTimeInput, TimeInput,
SplitDateTimeWidget, SplitHiddenDateTimeWidget, FILE_INPUT_CONTRADICTION
)
from django.utils import formats
from django.utils.encoding import smart_text, force_str, force_text
from django.utils.ipv6 import clean_ipv6_address
from django.utils.deprecation import RemovedInDjango19Warning, RemovedInDjango20Warning
from django.utils import six
from django.utils.six.moves.urllib.parse import urlsplit, urlunsplit
from django.utils.translation import ugettext_lazy as _, ungettext_lazy
# Provide this import for backwards compatibility.
from django.core.validators import EMPTY_VALUES # NOQA
__all__ = (
'Field', 'CharField', 'IntegerField',
'DateField', 'TimeField', 'DateTimeField',
'RegexField', 'EmailField', 'FileField', 'ImageField', 'URLField',
'BooleanField', 'NullBooleanField', 'ChoiceField', 'MultipleChoiceField',
'ComboField', 'MultiValueField', 'FloatField', 'DecimalField',
'SplitDateTimeField', 'IPAddressField', 'GenericIPAddressField', 'FilePathField',
'SlugField', 'TypedChoiceField', 'TypedMultipleChoiceField'
)
class Field(object):
widget = TextInput # Default widget to use when rendering this type of Field.
hidden_widget = HiddenInput # Default widget to use when rendering this as "hidden".
default_validators = [] # Default set of validators
    # Add an 'invalid' entry to default_error_messages if you want a specific
    # field error message not raised by the field validators.
default_error_messages = {
'required': _('This field is required.'),
}
empty_values = list(validators.EMPTY_VALUES)
# Tracks each time a Field instance is created. Used to retain order.
creation_counter = 0
def __init__(self, required=True, widget=None, label=None, initial=None,
help_text='', error_messages=None, show_hidden_initial=False,
validators=[], localize=False, label_suffix=None):
# required -- Boolean that specifies whether the field is required.
# True by default.
# widget -- A Widget class, or instance of a Widget class, that should
# be used for this Field when displaying it. Each Field has a
# default Widget that it'll use if you don't specify this. In
# most cases, the default widget is TextInput.
# label -- A verbose name for this field, for use in displaying this
# field in a form. By default, Django will use a "pretty"
# version of the form field name, if the Field is part of a
# Form.
# initial -- A value to use in this Field's initial display. This value
# is *not* used as a fallback if data isn't given.
# help_text -- An optional string to use as "help text" for this Field.
# error_messages -- An optional dictionary to override the default
# messages that the field will raise.
# show_hidden_initial -- Boolean that specifies if it is needed to render a
# hidden widget with initial value after widget.
# validators -- List of additional validators to use
# localize -- Boolean that specifies if the field should be localized.
# label_suffix -- Suffix to be added to the label. Overrides
# form's label_suffix.
self.required, self.label, self.initial = required, label, initial
self.show_hidden_initial = show_hidden_initial
self.help_text = help_text
self.label_suffix = label_suffix
widget = widget or self.widget
if isinstance(widget, type):
widget = widget()
# Trigger the localization machinery if needed.
self.localize = localize
if self.localize:
widget.is_localized = True
# Let the widget know whether it should display as required.
widget.is_required = self.required
# Hook into self.widget_attrs() for any Field-specific HTML attributes.
extra_attrs = self.widget_attrs(widget)
if extra_attrs:
widget.attrs.update(extra_attrs)
self.widget = widget
# Increase the creation counter, and save our local copy.
self.creation_counter = Field.creation_counter
Field.creation_counter += 1
messages = {}
for c in reversed(self.__class__.__mro__):
messages.update(getattr(c, 'default_error_messages', {}))
messages.update(error_messages or {})
self.error_messages = messages
self.validators = self.default_validators + validators
super(Field, self).__init__()
def prepare_value(self, value):
return value
def to_python(self, value):
return value
def validate(self, value):
if value in self.empty_values and self.required:
raise ValidationError(self.error_messages['required'], code='required')
def run_validators(self, value):
if value in self.empty_values:
return
errors = []
for v in self.validators:
try:
v(value)
except ValidationError as e:
if hasattr(e, 'code') and e.code in self.error_messages:
e.message = self.error_messages[e.code]
errors.extend(e.error_list)
if errors:
raise ValidationError(errors)
def clean(self, value):
"""
Validates the given value and returns its "cleaned" value as an
appropriate Python object.
Raises ValidationError for any errors.
"""
value = self.to_python(value)
self.validate(value)
self.run_validators(value)
return value
def bound_data(self, data, initial):
"""
Return the value that should be shown for this field on render of a
bound form, given the submitted POST data for the field and the initial
data, if any.
For most fields, this will simply be data; FileFields need to handle it
a bit differently.
"""
return data
def widget_attrs(self, widget):
"""
Given a Widget instance (*not* a Widget class), returns a dictionary of
any HTML attributes that should be added to the Widget, based on this
Field.
"""
return {}
def get_limit_choices_to(self):
"""
Returns ``limit_choices_to`` for this form field.
If it is a callable, it will be invoked and the result will be
returned.
"""
if callable(self.limit_choices_to):
return self.limit_choices_to()
return self.limit_choices_to
def _has_changed(self, initial, data):
"""
Return True if data differs from initial.
"""
# For purposes of seeing whether something has changed, None is
# the same as an empty string, if the data or initial value we get
# is None, replace it w/ ''.
initial_value = initial if initial is not None else ''
try:
data = self.to_python(data)
if hasattr(self, '_coerce'):
data = self._coerce(data)
except ValidationError:
return True
data_value = data if data is not None else ''
return initial_value != data_value
def __deepcopy__(self, memo):
result = copy.copy(self)
memo[id(self)] = result
result.widget = copy.deepcopy(self.widget, memo)
result.validators = self.validators[:]
return result
class CharField(Field):
def __init__(self, max_length=None, min_length=None, *args, **kwargs):
self.max_length, self.min_length = max_length, min_length
super(CharField, self).__init__(*args, **kwargs)
if min_length is not None:
self.validators.append(validators.MinLengthValidator(int(min_length)))
if max_length is not None:
self.validators.append(validators.MaxLengthValidator(int(max_length)))
def to_python(self, value):
"Returns a Unicode object."
if value in self.empty_values:
return ''
return smart_text(value)
def widget_attrs(self, widget):
attrs = super(CharField, self).widget_attrs(widget)
if self.max_length is not None:
# The HTML attribute is maxlength, not max_length.
attrs.update({'maxlength': str(self.max_length)})
return attrs
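# Hedged sketch (ours, not part of Django): clean() composes
# to_python() -> validate() -> run_validators(). `_demo_charfield_clean` is
# illustrative only and is never called; actually running it requires a
# configured Django settings module.
def _demo_charfield_clean():
    f = CharField(max_length=5)
    assert f.clean('hello') == 'hello'
    try:
        f.clean('')  # empty value fails the default required=True check
    except ValidationError:
        pass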
class IntegerField(Field):
widget = NumberInput
default_error_messages = {
'invalid': _('Enter a whole number.'),
}
def __init__(self, max_value=None, min_value=None, *args, **kwargs):
self.max_value, self.min_value = max_value, min_value
if kwargs.get('localize') and self.widget == NumberInput:
# Localized number input is not well supported on most browsers
kwargs.setdefault('widget', super(IntegerField, self).widget)
super(IntegerField, self).__init__(*args, **kwargs)
if max_value is not None:
self.validators.append(validators.MaxValueValidator(max_value))
if min_value is not None:
self.validators.append(validators.MinValueValidator(min_value))
def to_python(self, value):
"""
Validates that int() can be called on the input. Returns the result
of int(). Returns None for empty values.
"""
value = super(IntegerField, self).to_python(value)
if value in self.empty_values:
return None
if self.localize:
value = formats.sanitize_separators(value)
try:
value = int(str(value))
except (ValueError, TypeError):
raise ValidationError(self.error_messages['invalid'], code='invalid')
return value
def widget_attrs(self, widget):
attrs = super(IntegerField, self).widget_attrs(widget)
if isinstance(widget, NumberInput):
if self.min_value is not None:
attrs['min'] = self.min_value
if self.max_value is not None:
attrs['max'] = self.max_value
return attrs
class FloatField(IntegerField):
default_error_messages = {
'invalid': _('Enter a number.'),
}
def to_python(self, value):
"""
Validates that float() can be called on the input. Returns the result
of float(). Returns None for empty values.
"""
value = super(IntegerField, self).to_python(value)
if value in self.empty_values:
return None
if self.localize:
value = formats.sanitize_separators(value)
try:
value = float(value)
except (ValueError, TypeError):
raise ValidationError(self.error_messages['invalid'], code='invalid')
return value
def validate(self, value):
super(FloatField, self).validate(value)
# Check for NaN (which is the only thing not equal to itself) and +/- infinity
if value != value or value in (Decimal('Inf'), Decimal('-Inf')):
raise ValidationError(self.error_messages['invalid'], code='invalid')
return value
def widget_attrs(self, widget):
attrs = super(FloatField, self).widget_attrs(widget)
if isinstance(widget, NumberInput) and 'step' not in widget.attrs:
attrs.setdefault('step', 'any')
return attrs
class DecimalField(IntegerField):
default_error_messages = {
'invalid': _('Enter a number.'),
'max_digits': ungettext_lazy(
'Ensure that there are no more than %(max)s digit in total.',
'Ensure that there are no more than %(max)s digits in total.',
'max'),
'max_decimal_places': ungettext_lazy(
'Ensure that there are no more than %(max)s decimal place.',
'Ensure that there are no more than %(max)s decimal places.',
'max'),
'max_whole_digits': ungettext_lazy(
'Ensure that there are no more than %(max)s digit before the decimal point.',
'Ensure that there are no more than %(max)s digits before the decimal point.',
'max'),
}
def __init__(self, max_value=None, min_value=None, max_digits=None, decimal_places=None, *args, **kwargs):
self.max_digits, self.decimal_places = max_digits, decimal_places
super(DecimalField, self).__init__(max_value, min_value, *args, **kwargs)
def to_python(self, value):
"""
Validates that the input is a decimal number. Returns a Decimal
instance. Returns None for empty values. Ensures that there are no more
than max_digits in the number, and no more than decimal_places digits
after the decimal point.
"""
if value in self.empty_values:
return None
if self.localize:
value = formats.sanitize_separators(value)
value = smart_text(value).strip()
try:
value = Decimal(value)
except DecimalException:
raise ValidationError(self.error_messages['invalid'], code='invalid')
return value
def validate(self, value):
super(DecimalField, self).validate(value)
if value in self.empty_values:
return
# Check for NaN, Inf and -Inf values. We can't compare directly for NaN,
# since it is never equal to itself. However, NaN is the only value that
# isn't equal to itself, so we can use this to identify NaN
if value != value or value == Decimal("Inf") or value == Decimal("-Inf"):
raise ValidationError(self.error_messages['invalid'], code='invalid')
sign, digittuple, exponent = value.as_tuple()
decimals = abs(exponent)
# digittuple doesn't include any leading zeros.
digits = len(digittuple)
if decimals > digits:
# We have leading zeros up to or past the decimal point. Count
# everything past the decimal point as a digit. We do not count
# 0 before the decimal point as a digit since that would mean
# we would not allow max_digits = decimal_places.
digits = decimals
whole_digits = digits - decimals
if self.max_digits is not None and digits > self.max_digits:
raise ValidationError(
self.error_messages['max_digits'],
code='max_digits',
params={'max': self.max_digits},
)
if self.decimal_places is not None and decimals > self.decimal_places:
raise ValidationError(
self.error_messages['max_decimal_places'],
code='max_decimal_places',
params={'max': self.decimal_places},
)
if (self.max_digits is not None and self.decimal_places is not None
and whole_digits > (self.max_digits - self.decimal_places)):
raise ValidationError(
self.error_messages['max_whole_digits'],
code='max_whole_digits',
params={'max': (self.max_digits - self.decimal_places)},
)
return value
def widget_attrs(self, widget):
attrs = super(DecimalField, self).widget_attrs(widget)
if isinstance(widget, NumberInput) and 'step' not in widget.attrs:
if self.decimal_places is not None:
# Use exponential notation for small values since they might
# be parsed as 0 otherwise. ref #20765
step = str(Decimal('1') / 10 ** self.decimal_places).lower()
else:
step = 'any'
attrs.setdefault('step', step)
return attrs
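# Hedged sketch (ours): how max_digits and decimal_places interact. Never
# called; running it needs configured Django settings.
def _demo_decimal_limits():
    f = DecimalField(max_digits=4, decimal_places=2)
    assert f.clean('12.34') == Decimal('12.34')
    try:
        f.clean('123.45')  # five digits in total exceeds max_digits=4
    except ValidationError:
        pass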
class BaseTemporalField(Field):
def __init__(self, input_formats=None, *args, **kwargs):
super(BaseTemporalField, self).__init__(*args, **kwargs)
if input_formats is not None:
self.input_formats = input_formats
def to_python(self, value):
# Try to coerce the value to unicode.
unicode_value = force_text(value, strings_only=True)
if isinstance(unicode_value, six.text_type):
value = unicode_value.strip()
# If unicode, try to strptime against each input format.
if isinstance(value, six.text_type):
for format in self.input_formats:
try:
return self.strptime(value, format)
except (ValueError, TypeError):
continue
raise ValidationError(self.error_messages['invalid'], code='invalid')
def strptime(self, value, format):
raise NotImplementedError('Subclasses must define this method.')
class DateField(BaseTemporalField):
widget = DateInput
input_formats = formats.get_format_lazy('DATE_INPUT_FORMATS')
default_error_messages = {
'invalid': _('Enter a valid date.'),
}
def to_python(self, value):
"""
Validates that the input can be converted to a date. Returns a Python
datetime.date object.
"""
if value in self.empty_values:
return None
if isinstance(value, datetime.datetime):
return value.date()
if isinstance(value, datetime.date):
return value
return super(DateField, self).to_python(value)
def strptime(self, value, format):
return datetime.datetime.strptime(force_str(value), format).date()
class TimeField(BaseTemporalField):
widget = TimeInput
input_formats = formats.get_format_lazy('TIME_INPUT_FORMATS')
default_error_messages = {
'invalid': _('Enter a valid time.')
}
def to_python(self, value):
"""
Validates that the input can be converted to a time. Returns a Python
datetime.time object.
"""
if value in self.empty_values:
return None
if isinstance(value, datetime.time):
return value
return super(TimeField, self).to_python(value)
def strptime(self, value, format):
return datetime.datetime.strptime(force_str(value), format).time()
class DateTimeField(BaseTemporalField):
widget = DateTimeInput
input_formats = formats.get_format_lazy('DATETIME_INPUT_FORMATS')
default_error_messages = {
'invalid': _('Enter a valid date/time.'),
}
def prepare_value(self, value):
if isinstance(value, datetime.datetime):
value = to_current_timezone(value)
return value
def to_python(self, value):
"""
Validates that the input can be converted to a datetime. Returns a
Python datetime.datetime object.
"""
if value in self.empty_values:
return None
if isinstance(value, datetime.datetime):
return from_current_timezone(value)
if isinstance(value, datetime.date):
result = datetime.datetime(value.year, value.month, value.day)
return from_current_timezone(result)
if isinstance(value, list):
# Input comes from a SplitDateTimeWidget, for example. So, it's two
# components: date and time.
warnings.warn(
'Using SplitDateTimeWidget with DateTimeField is deprecated. '
'Use SplitDateTimeField instead.',
RemovedInDjango19Warning, stacklevel=2)
if len(value) != 2:
raise ValidationError(self.error_messages['invalid'], code='invalid')
if value[0] in self.empty_values and value[1] in self.empty_values:
return None
value = '%s %s' % tuple(value)
result = super(DateTimeField, self).to_python(value)
return from_current_timezone(result)
def strptime(self, value, format):
return datetime.datetime.strptime(force_str(value), format)
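# Hedged sketch (ours): BaseTemporalField subclasses try each entry of
# `input_formats` through strptime() until one parses. Never called;
# clean() needs configured Django settings for its error messages.
def _demo_date_parsing():
    f = DateField(input_formats=['%Y-%m-%d'])
    return f.clean('2014-07-04')  # -> datetime.date(2014, 7, 4)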
class RegexField(CharField):
def __init__(self, regex, max_length=None, min_length=None, error_message=None, *args, **kwargs):
"""
regex can be either a string or a compiled regular expression object.
error_message is an optional error message to use, if
'Enter a valid value' is too generic for you.
"""
# error_message is just kept for backwards compatibility:
if error_message is not None:
warnings.warn(
"The 'error_message' argument is deprecated. Use "
"Field.error_messages['invalid'] instead.",
RemovedInDjango20Warning, stacklevel=2
)
error_messages = kwargs.get('error_messages') or {}
error_messages['invalid'] = error_message
kwargs['error_messages'] = error_messages
super(RegexField, self).__init__(max_length, min_length, *args, **kwargs)
self._set_regex(regex)
def _get_regex(self):
return self._regex
def _set_regex(self, regex):
if isinstance(regex, six.string_types):
regex = re.compile(regex, re.UNICODE)
self._regex = regex
if hasattr(self, '_regex_validator') and self._regex_validator in self.validators:
self.validators.remove(self._regex_validator)
self._regex_validator = validators.RegexValidator(regex=regex)
self.validators.append(self._regex_validator)
regex = property(_get_regex, _set_regex)
class EmailField(CharField):
widget = EmailInput
default_validators = [validators.validate_email]
def clean(self, value):
value = self.to_python(value).strip()
return super(EmailField, self).clean(value)
class FileField(Field):
widget = ClearableFileInput
default_error_messages = {
'invalid': _("No file was submitted. Check the encoding type on the form."),
'missing': _("No file was submitted."),
'empty': _("The submitted file is empty."),
'max_length': ungettext_lazy(
'Ensure this filename has at most %(max)d character (it has %(length)d).',
'Ensure this filename has at most %(max)d characters (it has %(length)d).',
'max'),
'contradiction': _('Please either submit a file or check the clear checkbox, not both.')
}
def __init__(self, *args, **kwargs):
self.max_length = kwargs.pop('max_length', None)
self.allow_empty_file = kwargs.pop('allow_empty_file', False)
super(FileField, self).__init__(*args, **kwargs)
def to_python(self, data):
if data in self.empty_values:
return None
# UploadedFile objects should have name and size attributes.
try:
file_name = data.name
file_size = data.size
except AttributeError:
raise ValidationError(self.error_messages['invalid'], code='invalid')
if self.max_length is not None and len(file_name) > self.max_length:
params = {'max': self.max_length, 'length': len(file_name)}
raise ValidationError(self.error_messages['max_length'], code='max_length', params=params)
if not file_name:
raise ValidationError(self.error_messages['invalid'], code='invalid')
if not self.allow_empty_file and not file_size:
raise ValidationError(self.error_messages['empty'], code='empty')
return data
def clean(self, data, initial=None):
# If the widget got contradictory inputs, we raise a validation error
if data is FILE_INPUT_CONTRADICTION:
raise ValidationError(self.error_messages['contradiction'], code='contradiction')
# False means the field value should be cleared; further validation is
# not needed.
if data is False:
if not self.required:
return False
# If the field is required, clearing is not possible (the widget
# shouldn't return False data in that case anyway). False is not
            # in self.empty_values; if a False value makes it this far
# it should be validated from here on out as None (so it will be
# caught by the required check).
data = None
if not data and initial:
return initial
return super(FileField, self).clean(data)
def bound_data(self, data, initial):
if data in (None, FILE_INPUT_CONTRADICTION):
return initial
return data
def _has_changed(self, initial, data):
if data is None:
return False
return True
class ImageField(FileField):
default_error_messages = {
'invalid_image': _("Upload a valid image. The file you uploaded was either not an image or a corrupted image."),
}
def to_python(self, data):
"""
Checks that the file-upload field data contains a valid image (GIF, JPG,
PNG, possibly others -- whatever the Python Imaging Library supports).
"""
f = super(ImageField, self).to_python(data)
if f is None:
return None
from PIL import Image
# We need to get a file object for Pillow. We might have a path or we might
# have to read the data into memory.
if hasattr(data, 'temporary_file_path'):
file = data.temporary_file_path()
else:
if hasattr(data, 'read'):
file = BytesIO(data.read())
else:
file = BytesIO(data['content'])
try:
# load() could spot a truncated JPEG, but it loads the entire
# image in memory, which is a DoS vector. See #3848 and #18520.
image = Image.open(file)
# verify() must be called immediately after the constructor.
image.verify()
# Annotating so subclasses can reuse it for their own validation
f.image = image
f.content_type = Image.MIME[image.format]
except Exception:
# Pillow doesn't recognize it as an image.
six.reraise(ValidationError, ValidationError(
self.error_messages['invalid_image'],
code='invalid_image',
), sys.exc_info()[2])
if hasattr(f, 'seek') and callable(f.seek):
f.seek(0)
return f
class URLField(CharField):
widget = URLInput
default_error_messages = {
'invalid': _('Enter a valid URL.'),
}
default_validators = [validators.URLValidator()]
def to_python(self, value):
def split_url(url):
"""
            Returns a list of url parts via ``urlparse.urlsplit`` (or raises
            ``ValidationError`` for malformed URLs).
"""
try:
return list(urlsplit(url))
except ValueError:
# urlparse.urlsplit can raise a ValueError with some
# misformatted URLs.
raise ValidationError(self.error_messages['invalid'], code='invalid')
value = super(URLField, self).to_python(value)
if value:
url_fields = split_url(value)
if not url_fields[0]:
# If no URL scheme given, assume http://
url_fields[0] = 'http'
if not url_fields[1]:
# Assume that if no domain is provided, that the path segment
# contains the domain.
url_fields[1] = url_fields[2]
url_fields[2] = ''
# Rebuild the url_fields list, since the domain segment may now
# contain the path too.
url_fields = split_url(urlunsplit(url_fields))
value = urlunsplit(url_fields)
return value
def clean(self, value):
value = self.to_python(value).strip()
return super(URLField, self).clean(value)
class BooleanField(Field):
widget = CheckboxInput
def to_python(self, value):
"""Returns a Python boolean object."""
# Explicitly check for the string 'False', which is what a hidden field
# will submit for False. Also check for '0', since this is what
# RadioSelect will provide. Because bool("True") == bool('1') == True,
# we don't need to handle that explicitly.
if isinstance(value, six.string_types) and value.lower() in ('false', '0'):
value = False
else:
value = bool(value)
return super(BooleanField, self).to_python(value)
def validate(self, value):
if not value and self.required:
raise ValidationError(self.error_messages['required'], code='required')
def _has_changed(self, initial, data):
# Sometimes data or initial could be None or '' which should be the
# same thing as False.
if initial == 'False':
# show_hidden_initial may have transformed False to 'False'
initial = False
return bool(initial) != bool(data)
class NullBooleanField(BooleanField):
"""
A field whose valid values are None, True and False. Invalid values are
cleaned to None.
"""
widget = NullBooleanSelect
def to_python(self, value):
"""
Explicitly checks for the string 'True' and 'False', which is what a
hidden field will submit for True and False, for 'true' and 'false',
which are likely to be returned by JavaScript serializations of forms,
and for '1' and '0', which is what a RadioField will submit. Unlike
        the BooleanField we need to explicitly check for True, because we are
        not using the bool() function.
"""
if value in (True, 'True', 'true', '1'):
return True
elif value in (False, 'False', 'false', '0'):
return False
else:
return None
def validate(self, value):
pass
def _has_changed(self, initial, data):
# None (unknown) and False (No) are not the same
if initial is not None:
initial = bool(initial)
if data is not None:
data = bool(data)
return initial != data
class ChoiceField(Field):
widget = Select
default_error_messages = {
'invalid_choice': _('Select a valid choice. %(value)s is not one of the available choices.'),
}
def __init__(self, choices=(), required=True, widget=None, label=None,
initial=None, help_text='', *args, **kwargs):
super(ChoiceField, self).__init__(required=required, widget=widget, label=label,
initial=initial, help_text=help_text, *args, **kwargs)
self.choices = choices
def __deepcopy__(self, memo):
result = super(ChoiceField, self).__deepcopy__(memo)
result._choices = copy.deepcopy(self._choices, memo)
return result
def _get_choices(self):
return self._choices
def _set_choices(self, value):
# Setting choices also sets the choices on the widget.
# choices can be any iterable, but we call list() on it because
# it will be consumed more than once.
self._choices = self.widget.choices = list(value)
choices = property(_get_choices, _set_choices)
def to_python(self, value):
"Returns a Unicode object."
if value in self.empty_values:
return ''
return smart_text(value)
def validate(self, value):
"""
Validates that the input is in self.choices.
"""
super(ChoiceField, self).validate(value)
if value and not self.valid_value(value):
raise ValidationError(
self.error_messages['invalid_choice'],
code='invalid_choice',
params={'value': value},
)
def valid_value(self, value):
"Check to see if the provided value is a valid choice"
text_value = force_text(value)
for k, v in self.choices:
if isinstance(v, (list, tuple)):
# This is an optgroup, so look inside the group for options
for k2, v2 in v:
if value == k2 or text_value == force_text(k2):
return True
else:
if value == k or text_value == force_text(k):
return True
return False
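# Hedged sketch (ours): valid_value() handles both flat choices and
# optgroups, where an optgroup is a (group label, sub-choices) pair.
def _demo_choice_groups():
    f = ChoiceField(choices=[
        ('a', 'Plain option'),
        ('Group', (('b', 'Grouped 1'), ('c', 'Grouped 2'))),
    ])
    return f.valid_value('b')  # True -- found inside the optgroup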
class TypedChoiceField(ChoiceField):
def __init__(self, *args, **kwargs):
self.coerce = kwargs.pop('coerce', lambda val: val)
self.empty_value = kwargs.pop('empty_value', '')
super(TypedChoiceField, self).__init__(*args, **kwargs)
def _coerce(self, value):
"""
Validate that the value can be coerced to the right type (if not empty).
"""
if value == self.empty_value or value in self.empty_values:
return self.empty_value
try:
value = self.coerce(value)
except (ValueError, TypeError, ValidationError):
raise ValidationError(
self.error_messages['invalid_choice'],
code='invalid_choice',
params={'value': value},
)
return value
def clean(self, value):
value = super(TypedChoiceField, self).clean(value)
return self._coerce(value)
class MultipleChoiceField(ChoiceField):
hidden_widget = MultipleHiddenInput
widget = SelectMultiple
default_error_messages = {
'invalid_choice': _('Select a valid choice. %(value)s is not one of the available choices.'),
'invalid_list': _('Enter a list of values.'),
}
def to_python(self, value):
if not value:
return []
elif not isinstance(value, (list, tuple)):
raise ValidationError(self.error_messages['invalid_list'], code='invalid_list')
return [smart_text(val) for val in value]
def validate(self, value):
"""
Validates that the input is a list or tuple.
"""
if self.required and not value:
raise ValidationError(self.error_messages['required'], code='required')
# Validate that each value in the value list is in self.choices.
for val in value:
if not self.valid_value(val):
raise ValidationError(
self.error_messages['invalid_choice'],
code='invalid_choice',
params={'value': val},
)
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
initial_set = set(force_text(value) for value in initial)
data_set = set(force_text(value) for value in data)
return data_set != initial_set
class TypedMultipleChoiceField(MultipleChoiceField):
def __init__(self, *args, **kwargs):
self.coerce = kwargs.pop('coerce', lambda val: val)
self.empty_value = kwargs.pop('empty_value', [])
super(TypedMultipleChoiceField, self).__init__(*args, **kwargs)
def _coerce(self, value):
"""
Validates that the values are in self.choices and can be coerced to the
right type.
"""
if value == self.empty_value or value in self.empty_values:
return self.empty_value
new_value = []
for choice in value:
try:
new_value.append(self.coerce(choice))
except (ValueError, TypeError, ValidationError):
raise ValidationError(
self.error_messages['invalid_choice'],
code='invalid_choice',
params={'value': choice},
)
return new_value
def clean(self, value):
value = super(TypedMultipleChoiceField, self).clean(value)
return self._coerce(value)
def validate(self, value):
if value != self.empty_value:
super(TypedMultipleChoiceField, self).validate(value)
elif self.required:
raise ValidationError(self.error_messages['required'], code='required')
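# Hedged sketch (ours): coerce submitted strings to ints after the choice
# validation has passed; empty submissions collapse to `empty_value`.
def _demo_typed_multiple():
    f = TypedMultipleChoiceField(choices=[('1', 'one'), ('2', 'two')],
                                 coerce=int, required=False)
    return f.clean(['1', '2'])  # -> [1, 2]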
class ComboField(Field):
"""
A Field whose clean() method calls multiple Field clean() methods.
"""
def __init__(self, fields=(), *args, **kwargs):
super(ComboField, self).__init__(*args, **kwargs)
# Set 'required' to False on the individual fields, because the
# required validation will be handled by ComboField, not by those
# individual fields.
for f in fields:
f.required = False
self.fields = fields
def clean(self, value):
"""
Validates the given value against all of self.fields, which is a
list of Field instances.
"""
super(ComboField, self).clean(value)
for field in self.fields:
value = field.clean(value)
return value
class MultiValueField(Field):
"""
A Field that aggregates the logic of multiple Fields.
Its clean() method takes a "decompressed" list of values, which are then
cleaned into a single value according to self.fields. Each value in
this list is cleaned by the corresponding field -- the first value is
cleaned by the first field, the second value is cleaned by the second
field, etc. Once all fields are cleaned, the list of clean values is
"compressed" into a single value.
Subclasses should not have to implement clean(). Instead, they must
implement compress(), which takes a list of valid values and returns a
"compressed" version of those values -- a single value.
You'll probably want to use this with MultiWidget.
"""
default_error_messages = {
'invalid': _('Enter a list of values.'),
'incomplete': _('Enter a complete value.'),
}
def __init__(self, fields=(), *args, **kwargs):
self.require_all_fields = kwargs.pop('require_all_fields', True)
super(MultiValueField, self).__init__(*args, **kwargs)
for f in fields:
f.error_messages.setdefault('incomplete',
self.error_messages['incomplete'])
if self.require_all_fields:
# Set 'required' to False on the individual fields, because the
# required validation will be handled by MultiValueField, not
# by those individual fields.
f.required = False
self.fields = fields
def __deepcopy__(self, memo):
result = super(MultiValueField, self).__deepcopy__(memo)
result.fields = tuple([x.__deepcopy__(memo) for x in self.fields])
return result
def validate(self, value):
pass
def clean(self, value):
"""
Validates every value in the given list. A value is validated against
the corresponding Field in self.fields.
For example, if this MultiValueField was instantiated with
fields=(DateField(), TimeField()), clean() would call
DateField.clean(value[0]) and TimeField.clean(value[1]).
"""
clean_data = []
errors = []
if not value or isinstance(value, (list, tuple)):
if not value or not [v for v in value if v not in self.empty_values]:
if self.required:
raise ValidationError(self.error_messages['required'], code='required')
else:
return self.compress([])
else:
raise ValidationError(self.error_messages['invalid'], code='invalid')
for i, field in enumerate(self.fields):
try:
field_value = value[i]
except IndexError:
field_value = None
if field_value in self.empty_values:
if self.require_all_fields:
# Raise a 'required' error if the MultiValueField is
# required and any field is empty.
if self.required:
raise ValidationError(self.error_messages['required'], code='required')
elif field.required:
# Otherwise, add an 'incomplete' error to the list of
# collected errors and skip field cleaning, if a required
# field is empty.
if field.error_messages['incomplete'] not in errors:
errors.append(field.error_messages['incomplete'])
continue
try:
clean_data.append(field.clean(field_value))
except ValidationError as e:
# Collect all validation errors in a single list, which we'll
# raise at the end of clean(), rather than raising a single
# exception for the first error we encounter. Skip duplicates.
errors.extend(m for m in e.error_list if m not in errors)
if errors:
raise ValidationError(errors)
out = self.compress(clean_data)
self.validate(out)
self.run_validators(out)
return out
def compress(self, data_list):
"""
Returns a single value for the given list of values. The values can be
assumed to be valid.
For example, if this MultiValueField was instantiated with
fields=(DateField(), TimeField()), this might return a datetime
object created by combining the date and time in data_list.
"""
raise NotImplementedError('Subclasses must implement this method.')
def _has_changed(self, initial, data):
if initial is None:
initial = ['' for x in range(0, len(data))]
else:
if not isinstance(initial, list):
initial = self.widget.decompress(initial)
for field, initial, data in zip(self.fields, initial, data):
if field._has_changed(field.to_python(initial), data):
return True
return False
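# Hedged sketch (ours, not part of Django): a minimal MultiValueField
# subclass only supplies its component fields plus compress().
class _DemoRangeField(MultiValueField):
    def __init__(self, *args, **kwargs):
        fields = (IntegerField(), IntegerField())
        super(_DemoRangeField, self).__init__(fields, *args, **kwargs)
    def compress(self, data_list):
        # data_list holds the already-cleaned per-field values, in order.
        return tuple(data_list) if data_list else None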
class FilePathField(ChoiceField):
def __init__(self, path, match=None, recursive=False, allow_files=True,
allow_folders=False, required=True, widget=None, label=None,
initial=None, help_text='', *args, **kwargs):
self.path, self.match, self.recursive = path, match, recursive
self.allow_files, self.allow_folders = allow_files, allow_folders
super(FilePathField, self).__init__(choices=(), required=required,
widget=widget, label=label, initial=initial, help_text=help_text,
*args, **kwargs)
if self.required:
self.choices = []
else:
self.choices = [("", "---------")]
if self.match is not None:
self.match_re = re.compile(self.match)
if recursive:
for root, dirs, files in sorted(os.walk(self.path)):
if self.allow_files:
for f in files:
if self.match is None or self.match_re.search(f):
f = os.path.join(root, f)
self.choices.append((f, f.replace(path, "", 1)))
if self.allow_folders:
for f in dirs:
if f == '__pycache__':
continue
if self.match is None or self.match_re.search(f):
f = os.path.join(root, f)
self.choices.append((f, f.replace(path, "", 1)))
else:
try:
for f in sorted(os.listdir(self.path)):
if f == '__pycache__':
continue
full_file = os.path.join(self.path, f)
if (((self.allow_files and os.path.isfile(full_file)) or
(self.allow_folders and os.path.isdir(full_file))) and
(self.match is None or self.match_re.search(f))):
self.choices.append((full_file, f))
except OSError:
pass
self.widget.choices = self.choices
class SplitDateTimeField(MultiValueField):
widget = SplitDateTimeWidget
hidden_widget = SplitHiddenDateTimeWidget
default_error_messages = {
'invalid_date': _('Enter a valid date.'),
'invalid_time': _('Enter a valid time.'),
}
def __init__(self, input_date_formats=None, input_time_formats=None, *args, **kwargs):
errors = self.default_error_messages.copy()
if 'error_messages' in kwargs:
errors.update(kwargs['error_messages'])
localize = kwargs.get('localize', False)
fields = (
DateField(input_formats=input_date_formats,
error_messages={'invalid': errors['invalid_date']},
localize=localize),
TimeField(input_formats=input_time_formats,
error_messages={'invalid': errors['invalid_time']},
localize=localize),
)
super(SplitDateTimeField, self).__init__(fields, *args, **kwargs)
def compress(self, data_list):
if data_list:
# Raise a validation error if time or date is empty
# (possible if SplitDateTimeField has required=False).
if data_list[0] in self.empty_values:
raise ValidationError(self.error_messages['invalid_date'], code='invalid_date')
if data_list[1] in self.empty_values:
raise ValidationError(self.error_messages['invalid_time'], code='invalid_time')
result = datetime.datetime.combine(*data_list)
return from_current_timezone(result)
return None
class IPAddressField(CharField):
default_validators = [validators.validate_ipv4_address]
def __init__(self, *args, **kwargs):
warnings.warn("IPAddressField has been deprecated. Use GenericIPAddressField instead.",
RemovedInDjango19Warning)
super(IPAddressField, self).__init__(*args, **kwargs)
def to_python(self, value):
if value in self.empty_values:
return ''
return value.strip()
class GenericIPAddressField(CharField):
def __init__(self, protocol='both', unpack_ipv4=False, *args, **kwargs):
self.unpack_ipv4 = unpack_ipv4
self.default_validators = validators.ip_address_validators(protocol, unpack_ipv4)[0]
super(GenericIPAddressField, self).__init__(*args, **kwargs)
def to_python(self, value):
if value in self.empty_values:
return ''
value = value.strip()
if value and ':' in value:
return clean_ipv6_address(value, self.unpack_ipv4)
return value
class SlugField(CharField):
default_validators = [validators.validate_slug]
def clean(self, value):
value = self.to_python(value).strip()
return super(SlugField, self).clean(value)
# license: bsd-3-clause

# ---- Wyliodrin/wyliodrin-server | tests/debugging/sim_board.py ----
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Board
"""
import getpass
import logging
import os
import signal
import sleekxmpp
import ssl
import sys
import threading
import time
from sleekxmpp import Message, Presence
from sleekxmpp.xmlstream import ElementBase
from sleekxmpp.xmlstream import register_stanza_plugin
from sleekxmpp.xmlstream.handler import Callback
from sleekxmpp.xmlstream.matcher import StanzaPath
# Python versions before 3.0 do not use UTF-8 encoding
# by default. To ensure that Unicode is handled properly
# throughout SleekXMPP, we will set the default encoding
# ourselves to UTF-8.
if sys.version_info < (3, 0):
from sleekxmpp.util.misc_ops import setdefaultencoding
setdefaultencoding('utf8')
else:
raw_input = input
JID = "[email protected]"
PASS = "wyliodrin"
MESSAGE = None
gdb_commands_pipe_name = "/tmp/gdb_commands"
gdb_results_pipe_name = "/tmp/gdb_results"
class W(ElementBase):
"""
<w xmlns="wyliodrin" d="<msgpack_data>"/>
"""
name = 'w'
namespace = 'wyliodrin'
plugin_attrib = 'w'
interfaces = set(('d',))
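# Hedged sketch (ours, not part of wyliodrin-server): once W is registered
# against Message (SimBoard.__init__ below does this), the payload travels
# through the 'w' plugin attribute. Never called.
def _demo_w_stanza():
    register_stanza_plugin(Message, W)
    msg = Message()
    msg['w']['d'] = 'msgpack-data-goes-here'
    return str(msg)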
class SimBoard(sleekxmpp.ClientXMPP):
def __init__(self, jid, password, pipeout):
sleekxmpp.ClientXMPP.__init__(self, jid, password)
self.pipeout = pipeout
self.add_event_handler("session_start", self.start, threaded=False)
self.register_handler(
Callback('Some custom message',
StanzaPath('message/w'),
self._handle_action))
self.add_event_handler('custom_action',
self._handle_action_event,
threaded=True)
register_stanza_plugin(Message, W)
def start(self, event):
global MESSAGE
# Send priority
prio = self.Presence()
prio['lang'] = None
prio['to'] = None
prio['priority'] = '50'
prio.send()
# Save message
MESSAGE = self.Message()
MESSAGE['lang'] = None
MESSAGE['to'] = "[email protected]"
def _handle_action(self, msg):
self.event('custom_action', msg)
def _handle_action_event(self, msg):
self.pipeout.write(msg['w']['d'])
self.pipeout.flush()
class Listener(threading.Thread):
def __init__(self, pipein):
threading.Thread.__init__(self)
self.pipein = pipein
def run(self):
global MESSAGE
while True:
# Get result
content = os.read(self.pipein.fileno(), 3 * 1024).decode("utf-8")
MESSAGE['w']['d'] = content
MESSAGE.send()
if __name__ == '__main__':
# Create the commands and results pipes
if not os.path.exists(gdb_commands_pipe_name):
os.mkfifo(gdb_commands_pipe_name)
if not os.path.exists(gdb_results_pipe_name):
os.mkfifo(gdb_results_pipe_name)
# Open pipes
gdb_commands_pipe_fd = open(gdb_commands_pipe_name, 'w')
gdb_results_pipe_fd = open(gdb_results_pipe_name, 'r')
listener = Listener(gdb_results_pipe_fd)
listener.start()
# Setup logging.
logging.basicConfig(level=logging.DEBUG,
format='%(levelname)-8s %(message)s')
xmpp = SimBoard(JID, PASS, gdb_commands_pipe_fd)
xmpp.register_plugin('xep_0030') # Service Discovery
xmpp.register_plugin('xep_0199') # XMPP Ping
xmpp.ssl_version = ssl.PROTOCOL_SSLv3
xmpp.auto_authorize = True
xmpp.auto_subscribe = True
# Connect to the XMPP server and start processing XMPP stanzas.
if xmpp.connect():
xmpp.process(block=True)
print("Done")
else:
print("Unable to connect.")
# license: lgpl-3.0

# ---- mooseman/pdteco | test_parser.py ----
# test_parser.py
# Try a few things with creating tokens which know the
# kind of token that should follow them.
import string, itertools
class token(object):
def __init__(self):
self.type = self.next = self.stmttype = None
self.attrdict = vars(self)
# Set an attribute
# NOTE! This can als be used to store values to be passed
# to the next token.
def set(self, attr, val):
setattr(self, attr, val)
# Get an attribute from a token.
def get(self, attr):
return getattr(self, attr)
def display(self):
print self.attrdict
# Test the code
a = token()
a.set('type', 'foo')
a.set('next', 'bar')
a.set('moose', 'big')
print a.get('next')
a.display()
# Create a parser with two modes - character and word.
# Note - we could add a statement checker to this. It would look at the
# stmttype of tokens to determine which kind of statement they belong in.
# When a statement is complete, it can flag that and act accordingly.
# Also - attach actions to statements. (A sketch of such a checker appears
# at the end of this file.)
class parser(object):
def __init__(self):
self.toklist = []
self.mode = None
def setmode(self, mode):
self.mode = mode
# Clear the token list
def clear(self):
self.toklist = []
def parse(self, stuff, sep=" "):
if self.mode == 'char':
for ch in stuff:
self.toklist.append(ch)
elif self.mode == 'word':
for tok in stuff.split(sep):
self.toklist.append(tok)
def display(self):
print self.toklist
# Test the code
a = parser()
a.setmode('char')
a.parse('The quick brown fox')
a.display()
a.setmode('word')
a.clear()
a.parse('The quick brown fox')
a.display()
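# Hedged sketch (ours): the statement checker suggested above could watch
# the `stmttype` of incoming tokens and report when a statement is complete.
class stmt_checker(object):
    def __init__(self, expected):
        self.expected = list(expected)
        self.seen = []
    def feed(self, tok):
        self.seen.append(tok.get('stmttype'))
        return self.seen == self.expected  # True once the statement is complete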
# license: unlicense

# ---- jtoppins/beaker | Client/src/bkr/client/task_watcher.py ----
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import sys
import time
__all__ = (
"TaskWatcher",
"watch_tasks"
)
def display_tasklist_status(task_list):
state_dict = {}
for task in task_list:
for state, value in task.get_state_dict().iteritems():
state_dict.setdefault(state, 0)
state_dict[state] += value
print "--> " + " ".join(( "%s: %s" % (key, state_dict[key]) for key in sorted(state_dict) )) + " [total: %s]" % sum(state_dict.values())
def watch_tasks(hub, task_id_list, indentation_level=0, sleep_time=30, task_url=None):
"""Watch the task statuses until they finish."""
if not task_id_list:
return
try:
print "Watching tasks (this may be safely interrupted)..."
watcher = TaskWatcher()
for task_id in sorted(task_id_list):
watcher.task_list.append(Task(hub, task_id, indentation_level))
# print task url if task_url is set or TASK_URL exists in config file
task_url = task_url or hub._conf.get("TASK_URL", None)
if task_url is not None:
print "Task url: %s" % (task_url % task_id)
is_failed = False
while True:
all_done = True
changed = False
for task in watcher.task_list:
changed |= watcher.update(task)
is_failed |= watcher.is_failed(task)
all_done &= watcher.is_finished(task)
if changed:
display_tasklist_status(watcher.task_list)
if all_done:
break
time.sleep(sleep_time)
except KeyboardInterrupt:
running_task_list = [ t.task_id for t in watcher.task_list if not watcher.is_finished(t) ]
if running_task_list:
print "Tasks still running: %s" % running_task_list
# Don't report pass on jobs still running.
is_failed = True
return is_failed
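# Hedged usage sketch (ours, not part of Beaker): `hub` stands for a
# hypothetical, already-authenticated hub client exposing `taskactions`
# and `_conf`, which is what watch_tasks() expects; the task id is made up.
def _demo_watch(hub):
    failed = watch_tasks(hub, ["J:12345"], sleep_time=10)
    return 1 if failed else 0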
class TaskWatcher(object):
display_tasklist_status = staticmethod(display_tasklist_status)
def __init__(self):
self.subtask_dict = {}
self.task_list = []
def is_finished(self, task):
"""Is the task finished?"""
if task.task_info is None:
return False
result = task.task_info.get("is_finished", False)
for subtask in self.subtask_dict.itervalues():
result &= subtask.is_finished()
return result
def is_failed(self, task):
"""Did the task Fail?"""
if task.task_info is None:
return False
result = task.task_info.get("is_failed", False)
for subtask in self.subtask_dict.itervalues():
result |= subtask.is_failed()
return result
def update(self, task):
"""Update info and log if needed. Returns True on state change."""
if self.is_finished(task):
return False
last = task.task_info
task.task_info = task.hub.taskactions.task_info(task.task_id, False)
if task.task_info is None:
print "No such task id: %s" % task.task_id
sys.exit(1)
changed = False
state = task.task_info["state"]
if last:
# compare and note status changes
laststate = last["state"]
if laststate != state:
print "%s: %s -> %s" % (task, task.display_state(last), task.display_state(task.task_info))
changed = True
else:
# first time we're seeing this task, so just show the current state
print "%s: %s" % (task, task.display_state(task.task_info))
changed = True
# update all subtasks
for key in sorted(self.subtask_dict.keys()):
changed |= self.subtask_dict[key].update()
return changed
class Task(object):
def __init__(self, hub, task_id, indentation_level=0):
self.hub = hub
self.task_id = task_id
self.task_info = None
self.indentation_level = int(indentation_level)
self.subtask_dict = {}
def __str__(self):
result = "%s%s" % (" " * self.indentation_level, self.task_id)
if self.task_info:
result += " %s" % self.task_info.get("method", "unknown")
return result
def is_failed(self):
"""Did the task fail?"""
if self.task_info is None:
return False
return self.task_info.get("is_failed", False)
def display_state(self, task_info):
worker = task_info.get("worker")
if worker is not None:
return "%s (%s)" % (task_info["state_label"], worker["name"])
return "%s" % task_info["state_label"]
def get_state_dict(self):
state_dict = {}
if self.task_info is not None:
state = self.task_info.get("state_label", "unknown")
state_dict.setdefault(state, 0)
state_dict[state] += 1
for subtask in self.subtask_dict.itervalues():
for state, value in subtask.get_state_dict().iteritems():
state_dict.setdefault(state, 0)
state_dict[state] += value
return state_dict
# license: gpl-2.0

# ---- TUM-AERIUS/Aerius | Raspberry/Stereo/photo-client.py ----
import io
import socket
import struct
import time
import picamera
# Connect a client socket to my_server:8000 (change my_server to the
# hostname of your server)
client_socket = socket.socket()
client_socket.connect(('169.254.251.208', 8000))
# Make a file-like object out of the connection
connection = client_socket.makefile('rwb')
try:
with picamera.PiCamera() as camera:
camera.resolution = (640, 480)
# Start a preview and let the camera warm up for 2 seconds
camera.start_preview()
time.sleep(2)
# Note the start time and construct a stream to hold image data
# temporarily (we could write it directly to connection but in this
# case we want to find out the size of each capture first to keep
# our protocol simple)
start = time.time()
stream = io.BytesIO()
data = struct.unpack('<L', connection.read(struct.calcsize('<L')))[0]
        # OpenCV represents images in BGR format as NumPy arrays
for foo in camera.capture_continuous(stream, format="jpeg"):
# Write the length of the capture to the stream and flush to
# ensure it actually gets sent
connection.write(struct.pack('<L', stream.tell()))
connection.flush()
# Rewind the stream and send the image data over the wire
stream.seek(0)
connection.write(stream.read())
# If we've been capturing for more than 30 seconds, quit
if time.time() - start > 30:
break
# Reset the stream for the next capture
stream.seek(0)
stream.truncate()
            # Assumed protocol (ours): the server signals "end" by sending
            # the byte value of 'e' packed as an unsigned long.
            data = struct.unpack('<L', connection.read(struct.calcsize('<L')))[0]
            if data == ord('e'):
                break
# Write a length of zero to the stream to signal we're done
connection.write(struct.pack('<L', 0))
finally:
connection.close()
    client_socket.close()

# license: mit

# ---- lzkelley/zcode | zcode/math/statistic.py ----
"""General functions for mathematical and numerical operations.
Functions
---------
- confidence_bands - Bin by `xx` to calculate confidence intervals in `yy`.
- confidence_intervals - Compute the values bounding desired confidence intervals.
- cumstats - Calculate a cumulative average and standard deviation.
- log_normal_base_10 -
- percentiles -
- stats - Get basic statistics for the given array.
- stats_str - Return a string with the statistics of the given array.
- sigma - Convert from standard deviation to percentiles.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import warnings
import numpy as np
import scipy as sp
import scipy.stats # noqa
from zcode import utils
from zcode.math import math_core
__all__ = [
'confidence_bands', 'confidence_intervals',
'cumstats', 'frac_str', 'info', 'log_normal_base_10', 'mean',
'percs_from_sigma', 'quantiles', 'random_power', 'sigma',
'stats', 'stats_str', 'std',
'LH_Sampler',
# DEPRECATED
'percentiles'
]
def confidence_bands(xx, yy, xbins=10, xscale='lin', percs=[0.68, 0.95], filter=None):
"""Bin the given data with respect to `xx` and calculate confidence intervals in `yy`.
Arguments
---------
xx : array_like scalars
Data values for the axis by which to bin.
yy : array_like scalars
Data values for the axis in which to calculate confidence intervals, with values
corresponding to each of the `xx` values. Must have the same number of elements
as `xx`.
xbins : int or array_like of scalar
Specification for bins in `xx`. Either a
* int, describing the number of bins `N` to create automatically with scale `xscale`.
* array_like scalar, describing the `N+1` edges of each bin (left and right).
xscale : str
Specification of xbin scaling if bins are to be calculated automatically, {'lin', 'log'}.
Ignored if bin edges are given explicitly to `xbins`.
    percs : scalar or array_like of scalar
        The fractional confidence intervals to calculate (e.g. 0.68 for one sigma).
        Must be between {0.0, 1.0}.
    filter : str or `None`
        Boolean comparison (e.g. '>') applied against zero to filter the `yy` values.
Returns
-------
(for number of bins `N`)
count : (N,) array of int
The number of points in each xbin.
med : (N,) array of float
The median value of points in each bin
conf : array or ndarray of float
Values describing the confidence intervals.
        If a single `percs` value is given, this will have shape (N,2);
        If `M` `percs` values are given, this will have shape (N,M,2)
Where in each case the 0th and 1st element of the last dimension is the lower and upper
confidence bounds respectively.
xbins : (N+1,) array of float
Location of bin edges.
"""
squeeze = False
if not np.iterable(percs):
squeeze = True
percs = [percs]
xx = np.asarray(xx).flatten()
yy = np.asarray(yy).flatten()
if xx.shape != yy.shape:
errStr = "Shapes of `xx` and `yy` must match ('{}' vs. '{}'."
errStr = errStr.format(str(xx.shape), str(yy.shape))
raise ValueError(errStr)
# Filter based on whether `yy` values match `filter` comparison to 0.0
if filter is not None:
compFunc = math_core._comparison_function(filter)
inds = compFunc(yy, 0.0)
xx = xx[inds]
yy = yy[inds]
# Create bins
xbins = math_core.asBinEdges(xbins, xx, scale=xscale)
nbins = xbins.size - 1
# Find the entries corresponding to each bin
groups = math_core.groupDigitized(xx, xbins[1:], edges='right')
# Allocate storage for results
med = np.zeros(nbins)
conf = np.zeros((nbins, np.size(percs), 2))
count = np.zeros(nbins, dtype=int)
# Calculate medians and confidence intervals
for ii, gg in enumerate(groups):
count[ii] = np.size(gg)
if count[ii] == 0: continue
mm, cc = confidence_intervals(yy[gg], percs=percs)
med[ii] = mm
conf[ii, ...] = cc[...]
if squeeze:
conf = conf.squeeze()
return count, med, conf, xbins
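# Hedged usage sketch (ours): bin scattered data in `xx` and pull the
# median plus 68%/95% bands of `yy` in each bin; all values illustrative.
def _demo_confidence_bands():
    xx = np.random.uniform(0.0, 10.0, 1000)
    yy = xx + np.random.normal(0.0, 1.0, xx.size)
    count, med, conf, edges = confidence_bands(xx, yy, xbins=8, percs=[0.68, 0.95])
    # `conf` has shape (8, 2, 2): (bin, interval, lower/upper)
    return count, med, conf, edges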
def confidence_intervals(vals, sigma=None, percs=None, weights=None, axis=None,
filter=None, return_ci=False,
# DEPRECATED ARGUMENTS:
ci=None):
"""Compute the values bounding the target confidence intervals for an array of data.
Arguments
---------
vals : array_like of scalars
Data over which to calculate confidence intervals.
This can be an arbitrarily shaped ndarray.
sigma : (M,) array_like of float
Confidence values as standard-deviations, converted to percentiles.
percs : (M,) array_like of floats
List of desired confidence intervals as fractions (e.g. `[0.68, 0.95]`)
axis : int or None
Axis over which to calculate confidence intervals, or 'None' to marginalize over all axes.
filter : str or `None`
Filter the input array with a boolean comparison to zero.
If no values remain after filtering, ``NaN, NaN`` is returned.
return_ci : bool
Return the confidence-interval values used (i.e. percentiles)
ci : DEPRECATED, use `percs` instead
Returns
-------
med : scalar
Median of the input data.
`None` if there are no values (e.g. after filtering).
conf : ([L, ]M, 2) ndarray of scalar
Bounds for each confidence interval. Shape depends on the number of confidence intervals
passed in `percs`, and the input shape of `vals`.
`None` if there are no values (e.g. after filtering).
If `vals` is 1D or `axis` is 'None', then the output shape will be (M, 2).
If `vals` has more than one-dimension, and `axis` is not 'None', then the shape `L`
will be the shape of `vals`, minus the `axis` axis.
For example,
if ``vals.shape = (4,3,5)` and `axis=1`, then `L = (4,5)`
the final output shape will be: (4,5,M,2).
percs : (M,) ndarray of float, optional
The percentile-values used for calculating confidence intervals.
Only returned if `return_ci` is True.
"""
percs = utils.dep_warn_var("ci", ci, "percs", percs)
if percs is not None and sigma is not None:
raise ValueError("Only provide *either* `percs` or `sigma`!")
if percs is None:
if sigma is None:
sigma = [1.0, 2.0, 3.0]
percs = percs_from_sigma(sigma)
percs = np.atleast_1d(percs)
    if np.any(percs < 0.0) or np.any(percs > 1.0):
raise ValueError("`percs` must be [0.0, 1.0]! {}".format(stats_str(percs)))
# PERC_FUNC = np.percentile
def PERC_FUNC(xx, pp, **kwargs):
return quantiles(xx, pp/100.0, weights=weights, **kwargs)
# Filter input values
if filter is not None:
# Using the filter will flatten the array, so `axis` wont work...
kw = {}
if (axis is not None) and np.ndim(vals) > 1:
kw['axis'] = axis
if weights is not None:
raise NotImplementedError("`weights` argument does not work with `filter`!")
vals = math_core.comparison_filter(vals, filter, mask=True) # , **kw)
# vals = np.ma.filled(vals, np.nan)
# PERC_FUNC = np.nanpercentile # noqa
if vals.size == 0:
return np.nan, np.nan
# Calculate confidence-intervals and median
cdf_vals = np.array([(1.0-percs)/2.0, (1.0+percs)/2.0]).T
# This produces an ndarray with shape `[M, 2(, L)]`
# If ``axis is None`` or `np.ndim(vals) == 1` then the shape will be simply `[M, 2]`
# Otherwise, `L` will be the shape of `vals` without axis `axis`.
conf = [[PERC_FUNC(vals, 100.0*cdf[0], axis=axis),
PERC_FUNC(vals, 100.0*cdf[1], axis=axis)]
for cdf in cdf_vals]
conf = np.array(conf)
# Reshape from `[M, 2, L]` to `[L, M, 2]`
if (np.ndim(vals) > 1) and (axis is not None):
conf = np.moveaxis(conf, -1, 0)
med = PERC_FUNC(vals, 50.0, axis=axis)
if len(conf) == 1:
conf = conf[0]
if return_ci:
return med, conf, percs
return med, conf
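# Hedged usage sketch (ours): median and 1-/2-sigma bounds of a sample.
def _demo_confidence_intervals():
    data = np.random.normal(0.0, 1.0, 10000)
    med, conf = confidence_intervals(data, sigma=[1.0, 2.0])
    # `conf` has shape (2, 2): [[lo_1sig, hi_1sig], [lo_2sig, hi_2sig]]
    return med, conf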
def cumstats(arr):
"""Calculate a cumulative average and standard deviation.
Arguments
---------
arr <flt>[N] : input array
Returns
-------
ave <flt>[N] : cumulative average over ``arr``
std <flt>[N] : cumulative standard deviation over ``arr``
"""
tot = len(arr)
num = np.arange(tot)
std = np.zeros(tot)
# Cumulative sum
sm1 = np.cumsum(arr)
# Cumulative sum of squares
sm2 = np.cumsum(np.square(arr))
# Cumulative average
ave = sm1/(num+1.0)
std[1:] = np.fabs(sm2[1:] - np.square(sm1[1:])/(num[1:]+1.0))/num[1:]
std[1:] = np.sqrt(std[1:])
return ave, std
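# A hedged worked example for `cumstats` (illustrative only).
def _demo_cumstats():  # pragma: nocover
    ave, std = cumstats(np.array([1.0, 2.0, 3.0, 4.0]))
    # ave -> [1.0, 1.5, 2.0, 2.5]; std[k] is the sample std of the first k+1
    # values, e.g. std[1] = sqrt((1-1.5)^2 + (2-1.5)^2) ~ 0.707
    return ave, std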
def frac_str(num, den=None, frac_fmt=None, dec_fmt=None):
"""Create a string of the form '{}/{} = {}' for reporting fractional values.
"""
if den is None:
assert num.dtype == bool, "If no `den` is given, array must be boolean!"
den = num.size
num = np.count_nonzero(num)
try:
dec_frac = num / den
except ZeroDivisionError:
dec_frac = np.nan
if frac_fmt is None:
frac_exp = np.fabs(np.log10([num, den]))
if np.any(frac_exp >= 4):
frac_fmt = ".1e"
else:
frac_fmt = "d"
if dec_fmt is None:
dec_exp = np.fabs(np.log10(dec_frac))
if dec_exp > 3:
dec_fmt = ".3e"
else:
dec_fmt = ".4f"
fstr = "{num:{ff}}/{den:{ff}} = {frac:{df}}".format(
num=num, den=den, frac=dec_frac, ff=frac_fmt, df=dec_fmt)
return fstr
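# A hedged sketch of `frac_str` (illustrative): report how many array values
# pass a cut.
def _demo_frac_str():  # pragma: nocover
    passed = np.array([1.0, 5.0, 0.2, 3.0]) > 1.0
    return frac_str(passed)   # -> "2/4 = 0.5000"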
def info(array, shape=True, sample=3, stats=True):
    """Return a one-line summary (shape, sample values, statistics) of an array."""
    rv = ""
if shape:
rv += "{} ".format(np.shape(array))
if (sample is not None) and (sample > 0):
rv += "{} ".format(math_core.str_array(array, sides=sample))
if stats:
rv += "{} ".format(stats_str(array, label=False))
return rv
def log_normal_base_10(mu, sigma, size=None, shift=0.0):
"""Draw from a lognormal distribution with values in base-10 (instead of e).
Arguments
---------
mu : (N,) scalar
Mean of the distribution in linear space (e.g. 1.0e8 instead of 8.0).
    sigma : (N,) scalar
        Standard deviation of the distribution *in dex* (e.g. 1.0 means a factor of 10.0 scatter)
    size : (M,) int
        Desired size of sample.
    shift : scalar
        Shift the mean by `shift` dex (i.e. multiply `mu` by ``10**shift``).
Returns
-------
dist : (M,...) scalar
Resulting distribution of values (in linear space).
"""
_sigma = np.log(10**sigma)
dist = np.random.lognormal(np.log(mu) + shift*np.log(10.0), _sigma, size)
return dist
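# A hedged sketch of `log_normal_base_10` (illustrative values assumed).
def _demo_log_normal_base_10():  # pragma: nocover
    masses = log_normal_base_10(1.0e8, 0.5, size=1000)
    # np.median(masses) should land near 1e8, while np.std(np.log10(masses))
    # should be near 0.5, i.e. the scatter in dex.
    return masses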
def mean(vals, weights=None, **kwargs):
    """Calculate the (optionally weighted) arithmetic mean."""
    if weights is None:
return np.mean(vals, **kwargs)
ave = np.sum(vals*weights, **kwargs) / np.sum(weights, **kwargs)
return ave
def percentiles(*args, **kwargs):
utils.dep_warn("percentiles", newname="quantiles")
return quantiles(*args, **kwargs)
def quantiles(values, percs=None, sigmas=None, weights=None, axis=None,
values_sorted=False, filter=None):
"""Compute weighted percentiles.
Copied from @Alleo answer: http://stackoverflow.com/a/29677616/230468
NOTE: if `values` is a masked array, then only unmasked values are used!
Arguments
---------
values: (N,)
input data
percs: (M,) scalar [0.0, 1.0]
Desired percentiles of the data.
    weights: (N,) or `None`
        Weights for each input data point in `values`.
values_sorted: bool
If True, then input values are assumed to already be sorted.
Returns
-------
    percs : (M,) float
        Array of weighted quantiles of the input data.
"""
if filter is not None:
values = math_core.comparison_filter(values, filter)
if not isinstance(values, np.ma.MaskedArray):
values = np.asarray(values)
if percs is None:
percs = sp.stats.norm.cdf(sigmas)
if np.ndim(values) > 1:
if axis is None:
values = values.flatten()
else:
if axis is not None:
raise ValueError("Cannot act along axis '{}' for 1D data!".format(axis))
percs = np.array(percs)
if weights is None:
weights = np.ones_like(values)
weights = np.array(weights)
try:
weights = np.ma.masked_array(weights, mask=values.mask)
except AttributeError:
pass
assert np.all(percs >= 0.0) and np.all(percs <= 1.0), 'percentiles should be in [0, 1]'
if not values_sorted:
sorter = np.argsort(values, axis=axis)
values = np.take_along_axis(values, sorter, axis=axis)
weights = np.take_along_axis(weights, sorter, axis=axis)
if axis is None:
weighted_quantiles = np.cumsum(weights) - 0.5 * weights
weighted_quantiles /= np.sum(weights)
percs = np.interp(percs, weighted_quantiles, values)
return percs
weights = np.moveaxis(weights, axis, -1)
values = np.moveaxis(values, axis, -1)
weighted_quantiles = np.cumsum(weights, axis=-1) - 0.5 * weights
weighted_quantiles /= np.sum(weights, axis=-1)[..., np.newaxis]
# weighted_quantiles = np.moveaxis(weighted_quantiles, axis, -1)
percs = [np.interp(percs, weighted_quantiles[idx], values[idx])
for idx in np.ndindex(values.shape[:-1])]
percs = np.array(percs)
return percs
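# A hedged sketch of weighted `quantiles` (illustrative): down-weighting the
# outlier pulls the 50% quantile toward the low values.
def _demo_quantiles():  # pragma: nocover
    vals = np.array([1.0, 2.0, 3.0, 100.0])
    wts = np.array([1.0, 1.0, 1.0, 0.01])
    return quantiles(vals, percs=[0.5], weights=wts)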
def percs_from_sigma(sigma, side='in', boundaries=False):
"""Convert from standard deviation 'sigma' to percentiles in/out-side the normal distribution.
Arguments
---------
    sigma : (N,) array_like scalar
        Standard deviations.
    side : str, {'in', 'out'}
        Calculate percentiles inside (i.e. [-sig, sig]) or outside (i.e. [-inf, -sig] U [sig, inf])
boundaries : bool
Whether boundaries should be given ('True'), or the area ('False').
Returns
-------
vals : (N,) array_like scalar
Percentiles corresponding to the input `sig`.
"""
if side.startswith('in'):
inside = True
elif side.startswith('out'):
inside = False
else:
raise ValueError("`side` = '{}' must be {'in', 'out'}.".format(side))
# From CDF from -inf to `sig`
cdf = sp.stats.norm.cdf(sigma)
# Area outside of [-sig, sig]
vals = 2.0 * (1.0 - cdf)
# Convert to area inside [-sig, sig]
if inside:
vals = 1.0 - vals
# Convert from area to locations of boundaries (fractions)
if boundaries:
if inside:
vlo = 0.5*(1 - vals)
vhi = 0.5*(1 + vals)
else:
vlo = 0.5*vals
vhi = 1.0 - 0.5*vals
return vlo, vhi
return vals
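# Hedged reference values for `percs_from_sigma` (standard-normal areas,
# rounded for illustration):
#   percs_from_sigma(1.0)                  -> ~0.6827  (area inside +/- 1 sigma)
#   percs_from_sigma(1.0, side='out')      -> ~0.3173  (area outside)
#   percs_from_sigma(1.0, boundaries=True) -> (~0.1587, ~0.8413)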
def random_power(extr, pdf_index, size=1, **kwargs):
"""Draw from power-law PDF with the given extrema and index.
Arguments
---------
extr : array_like scalar
The minimum and maximum value of this array are used as extrema.
pdf_index : scalar
The power-law index of the PDF distribution to be drawn from. Any real number is valid,
positive or negative.
NOTE: the `numpy.random.power` function uses the power-law index of the CDF, i.e. `g+1`
size : scalar
The number of points to draw (cast to int).
    **kwargs : dict pairs
        Additional arguments passed to `zcode.math_core.minmax` with `extr`.
Returns
-------
rv : (N,) scalar
Array of random variables with N=`size` (default, size=1).
"""
# if not np.isscalar(pdf_index):
# err = "`pdf_index` (shape {}; {}) must be a scalar value!".format(
# np.shape(pdf_index), pdf_index)
# raise ValueError(err)
extr = math_core.minmax(extr, filter='>', **kwargs)
if pdf_index == -1:
rv = 10**np.random.uniform(*np.log10(extr), size=int(size))
else:
rr = np.random.random(size=int(size))
gex = extr ** (pdf_index+1)
rv = (gex[0] + (gex[1] - gex[0])*rr) ** (1./(pdf_index+1))
return rv
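# A hedged sketch of `random_power` (illustrative): draw from dN/dx ~ x^-2 on
# [1, 100], plus the log-uniform special case for an index of -1.
def _demo_random_power():  # pragma: nocover
    draws = random_power([1.0, 100.0], -2.0, size=1000)
    log_uniform = random_power([1.0, 100.0], -1.0, size=1000)
    return draws, log_uniform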
def sigma(*args, **kwargs):
# ---- DECPRECATION SECTION ----
utils.dep_warn("sigma", newname="percs_from_sigma")
# ------------------------------
return percs_from_sigma(*args, **kwargs)
def stats(vals, median=False):
"""Get basic statistics for the given array.
Arguments
---------
vals <flt>[N] : input array
median <bool> : include median in return values
Returns
-------
ave <flt>
std <flt>
[med <flt>] : median, returned if ``median`` is `True`
"""
ave = np.average(vals)
std = np.std(vals)
    if median:
med = np.median(vals)
return ave, std, med
return ave, std
def stats_str(data, percs=[0.0, 0.16, 0.50, 0.84, 1.00], ave=False, std=False, weights=None,
format=None, log=False, label=True, label_log=True, filter=None):
"""Return a string with the statistics of the given array.
Arguments
---------
data : ndarray of scalar
Input data from which to calculate statistics.
percs : array_like of scalars in {0, 100}
Which percentiles to calculate.
ave : bool
Include average value in output.
std : bool
Include standard-deviation in output.
format : str
Formatting for all numerical output, (e.g. `":.2f"`).
log : bool
Convert values to log10 before printing.
label : bool
Add label for which percentiles are being printed
label_log : bool
If `log` is also true, append a string saying these are log values.
Output
------
out : str
Single-line string of the desired statistics.
"""
# data = np.array(data).astype(np.float)
data = np.array(data)
if filter is not None:
data = math_core.comparison_filter(data, filter)
if np.size(data) == 0:
return "empty after filtering"
if log:
data = np.log10(data)
percs = np.atleast_1d(percs)
if np.any(percs > 1.0):
warnings.warn("WARNING: zcode.math.statistic: input `percs` should be [0.0, 1.0], "
"dividing these by 100.0!")
        percs = percs / 100.0  # not in-place: in-place divide fails on integer arrays
percs_flag = False
if (percs is not None) and len(percs):
percs_flag = True
out = ""
if format is None:
        allow_int = not (ave or std)
format = math_core._guess_str_format_from_range(data, allow_int=allow_int)
# If a `format` is given, but missing the colon, add the colon
if len(format) and not format.startswith(':'):
format = ':' + format
form = "{{{}}}".format(format)
# Add average
if ave:
out += "ave = " + form.format(np.average(data))
if std or percs_flag:
out += ", "
# Add standard-deviation
if std:
out += "std = " + form.format(np.std(data))
if percs_flag:
out += ", "
# Add percentiles
if percs_flag:
tiles = quantiles(data, percs, weights=weights).astype(data.dtype)
out += "(" + ", ".join(form.format(tt) for tt in tiles) + ")"
if label:
out += ", for (" + ", ".join("{:.0f}%".format(100*pp) for pp in percs) + ")"
# Note if these are log-values
if log and label_log:
out += " (log values)"
return out
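# A hedged sketch of `stats_str` (illustrative data assumed).
def _demo_stats_str():  # pragma: nocover
    data = np.random.uniform(0.0, 1.0, 1000)
    # e.g. "ave = 0.50, std = 0.29, (0.00, ..., 1.00), for (0%, 16%, 50%, 84%, 100%)"
    return stats_str(data, ave=True, std=True)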
def std(vals, weights=None, **kwargs):
"""
See: https://www.itl.nist.gov/div898/software/dataplot/refman2/ch2/weightsd.pdf
"""
if weights is None:
return np.std(vals, **kwargs)
mm = np.count_nonzero(weights)
ave = mean(vals, weights=weights, **kwargs)
num = np.sum(weights * (vals - ave)**2)
den = np.sum(weights) * (mm - 1) / mm
std = np.sqrt(num/den)
return std
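# A hedged worked example of the weighted moments above: with weights
# [1, 1, 2] the value 3.0 effectively counts twice.
def _demo_weighted_moments():  # pragma: nocover
    vals = np.array([1.0, 2.0, 3.0])
    wts = np.array([1.0, 1.0, 2.0])
    ave = mean(vals, weights=wts)   # = (1 + 2 + 2*3) / 4 = 2.25
    dev = std(vals, weights=wts)    # frequency-weighted sample std (NIST form)
    return ave, dev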
class LH_Sampler:
"""
Much of this code was taken from the pyDOE project:
- https://github.com/tisimst/pyDOE
This code was originally published by the following individuals for use with
Scilab:
Copyright (C) 2012 - 2013 - Michael Baudin
Copyright (C) 2012 - Maria Christopoulou
Copyright (C) 2010 - 2011 - INRIA - Michael Baudin
Copyright (C) 2009 - Yann Collette
Copyright (C) 2009 - CEA - Jean-Marc Martinez
website: forge.scilab.org/index.php/p/scidoe/sourcetree/master/macros
Much thanks goes to these individuals. It has been converted to Python by
Abraham Lee.
"""
'''
@classmethod
def oversample(cls, npar, nsamp, oversamp, **kwargs):
if not isinstance(oversamp, int) or oversamp < 1:
raise ValueError(f"`oversamp` argument '{oversamp}' must be an integer!")
samples = None
for ii in range(oversamp):
ss = cls.sample(npar, nsamp=nsamp, **kwargs)
if samples is None:
samples = ss
else:
samples = np.append(samples, ss, axis=-1)
return samples
'''
@classmethod
def sample(cls, vals, nsamp=None, **kwargs):
if isinstance(vals, int):
return cls.sample_unit(vals, nsamp=nsamp, **kwargs)
return cls.sample_vals(vals, nsamp=nsamp, **kwargs)
@classmethod
def sample_vals(cls, vals, nsamp=None, log=False, **kwargs):
vals = np.asarray(vals)
try:
npar, check = np.shape(vals)
if (check != 2) or (npar < 2):
raise ValueError
except ValueError:
print(f"vals = {vals}")
raise ValueError(f"Shape of `vals` ({np.shape(vals)}) must be (N,2)!")
if np.isscalar(log):
log = [log] * npar
if np.any([ll not in [True, False] for ll in log]):
            raise ValueError("`log` value(s) must be 'True' or 'False'!")
# Draw samples in [0.0, 1.0]
samps = cls.sample_unit(npar, nsamp=nsamp, **kwargs)
# Map samples to the given ranges in log or linear space
for ii, vv in enumerate(vals):
if log[ii]:
vv = np.log10(vv)
# temp = np.copy(samps[ii, :])
# samps[ii, :] *= (vv.max() - vv.min())
# samps[ii, :] += vv.min()
samps[ii, :] = (vv.max() - vv.min()) * samps[ii, :] + vv.min()
if log[ii]:
samps[ii, :] = 10.0 ** samps[ii, :]
vv = 10.0 ** vv
# if np.any((samps[ii] < vv.min()) | (samps[ii] > vv.max())):
# print(f"temp = {temp}")
# print(f"vv = {vv}")
# err = (
# f"Samples ({stats_str(samps[ii])}) exceeded "
# f"values ({math_core.minmax(vv)})"
# )
# raise ValueError(err)
return samps
@classmethod
def sample_unit(cls, npar, nsamp=None, center=False, optimize=None, iterations=10):
if nsamp is None:
nsamp = npar
# Construct optimization variables/functions
optimize = None if (optimize is None) else optimize.lower()
if optimize is not None:
if optimize.startswith('dist'):
extr = 0.0
mask = np.ones((nsamp, nsamp), dtype=bool)
comp = np.less
# Minimum euclidean distance between points
def metric(xx):
dist = (xx[:, np.newaxis, :] - xx[:, :, np.newaxis])**2
dist = np.sum(dist, axis=0)
return np.min(dist[mask])
elif optimize.startswith('corr'):
extr = np.inf
mask = np.ones((npar, npar), dtype=bool)
comp = np.greater
# Maximum correlation
                metric = lambda xx: np.max(np.abs(np.corrcoef(xx)[mask]))
            else:
                raise ValueError(
                    "`optimize` must start with 'dist' or 'corr', got '{}'!".format(optimize))
            np.fill_diagonal(mask, False)
# iterate over randomizations
for ii in range(iterations):
cand = cls._sample(npar, nsamp, center=center)
if optimize is None:
samples = cand
break
# -- Optimize
# Calculate the metric being optimized
met = metric(cand)
# Compare the metric to the previous extrema and store new values if better
if comp(extr, met):
extr = met
samples = cand
return samples
@classmethod
def _sample(cls, npar, nsamp, center=False):
# Generate the intervals
cut = np.linspace(0, 1, nsamp + 1)
lo = cut[:-1]
hi = cut[1:]
# Fill points uniformly in each interval
shape = (npar, nsamp) # , nreals)
if center:
points = np.zeros(shape)
points[...] = 0.5 * (lo + hi)[np.newaxis, :]
else:
points = np.random.uniform(size=shape)
points = points * (hi - lo)[np.newaxis, :] + lo[np.newaxis, :]
for j in range(npar):
points[j, :] = np.random.permutation(points[j, :])
return points
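# A hedged usage sketch for `LH_Sampler` (added for illustration; the bounds
# below are assumptions).
def _demo_lh_sampler():  # pragma: nocover
    bounds = [[1.0, 100.0],   # parameter 0, mapped logarithmically
              [0.0, 1.0]]     # parameter 1, mapped linearly
    samps = LH_Sampler.sample(bounds, nsamp=8, log=[True, False])
    # `samps` has shape (2, 8): one row per parameter, one column per sample.
    return samps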
| mit | -1,670,735,747,756,827,600 | 31.439276 | 98 | 0.57416 | false | 3.563946 | false | false | false |
marcok/odoo_modules | hr_employee_time_clock/migrations/11.0.0.0.13/post-migrate.py | 1 | 2402 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2016 - now Bytebrand Outsourcing AG (<http://www.bytebrand.net>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from odoo import api, fields, models, SUPERUSER_ID, _
from dateutil import rrule, parser
import pytz
from datetime import datetime, date, timedelta
import calendar
import math
import logging
_logger = logging.getLogger(__name__)
def migrate(cr, version):
"""
    This migration calculates the running worked time for each active employee
    and writes it into the last attendance that has a check-out. This matters
    for companies that already used the Employee Time Clock module.
"""
env = api.Environment(cr, SUPERUSER_ID, {})
employee_ids = env['hr.employee'].search([('active', '=', True)])
i = len(employee_ids)
analytic = env['employee.attendance.analytic']
analytic.search([]).unlink()
for employee in employee_ids:
_logger.info('\n')
_logger.info(i)
_logger.info(employee.name)
sheets = env['hr_timesheet_sheet.sheet'].search(
[('employee_id', '=', employee.id)])
for sheet in sheets:
analytic.create_line(
sheet, sheet.date_from, sheet.date_to)
attendances = env['hr.attendance'].search(
[('sheet_id', '=', sheet.id)])
for attendance in attendances:
if attendance.check_out:
analytic.recalculate_line_worktime(
attendance, {'check_out': attendance.check_out})
i -= 1
| agpl-3.0 | -8,948,538,899,177,939,000 | 37.126984 | 84 | 0.609492 | false | 4.320144 | false | false | false |
rajalokan/nova | nova/policies/server_groups.py | 1 | 2174 | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-server-groups'
POLICY_ROOT = 'os_compute_api:os-server-groups:%s'
BASE_POLICY_RULE = 'rule:%s' % BASE_POLICY_NAME
server_groups_policies = [
    # TODO(Kevin_Zheng): remove this rule as it is not used by any API
policy.RuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_OR_OWNER),
base.create_rule_default(
POLICY_ROOT % 'create',
BASE_POLICY_RULE,
"Create a new server group",
[
{
'path': '/os-server-groups',
'method': 'POST'
}
]
),
base.create_rule_default(
POLICY_ROOT % 'delete',
BASE_POLICY_RULE,
"Delete a server group",
[
{
'path': '/os-server-groups/{server_group_id}',
'method': 'DELETE'
}
]
),
base.create_rule_default(
POLICY_ROOT % 'index',
BASE_POLICY_RULE,
"List all server groups",
[
{
'path': '/os-server-groups',
'method': 'GET'
}
]
),
base.create_rule_default(
POLICY_ROOT % 'show',
BASE_POLICY_RULE,
"Show details of a server group",
[
{
'path': '/os-server-groups/{server_group_id}',
'method': 'GET'
}
]
),
]
def list_rules():
return server_groups_policies
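# A hedged usage sketch (not part of this module): Nova consumes `list_rules`
# through an oslo.policy Enforcer, roughly like the following, where `context`
# is a hypothetical request context:
#
#     from oslo_config import cfg
#     from oslo_policy import policy as oslo_policy
#
#     enforcer = oslo_policy.Enforcer(cfg.CONF)
#     enforcer.register_defaults(list_rules())
#     enforcer.authorize(POLICY_ROOT % 'create', {}, context.to_policy_values())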
| apache-2.0 | -7,074,195,272,985,386,000 | 26.518987 | 78 | 0.561638 | false | 3.847788 | false | false | false |
smvv/trs | src/rules/fractions.py | 1 | 14707 | # This file is part of TRS (http://math.kompiler.org)
#
# TRS is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# TRS is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with TRS. If not, see <http://www.gnu.org/licenses/>.
from itertools import combinations, product, ifilterfalse
from .utils import least_common_multiple, partition, is_numeric_node, \
evals_to_numeric
from ..node import ExpressionNode as N, ExpressionLeaf as L, Scope, OP_DIV, \
OP_ADD, OP_MUL, negate
from ..possibilities import Possibility as P, MESSAGES
from ..translate import _
from .negation import negate_polynome
def match_constant_division(node):
"""
a / 0 -> Division by zero
a / 1 -> a
0 / a -> 0
a / a -> 1
"""
assert node.is_op(OP_DIV)
p = []
nominator, denominator = node
# a / 0
if denominator == 0:
raise ZeroDivisionError('Division by zero: %s.' % node)
# a / 1
if denominator == 1:
p.append(P(node, division_by_one, (nominator,)))
# 0 / a
if nominator == 0:
p.append(P(node, division_of_zero, (denominator,)))
# a / a
if nominator == denominator:
p.append(P(node, division_by_self, (nominator,)))
return p
def division_by_one(root, args):
"""
a / 1 -> a
"""
return args[0].negate(root.negated)
MESSAGES[division_by_one] = _('Division by `1` yields the nominator.')
def division_of_zero(root, args):
"""
0 / a -> 0
"""
return L(0, negated=root.negated)
MESSAGES[division_of_zero] = _('Division of `0` by {1} reduces to `0`.')
def division_by_self(root, args):
"""
a / a -> 1
"""
return L(1, negated=root.negated)
MESSAGES[division_by_self] = _('Division of {1} by itself reduces to `1`.')
def match_add_fractions(node):
"""
a / b + c / b and a, c in Z -> (a + c) / b
a / b + c / d and a, b, c, d in Z -> a' / e + c' / e # e = lcm(b, d)
# | e = b * d
a / b + c and a, b, c in Z -> a / b + (bc) / b # =>* (a + bc) / b
"""
assert node.is_op(OP_ADD)
p = []
scope = Scope(node)
fractions, others = partition(lambda n: n.is_op(OP_DIV), scope)
numerics = filter(is_numeric_node, others)
for ab, cd in combinations(fractions, 2):
a, b = ab
c, d = cd
if b == d:
# Equal denominators, add nominators to create a single fraction
p.append(P(node, add_nominators, (scope, ab, cd)))
elif all(map(is_numeric_node, (a, b, c, d))):
# Denominators are both numeric, rewrite both fractions to the
# least common multiple of their denominators. Later, the
# nominators will be added
lcm = least_common_multiple(b.value, d.value)
p.append(P(node, equalize_denominators, (scope, ab, cd, lcm)))
# Also, add the (non-recommended) possibility to multiply the
# denominators. Do this only if the multiplication is not equal to
# the least common multiple, to avoid duplicate possibilities
mult = b.value * d.value
if mult != lcm:
p.append(P(node, equalize_denominators, (scope, ab, cd, mult)))
for ab, c in product(fractions, numerics):
a, b = ab
if a.is_numeric() and b.is_numeric():
# Fraction of constants added to a constant -> create a single
# constant fraction
p.append(P(node, constant_to_fraction, (scope, ab, c)))
return p
def add_nominators(root, args):
"""
a / b + c / b and a, c in Z -> (a + c) / b
"""
scope, ab, cb = args
a, b = ab
c = cb[0]
# Replace the left node with the new expression, transfer fraction
# negations to nominators
scope.replace(ab, (a.negate(ab.negated) + c.negate(cb.negated)) / b)
scope.remove(cb)
return scope.as_nary_node()
MESSAGES[add_nominators] = \
_('Add the nominators of {2} and {3} to create a single fraction.')
def equalize_denominators(root, args):
"""
a / b + c / d and a, b, c, d in Z -> a' / e + c' / e
"""
scope, denom = args[::3]
for fraction in args[1:3]:
n, d = fraction
mult = denom / d.value
if mult != 1:
if n.is_numeric():
nom = L(n.value * mult)
else:
nom = L(mult) * n
scope.replace(fraction, negate(nom / L(d.value * mult),
fraction.negated))
return scope.as_nary_node()
MESSAGES[equalize_denominators] = \
_('Equalize the denominators of divisions {2} and {3} to {4}.')
def constant_to_fraction(root, args):
"""
a / b + c and a, b, c in Z -> a / b + (bc) / b # =>* (a + bc) / b
"""
scope, ab, c = args
b = ab[1]
scope.replace(c, b * c / b)
return scope.as_nary_node()
MESSAGES[constant_to_fraction] = \
_('Rewrite constant {3} to a fraction to be able to add it to {2}.')
def match_multiply_fractions(node):
"""
a / b * c / d -> (ac) / (bd)
a / b * c and (eval(c) in Z or eval(a / b) not in Z) -> (ac) / b
"""
assert node.is_op(OP_MUL)
p = []
scope = Scope(node)
fractions, others = partition(lambda n: n.is_op(OP_DIV), scope)
for ab, cd in combinations(fractions, 2):
p.append(P(node, multiply_fractions, (scope, ab, cd)))
for ab, c in product(fractions, others):
if evals_to_numeric(c) or not evals_to_numeric(ab):
p.append(P(node, multiply_with_fraction, (scope, ab, c)))
return p
def multiply_fractions(root, args):
"""
a / b * (c / d) -> ac / (bd)
"""
scope, ab, cd = args
a, b = ab
c, d = cd
scope.replace(ab, (a * c / (b * d)).negate(ab.negated + cd.negated))
scope.remove(cd)
return scope.as_nary_node()
MESSAGES[multiply_fractions] = _('Multiply fractions {2} and {3}.')
def multiply_with_fraction(root, args):
"""
a / b * c and (eval(c) in Z or eval(a / b) not in Z) -> (ac) / b
"""
scope, ab, c = args
a, b = ab
if scope.index(ab) < scope.index(c):
nominator = a * c
else:
nominator = c * a
scope.replace(ab, negate(nominator / b, ab.negated))
scope.remove(c)
return scope.as_nary_node()
MESSAGES[multiply_with_fraction] = \
_('Multiply {3} with the nominator of fraction {2}.')
def match_divide_fractions(node):
"""
Reduce divisions of fractions to a single fraction.
Examples:
a / b / c -> a / (bc)
a / (b / c) -> ac / b
Note that:
        a / b / (c / d)  =>*  ad / (bc)
"""
assert node.is_op(OP_DIV)
nom, denom = node
p = []
if nom.is_op(OP_DIV):
p.append(P(node, divide_fraction, tuple(nom) + (denom,)))
if denom.is_op(OP_DIV):
p.append(P(node, divide_by_fraction, (nom,) + tuple(denom)))
return p
def divide_fraction(root, args):
"""
a / b / c -> a / (bc)
"""
(a, b), c = root
return negate(a / (b * c), root.negated)
MESSAGES[divide_fraction] = \
_('Move {3} to denominator of fraction `{1} / {2}`.')
def divide_by_fraction(root, args):
"""
a / (b / c) -> ac / b
"""
a, bc = root
b, c = bc
return negate(a * c / b, root.negated + bc.negated)
MESSAGES[divide_by_fraction] = \
_('Move {3} to the nominator of fraction `{1} / {2}`.')
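def _check_division_rules():  # pragma: nocover
    # Hedged numeric sanity check (added for illustration; not one of the
    # rewrite rules): verifies the arithmetic identities behind
    # `divide_fraction` and `divide_by_fraction` with plain floats instead of
    # expression-tree nodes.
    a, b, c = 3.0, 4.0, 5.0
    assert abs((a / b) / c - a / (b * c)) < 1e-12   # a / b / c   -> a / (bc)
    assert abs(a / (b / c) - a * c / b) < 1e-12     # a / (b / c) -> ac / b
    return True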
def is_power_combination(a, b):
"""
Check if two nodes are powers that can be combined in a fraction, for
example:
a and a^2
a^2 and a^2
a^2 and a
"""
if a.is_power():
a = a[0]
if b.is_power():
b = b[0]
return a == b
def mult_scope(node):
"""
    Get the multiplication scope of a node that may or may not be a
multiplication itself.
"""
if node.is_op(OP_MUL):
return Scope(node)
return Scope(N(OP_MUL, node))
def remove_from_mult_scope(scope, node):
if len(scope) == 1:
scope.replace(node, L(1))
else:
scope.remove(node)
return scope.as_nary_node()
def match_extract_fraction_terms(node):
"""
Divide nominator and denominator by the same part. If the same root of a
power appears in both nominator and denominator, also extract it so that it
can be reduced to a single power by power division rules.
Examples:
ab / (ac) -> a / a * (c / e) # =>* c / e
a ^ b * c / (a ^ d * e) -> a ^ b / a ^ d * (c / e) # -> a^(b - d)(c / e)
ac / b and eval(c) not in Z and eval(a / b) in Z -> a / b * c
"""
assert node.is_op(OP_DIV)
n_scope, d_scope = map(mult_scope, node)
p = []
nominator, denominator = node
# ac / b
for n in ifilterfalse(evals_to_numeric, n_scope):
a_scope = mult_scope(nominator)
#a = remove_from_mult_scope(a_scope, n)
if len(a_scope) == 1:
a = L(1)
else:
a = a_scope.all_except(n)
if evals_to_numeric(a / denominator):
p.append(P(node, extract_nominator_term, (a, n)))
if len(n_scope) == 1 and len(d_scope) == 1:
return p
# a ^ b * c / (a ^ d * e)
for n, d in product(n_scope, d_scope):
if n == d:
handler = divide_fraction_by_term
elif is_power_combination(n, d):
handler = extract_fraction_terms
else:
continue # pragma: nocover
p.append(P(node, handler, (n_scope, d_scope, n, d)))
return p
def extract_nominator_term(root, args):
"""
ac / b and eval(c) not in Z and eval(a / b) in Z -> a / b * c
"""
a, c = args
return negate(a / root[1] * c, root.negated)
MESSAGES[extract_nominator_term] = \
_('Extract {2} from the nominator of fraction {0}.')
def extract_fraction_terms(root, args):
"""
a ^ b * c / (a ^ d * e) -> a ^ b / a ^ d * (c / e)
"""
n_scope, d_scope, n, d = args
div = n / d * (remove_from_mult_scope(n_scope, n) \
/ remove_from_mult_scope(d_scope, d))
return negate(div, root.negated)
MESSAGES[extract_fraction_terms] = _('Extract `{3} / {4}` from fraction {0}.')
def divide_fraction_by_term(root, args):
"""
ab / a -> b
a / (ba) -> 1 / b
a * c / (ae) -> c / e
"""
n_scope, d_scope, n, d = args
nom = remove_from_mult_scope(n_scope, n)
d_scope.remove(d)
if not len(d_scope):
return negate(nom, root.negated)
return negate(nom / d_scope.as_nary_node(), root.negated)
MESSAGES[divide_fraction_by_term] = \
_('Divide nominator and denominator of {0} by {2}.')
def match_division_in_denominator(node):
"""
a / (b / c + d) -> (ca) / (c(b / c + d))
"""
assert node.is_op(OP_DIV)
denom = node[1]
if not denom.is_op(OP_ADD):
return []
return [P(node, multiply_with_term, (n[1],))
for n in Scope(denom) if n.is_op(OP_DIV)]
def multiply_with_term(root, args):
"""
a / (b / c + d) -> (ca) / (c(b / c + d))
"""
c = args[0]
nom, denom = root
return negate(c * nom / (c * denom), root.negated)
MESSAGES[multiply_with_term] = \
_('Multiply nominator and denominator of {0} with {1}.')
def match_combine_fractions(node):
"""
a/b + c/d -> ad/(bd) + bc/(bd) # -> (ad + bc)/(bd)
"""
assert node.is_op(OP_ADD)
scope = Scope(node)
fractions = [n for n in scope if n.is_op(OP_DIV)]
p = []
for left, right in combinations(fractions, 2):
p.append(P(node, combine_fractions, (scope, left, right)))
return p
def combine_fractions(root, args):
"""
a/b + c/d -> ad/(bd) + bc/(bd)
"""
scope, ab, cd = args
(a, b), (c, d) = ab, cd
a = negate(a, ab.negated)
d = negate(d, cd.negated)
scope.replace(ab, a * d / (b * d) + b * c / (b * d))
scope.remove(cd)
return scope.as_nary_node()
MESSAGES[combine_fractions] = _('Combine fraction {2} and {3}.')
def match_remove_division_negation(node):
"""
-a / (-b + c) -> a / (--b - c)
"""
assert node.is_op(OP_DIV)
nom, denom = node
if node.negated:
if nom.is_op(OP_ADD) and any([n.negated for n in Scope(nom)]):
return [P(node, remove_division_negation, (True, nom))]
if denom.is_op(OP_ADD) and any([n.negated for n in Scope(denom)]):
return [P(node, remove_division_negation, (False, denom))]
return []
def remove_division_negation(root, args):
"""
-a / (-b + c) -> a / (--b - c)
"""
nom, denom = root
if args[0]:
nom = negate_polynome(nom, ())
else:
denom = negate_polynome(denom, ())
return negate(nom / denom, root.negated - 1)
MESSAGES[remove_division_negation] = \
_('Move negation from fraction {0} to polynome {2}.')
def match_fraction_in_division(node):
"""
(1 / a * b) / c -> b / (ac)
c / (1 / a * b) -> (ac) / b
"""
assert node.is_op(OP_DIV)
nom, denom = node
p = []
if nom.is_op(OP_MUL):
scope = Scope(nom)
for n in scope:
if n.is_op(OP_DIV) and n[0] == 1:
p.append(P(node, fraction_in_division, (True, scope, n)))
if denom.is_op(OP_MUL):
scope = Scope(denom)
for n in scope:
if n.is_op(OP_DIV) and n[0] == 1:
p.append(P(node, fraction_in_division, (False, scope, n)))
return p
def fraction_in_division(root, args):
"""
(1 / a * b) / c -> b / (ac)
c / (1 / a * b) -> (ac) / b
"""
is_nominator, scope, fraction = args
nom, denom = root
if fraction.negated or fraction[0].negated:
scope.replace(fraction, fraction[0].negate(fraction.negated))
else:
scope.remove(fraction)
if is_nominator:
nom = scope.as_nary_node()
denom = fraction[1] * denom
else:
nom = fraction[1] * nom
denom = scope.as_nary_node()
return negate(nom / denom, root.negated)
MESSAGES[fraction_in_division] = \
_('Multiply both sides of fraction {0} with {3[1]}.')
| agpl-3.0 | -6,291,606,106,984,690,000 | 23.96944 | 79 | 0.540899 | false | 3.081291 | false | false | false |
dsysoev/fun-with-algorithms | queue/maxheap.py | 1 | 3185 |
"""
Max heap implementation
https://en.wikipedia.org/wiki/Min-max_heap
Algorithm Average
Build heap O(n)
"""
from __future__ import print_function
from math import log, ceil
class MaxHeap(object):
""" Binary Max heap implementation """
def __init__(self):
self.__data = []
    def max_heapify(self, start):
        """ sift the element at `start` down to restore the max-heap property """
        left = self.left_child(start)
        right = self.right_child(start)
        size = self.heap_size()
        largest = start
        if left < size and self.__data[left] > self.__data[largest]:
            largest = left
        if right < size and self.__data[right] > self.__data[largest]:
            largest = right
        if largest != start:
            self.__data[start], self.__data[largest] = self.__data[largest], self.__data[start]
            self.max_heapify(largest)
def add_list(self, lst):
""" add list of elements into the heap """
self.__data += lst
for index in range(self.parent(self.heap_size() - 1), -1, -1):
self.max_heapify(index)
def add(self, value):
""" add one element into the heap """
self.add_list([value])
    def extract_max(self):
        """ return maximum element from the heap """
        value = self.__data[0]
        # deleting index 0 shifts the whole list, so the heap property is
        # restored with a full O(n) rebuild; the classic O(log n) variant
        # would move the last element to the root and sift it down instead
        del self.__data[0]
        for position in range(self.parent(self.heap_size() - 1), -1, -1):
            self.max_heapify(position)
        return value
def heap_size(self):
""" return number of elements in the heap """
return len(self.__data)
def parent(self, index):
""" return parent index """
return (index + 1) // 2 - 1
def left_child(self, index):
""" return index of left child """
return 2 * index + 1
def right_child(self, index):
""" return index of right child """
return 2 * index + 2
def __str__(self):
# string lenght for center
strlen = 2 * 2 ** ceil(log(self.heap_size(), 2))
maxlevel = int(log(self.heap_size(), 2)) + 1
# add root element to string
string = str([self.__data[0]]).center(strlen) + '\n'
for index in range(1, maxlevel):
# get list of elements for current level
lst = self.__data[2 ** index - 1:2 ** (index + 1) - 1]
if index == maxlevel - 1:
# without center for last line
string += str(lst) + '\n'
else:
string += str(lst).center(strlen) + '\n'
return string
if __name__ == "__main__":
HEAP = MaxHeap()
LIST = [4, 1, 3, 2, 16, 9, 10, 14, 8, 7]
print("Build heap from list: {}".format(LIST))
HEAP.add_list(LIST)
print("Show heap:\n{}".format(HEAP))
for VALUE in [100]:
print("Add new element {}".format(VALUE))
HEAP.add(VALUE)
print("Show heap:\n{}".format(HEAP))
for _ in range(2):
MAX = HEAP.extract_max()
print("Extract max element: {}".format(MAX))
print("Show heap:\n{}".format(HEAP))
| mit | 5,271,140,296,426,704,000 | 29.92233 | 95 | 0.539403 | false | 3.747059 | false | false | false |
googleapis/googleapis-gen | google/ads/googleads/v7/googleads-py/google/ads/googleads/v7/errors/types/billing_setup_error.py | 1 | 1902 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v7.errors',
marshal='google.ads.googleads.v7',
manifest={
'BillingSetupErrorEnum',
},
)
class BillingSetupErrorEnum(proto.Message):
r"""Container for enum describing possible billing setup errors. """
class BillingSetupError(proto.Enum):
r"""Enum describing possible billing setup errors."""
UNSPECIFIED = 0
UNKNOWN = 1
CANNOT_USE_EXISTING_AND_NEW_ACCOUNT = 2
CANNOT_REMOVE_STARTED_BILLING_SETUP = 3
CANNOT_CHANGE_BILLING_TO_SAME_PAYMENTS_ACCOUNT = 4
BILLING_SETUP_NOT_PERMITTED_FOR_CUSTOMER_STATUS = 5
INVALID_PAYMENTS_ACCOUNT = 6
BILLING_SETUP_NOT_PERMITTED_FOR_CUSTOMER_CATEGORY = 7
INVALID_START_TIME_TYPE = 8
THIRD_PARTY_ALREADY_HAS_BILLING = 9
BILLING_SETUP_IN_PROGRESS = 10
NO_SIGNUP_PERMISSION = 11
CHANGE_OF_BILL_TO_IN_PROGRESS = 12
PAYMENTS_PROFILE_NOT_FOUND = 13
PAYMENTS_ACCOUNT_NOT_FOUND = 14
PAYMENTS_PROFILE_INELIGIBLE = 15
PAYMENTS_ACCOUNT_INELIGIBLE = 16
CUSTOMER_NEEDS_INTERNAL_APPROVAL = 17
PAYMENTS_ACCOUNT_INELIGIBLE_CURRENCY_CODE_MISMATCH = 19
__all__ = tuple(sorted(__protobuf__.manifest))
| apache-2.0 | 2,008,635,368,135,882,800 | 34.886792 | 75 | 0.685594 | false | 3.56848 | false | false | false |
ojii/django-nani | hvad/tests/forms.py | 1 | 5295 | # -*- coding: utf-8 -*-
from django.core.exceptions import FieldError
from hvad.forms import TranslatableModelForm, TranslatableModelFormMetaclass
from hvad.test_utils.context_managers import LanguageOverride
from hvad.test_utils.testcase import NaniTestCase
from testproject.app.models import Normal
from django.db import models
class NormalForm(TranslatableModelForm):
class Meta:
model = Normal
fields = ['shared_field', 'translated_field']
class NormalMediaForm(TranslatableModelForm):
class Meta:
model = Normal
class Media:
css = {
'all': ('layout.css',)
}
class NormalFormExclude(TranslatableModelForm):
class Meta:
model = Normal
exclude = ['shared_field']
class FormTests(NaniTestCase):
def test_nontranslatablemodelform(self):
# Make sure that TranslatableModelForm won't accept a regular model
# "Fake" model to use for the TranslatableModelForm
class NonTranslatableModel(models.Model):
field = models.CharField(max_length=128)
# Meta class for use below
class Meta:
model = NonTranslatableModel
# Make sure we do indeed get an exception, if we try to initialise it
self.assertRaises(TypeError,
TranslatableModelFormMetaclass,
'NonTranslatableModelForm', (TranslatableModelForm,),
{'Meta': Meta}
)
def test_normal_model_form_instantiation(self):
# Basic example and checking it gives us all the fields needed
form = NormalForm()
self.assertTrue("translated_field" in form.fields)
self.assertTrue("shared_field" in form.fields)
self.assertTrue("translated_field" in form.base_fields)
self.assertTrue("shared_field" in form.base_fields)
self.assertFalse(form.is_valid())
# Check if it works with media argument too
form = NormalMediaForm()
self.assertFalse(form.is_valid())
self.assertTrue("layout.css" in str(form.media))
# Check if it works with an instance of Normal
form = NormalForm(instance=Normal())
self.assertFalse(form.is_valid())
def test_normal_model_form_valid(self):
SHARED = 'Shared'
TRANSLATED = 'English'
data = {
'shared_field': SHARED,
'translated_field': TRANSLATED,
'language_code': 'en'
}
form = NormalForm(data)
self.assertTrue(form.is_valid(), form.errors.as_text())
self.assertTrue("translated_field" in form.fields)
self.assertTrue("shared_field" in form.fields)
self.assertTrue(TRANSLATED in form.clean()["translated_field"])
self.assertTrue(SHARED in form.clean()["shared_field"])
def test_normal_model_form_initaldata_instance(self):
# Check if it accepts inital data and instance
SHARED = 'Shared'
TRANSLATED = 'English'
data = {
'shared_field': SHARED,
'translated_field': TRANSLATED,
'language_code': 'en'
}
form = NormalForm(data, instance=Normal(), initial=data)
self.assertTrue(form.is_valid(), form.errors.as_text())
def test_normal_model_form_existing_instance(self):
# Check if it works with an existing instance of Normal
SHARED = 'Shared'
TRANSLATED = 'English'
instance = Normal.objects.language("en").create(shared_field=SHARED, translated_field=TRANSLATED)
form = NormalForm(instance=instance)
self.assertFalse(form.is_valid())
self.assertTrue(SHARED in form.as_p())
self.assertTrue(TRANSLATED in form.as_p())
def test_normal_model_form_save(self):
with LanguageOverride('en'):
SHARED = 'Shared'
TRANSLATED = 'English'
data = {
'shared_field': SHARED,
'translated_field': TRANSLATED,
'language_code': 'en'
}
form = NormalForm(data)
            # for comparison, a non-translated ModelForm was tested and takes 7 queries
with self.assertNumQueries(2):
obj = form.save()
with self.assertNumQueries(0):
self.assertEqual(obj.shared_field, SHARED)
self.assertEqual(obj.translated_field, TRANSLATED)
self.assertNotEqual(obj.pk, None)
def test_no_language_code_in_fields(self):
with LanguageOverride("en"):
form = NormalForm()
            self.assertFalse("language_code" in form.fields)
            form = NormalMediaForm()
            self.assertFalse("language_code" in form.fields)
            form = NormalFormExclude()
            self.assertFalse("language_code" in form.fields)
def test_form_wrong_field_in_class(self):
with LanguageOverride("en"):
def create_wrong_form():
class WrongForm(TranslatableModelForm):
class Meta:
model = Normal
fields = ['a_field_that_doesnt_exist']
form = WrongForm()
self.assertRaises(FieldError, create_wrong_form)
| bsd-3-clause | -6,615,196,495,451,526,000 | 36.821429 | 105 | 0.605666 | false | 4.322449 | true | false | false |
jdemel/gnuradio | gnuradio-runtime/python/gnuradio/gr/tag_utils.py | 1 | 5013 | from __future__ import unicode_literals
import pmt
from . import gr_python as gr
class PythonTag(object):
" Python container for tags "
def __init__(self):
self.offset = None
self.key = None
self.value = None
self.srcid = False
def tag_to_python(tag):
""" Convert a stream tag to a Python-readable object """
newtag = PythonTag()
newtag.offset = tag.offset
newtag.key = pmt.to_python(tag.key)
newtag.value = pmt.to_python(tag.value)
newtag.srcid = pmt.to_python(tag.srcid)
return newtag
def tag_to_pmt(tag):
""" Convert a Python-readable object to a stream tag """
newtag = gr.tag_t()
newtag.offset = tag.offset
    newtag.key = pmt.from_python(tag.key)  # from_python: the key must be a PMT
newtag.value = pmt.from_python(tag.value)
newtag.srcid = pmt.from_python(tag.srcid)
return newtag
def python_to_tag(tag_struct):
"""
Convert a Python list/tuple/dictionary to a stream tag.
When using a list or tuple format, this function expects the format:
tag_struct[0] --> tag's offset (as an integer)
tag_struct[1] --> tag's key (as a PMT)
tag_struct[2] --> tag's value (as a PMT)
tag_struct[3] --> tag's srcid (as a PMT)
When using a dictionary, we specify the dictionary keys using:
tag_struct['offset'] --> tag's offset (as an integer)
tag_struct['key'] --> tag's key (as a PMT)
tag_struct['value'] --> tag's value (as a PMT)
tag_struct['srcid'] --> tag's srcid (as a PMT)
If the function can take the Python object and successfully
construct a tag, it will return the tag. Otherwise, it will return
None.
"""
good = False
tag = gr.tag_t()
if(type(tag_struct) == dict):
if('offset' in tag_struct):
if(isinstance(tag_struct['offset'], int)):
tag.offset = tag_struct['offset']
good = True
if('key' in tag_struct):
if(isinstance(tag_struct['key'], pmt.pmt_base)):
tag.key = tag_struct['key']
good = True
if('value' in tag_struct):
if(isinstance(tag_struct['value'], pmt.pmt_base)):
tag.value = tag_struct['value']
good = True
if('srcid' in tag_struct):
if(isinstance(tag_struct['srcid'], pmt.pmt_base)):
tag.srcid = tag_struct['srcid']
good = True
elif(type(tag_struct) == list or type(tag_struct) == tuple):
if(len(tag_struct) == 4):
if(isinstance(tag_struct[0], int)):
tag.offset = tag_struct[0]
good = True
if(isinstance(tag_struct[1], pmt.pmt_base)):
tag.key = tag_struct[1]
good = True
if(isinstance(tag_struct[2], pmt.pmt_base)):
tag.value = tag_struct[2]
good = True
if(isinstance(tag_struct[3], pmt.pmt_base)):
tag.srcid = tag_struct[3]
good = True
elif(len(tag_struct) == 3):
if(isinstance(tag_struct[0], int)):
tag.offset = tag_struct[0]
good = True
if(isinstance(tag_struct[1], pmt.pmt_base)):
tag.key = tag_struct[1]
good = True
if(isinstance(tag_struct[2], pmt.pmt_base)):
tag.value = tag_struct[2]
good = True
tag.srcid = pmt.PMT_F
if(good):
return tag
else:
return None
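def _demo_tag_roundtrip():  # pragma: nocover
    # A hedged usage sketch (not part of the original module); assumes a
    # working GNU Radio / PMT installation.
    tag = python_to_tag({'offset': 42,
                         'key': pmt.intern('freq'),
                         'value': pmt.from_double(1.0e6),
                         'srcid': pmt.intern('demo')})
    return tag_to_python(tag)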
def tag_t_offset_compare_key():
"""
Convert a tag_t.offset_compare function into a key=function
This method is modeled after functools.cmp_to_key(_func_).
It can be used by functions that accept a key function, such as
sorted(), min(), max(), etc. to compare tags by their offsets,
e.g., sorted(tag_list, key=gr.tag_t.offset_compare_key()).
"""
class K(object):
def __init__(self, obj, *args):
self.obj = obj
def __lt__(self, other):
# x.offset < y.offset
return gr.tag_t.offset_compare(self.obj, other.obj)
def __gt__(self, other):
# y.offset < x.offset
return gr.tag_t.offset_compare(other.obj, self.obj)
def __eq__(self, other):
# not (x.offset < y.offset) and not (y.offset < x.offset)
return not gr.tag_t.offset_compare(self.obj, other.obj) and \
not gr.tag_t.offset_compare(other.obj, self.obj)
def __le__(self, other):
# not (y.offset < x.offset)
return not gr.tag_t.offset_compare(other.obj, self.obj)
def __ge__(self, other):
# not (x.offset < y.offset)
return not gr.tag_t.offset_compare(self.obj, other.obj)
def __ne__(self, other):
# (x.offset < y.offset) or (y.offset < x.offset)
return gr.tag_t.offset_compare(self.obj, other.obj) or \
gr.tag_t.offset_compare(other.obj, self.obj)
return K
| gpl-3.0 | 2,668,344,318,893,686,300 | 33.8125 | 73 | 0.549172 | false | 3.53277 | false | false | false |
Workday/OpenFrame | native_client_sdk/src/build_tools/build_sdk.py | 1 | 36370 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Entry point for both build and try bots.
This script is invoked from XXX, usually without arguments
to package an SDK. It automatically determines whether
this SDK is for mac, win, linux.
The script inspects the following environment variables:
BUILDBOT_BUILDERNAME to determine whether the script is run locally
and whether it should upload an SDK to file storage (GSTORE)
"""
# pylint: disable=W0621
# std python includes
import argparse
import datetime
import glob
import os
import re
import sys
if sys.version_info < (2, 7, 0):
sys.stderr.write("python 2.7 or later is required run this script\n")
sys.exit(1)
# local includes
import buildbot_common
import build_projects
import build_updater
import build_version
import generate_notice
import manifest_util
import parse_dsc
import verify_filelist
from build_paths import SCRIPT_DIR, SDK_SRC_DIR, SRC_DIR, NACL_DIR, OUT_DIR
from build_paths import NACLPORTS_DIR, GSTORE, GONACL_APPENGINE_SRC_DIR
# Add SDK make tools scripts to the python path.
sys.path.append(os.path.join(SDK_SRC_DIR, 'tools'))
sys.path.append(os.path.join(NACL_DIR, 'build'))
import getos
import oshelpers
BUILD_DIR = os.path.join(NACL_DIR, 'build')
NACL_TOOLCHAIN_DIR = os.path.join(NACL_DIR, 'toolchain')
NACL_TOOLCHAINTARS_DIR = os.path.join(NACL_TOOLCHAIN_DIR, '.tars')
CYGTAR = os.path.join(BUILD_DIR, 'cygtar.py')
PKGVER = os.path.join(BUILD_DIR, 'package_version', 'package_version.py')
NACLPORTS_URL = 'https://chromium.googlesource.com/external/naclports.git'
NACLPORTS_REV = '65c71c1524a74ff8415573e5e5ef7c59ce4ac437'
GYPBUILD_DIR = 'gypbuild'
options = None
# Map of: ToolchainName: (PackageName, SDKDir, arch).
TOOLCHAIN_PACKAGE_MAP = {
'arm_glibc': ('nacl_arm_glibc', '%(platform)s_arm_glibc', 'arm'),
'x86_glibc': ('nacl_x86_glibc', '%(platform)s_x86_glibc', 'x86'),
'pnacl': ('pnacl_newlib', '%(platform)s_pnacl', 'pnacl')
}
def GetToolchainDirName(tcname):
"""Return the directory name for a given toolchain"""
return TOOLCHAIN_PACKAGE_MAP[tcname][1] % {'platform': getos.GetPlatform()}
def GetToolchainDir(pepperdir, tcname):
"""Return the full path to a given toolchain within a given sdk root"""
return os.path.join(pepperdir, 'toolchain', GetToolchainDirName(tcname))
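# For example (illustrative values; the actual name depends on the host OS):
#   GetToolchainDirName('pnacl') -> 'linux_pnacl' on a Linux host
#   GetToolchainDir('/sdk/pepper_canary', 'pnacl')
#       -> '/sdk/pepper_canary/toolchain/linux_pnacl'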
def GetToolchainLibc(tcname):
if tcname == 'pnacl':
return 'newlib'
for libc in ('glibc', 'newlib', 'host'):
if libc in tcname:
return libc
def GetToolchainNaClInclude(pepperdir, tcname, arch=None):
tcpath = GetToolchainDir(pepperdir, tcname)
if arch is None:
arch = TOOLCHAIN_PACKAGE_MAP[tcname][2]
if arch == 'x86':
return os.path.join(tcpath, 'x86_64-nacl', 'include')
elif arch == 'pnacl':
return os.path.join(tcpath, 'le32-nacl', 'include')
elif arch == 'arm':
return os.path.join(tcpath, 'arm-nacl', 'include')
else:
buildbot_common.ErrorExit('Unknown architecture: %s' % arch)
def GetConfigDir(arch):
if arch.endswith('x64') and getos.GetPlatform() == 'win':
return 'Release_x64'
else:
return 'Release'
def GetNinjaOutDir(arch):
return os.path.join(OUT_DIR, GYPBUILD_DIR + '-' + arch, GetConfigDir(arch))
def GetGypBuiltLib(tcname, arch):
if arch == 'ia32':
lib_suffix = '32'
elif arch == 'x64':
lib_suffix = '64'
elif arch == 'arm':
lib_suffix = 'arm'
else:
lib_suffix = ''
tcdir = 'tc_' + GetToolchainLibc(tcname)
if tcname == 'pnacl':
if arch is None:
lib_suffix = ''
tcdir = 'tc_pnacl_newlib'
arch = 'x64'
else:
arch = 'clang-' + arch
return os.path.join(GetNinjaOutDir(arch), 'gen', tcdir, 'lib' + lib_suffix)
def GetToolchainNaClLib(tcname, tcpath, arch):
if arch == 'ia32':
return os.path.join(tcpath, 'x86_64-nacl', 'lib32')
elif arch == 'x64':
return os.path.join(tcpath, 'x86_64-nacl', 'lib')
elif arch == 'arm':
return os.path.join(tcpath, 'arm-nacl', 'lib')
elif tcname == 'pnacl':
return os.path.join(tcpath, 'le32-nacl', 'lib')
def GetOutputToolchainLib(pepperdir, tcname, arch):
tcpath = os.path.join(pepperdir, 'toolchain', GetToolchainDirName(tcname))
return GetToolchainNaClLib(tcname, tcpath, arch)
def GetPNaClTranslatorLib(tcpath, arch):
if arch not in ['arm', 'x86-32', 'x86-64']:
buildbot_common.ErrorExit('Unknown architecture %s.' % arch)
return os.path.join(tcpath, 'translator', arch, 'lib')
def BuildStepDownloadToolchains(toolchains):
buildbot_common.BuildStep('Running package_version.py')
args = [sys.executable, PKGVER, '--mode', 'nacl_core_sdk']
args.extend(['sync', '--extract'])
buildbot_common.Run(args, cwd=NACL_DIR)
def BuildStepCleanPepperDirs(pepperdir, pepperdir_old):
buildbot_common.BuildStep('Clean Pepper Dirs')
dirs_to_remove = (
pepperdir,
pepperdir_old,
os.path.join(OUT_DIR, 'arm_trusted')
)
for dirname in dirs_to_remove:
if os.path.exists(dirname):
buildbot_common.RemoveDir(dirname)
buildbot_common.MakeDir(pepperdir)
def BuildStepMakePepperDirs(pepperdir, subdirs):
for subdir in subdirs:
buildbot_common.MakeDir(os.path.join(pepperdir, subdir))
TEXT_FILES = [
'AUTHORS',
'COPYING',
'LICENSE',
'README.Makefiles',
'getting_started/README',
]
def BuildStepCopyTextFiles(pepperdir, pepper_ver, chrome_revision,
nacl_revision):
buildbot_common.BuildStep('Add Text Files')
InstallFiles(SDK_SRC_DIR, pepperdir, TEXT_FILES)
# Replace a few placeholders in README
readme_text = open(os.path.join(SDK_SRC_DIR, 'README')).read()
readme_text = readme_text.replace('${VERSION}', pepper_ver)
readme_text = readme_text.replace('${CHROME_REVISION}', chrome_revision)
readme_text = readme_text.replace('${CHROME_COMMIT_POSITION}',
build_version.ChromeCommitPosition())
readme_text = readme_text.replace('${NACL_REVISION}', nacl_revision)
# Year/Month/Day Hour:Minute:Second
time_format = '%Y/%m/%d %H:%M:%S'
readme_text = readme_text.replace('${DATE}',
datetime.datetime.now().strftime(time_format))
open(os.path.join(pepperdir, 'README'), 'w').write(readme_text)
def BuildStepUntarToolchains(pepperdir, toolchains):
buildbot_common.BuildStep('Untar Toolchains')
platform = getos.GetPlatform()
build_platform = '%s_x86' % platform
tmpdir = os.path.join(OUT_DIR, 'tc_temp')
buildbot_common.RemoveDir(tmpdir)
buildbot_common.MakeDir(tmpdir)
# Create a list of extract packages tuples, the first part should be
# "$PACKAGE_TARGET/$PACKAGE". The second part should be the destination
# directory relative to pepperdir/toolchain.
extract_packages = []
for toolchain in toolchains:
toolchain_map = TOOLCHAIN_PACKAGE_MAP.get(toolchain, None)
if toolchain_map:
package_name, tcdir, _ = toolchain_map
package_tuple = (os.path.join(build_platform, package_name),
tcdir % {'platform': platform})
extract_packages.append(package_tuple)
# On linux we also want to extract the arm_trusted package which contains
# the ARM libraries we ship in support of sel_ldr_arm.
if platform == 'linux':
extract_packages.append((os.path.join(build_platform, 'arm_trusted'),
'arm_trusted'))
if extract_packages:
# Extract all of the packages into the temp directory.
package_names = [package_tuple[0] for package_tuple in extract_packages]
buildbot_common.Run([sys.executable, PKGVER,
'--packages', ','.join(package_names),
'--tar-dir', NACL_TOOLCHAINTARS_DIR,
'--dest-dir', tmpdir,
'extract'])
# Move all the packages we extracted to the correct destination.
for package_name, dest_dir in extract_packages:
full_src_dir = os.path.join(tmpdir, package_name)
full_dst_dir = os.path.join(pepperdir, 'toolchain', dest_dir)
buildbot_common.Move(full_src_dir, full_dst_dir)
# Cleanup the temporary directory we are no longer using.
buildbot_common.RemoveDir(tmpdir)
# List of toolchain headers to install.
# Source is relative to top of Chromium tree, destination is relative
# to the toolchain header directory.
NACL_HEADER_MAP = {
'newlib': [
('native_client/src/include/nacl/nacl_exception.h', 'nacl/'),
('native_client/src/include/nacl/nacl_minidump.h', 'nacl/'),
('native_client/src/untrusted/irt/irt.h', ''),
('native_client/src/untrusted/irt/irt_dev.h', ''),
('native_client/src/untrusted/irt/irt_extension.h', ''),
('native_client/src/untrusted/nacl/nacl_dyncode.h', 'nacl/'),
('native_client/src/untrusted/nacl/nacl_startup.h', 'nacl/'),
('native_client/src/untrusted/pthread/pthread.h', ''),
('native_client/src/untrusted/pthread/semaphore.h', ''),
('native_client/src/untrusted/valgrind/dynamic_annotations.h', 'nacl/'),
('ppapi/nacl_irt/public/irt_ppapi.h', ''),
],
'glibc': [
('native_client/src/include/nacl/nacl_exception.h', 'nacl/'),
('native_client/src/include/nacl/nacl_minidump.h', 'nacl/'),
('native_client/src/untrusted/irt/irt.h', ''),
('native_client/src/untrusted/irt/irt_dev.h', ''),
('native_client/src/untrusted/irt/irt_extension.h', ''),
('native_client/src/untrusted/nacl/nacl_dyncode.h', 'nacl/'),
('native_client/src/untrusted/nacl/nacl_startup.h', 'nacl/'),
('native_client/src/untrusted/valgrind/dynamic_annotations.h', 'nacl/'),
('ppapi/nacl_irt/public/irt_ppapi.h', ''),
],
}
def InstallFiles(src_root, dest_root, file_list):
"""Copy a set of files from src_root to dest_root according
to the given mapping. This allows files to be copied from
to a location in the destination tree that is different to the
location in the source tree.
  If the destination mapping ends with a '/' then the destination
  basename is inherited from the source file.
Wildcards can be used in the source list but it is not recommended
as this can end up adding things to the SDK unintentionally.
"""
for file_spec in file_list:
# The list of files to install can be a simple list of
# strings or a list of pairs, where each pair corresponds
# to a mapping from source to destination names.
if type(file_spec) == str:
src_file = dest_file = file_spec
else:
src_file, dest_file = file_spec
src_file = os.path.join(src_root, src_file)
# Expand sources files using glob.
sources = glob.glob(src_file)
if not sources:
sources = [src_file]
if len(sources) > 1 and not dest_file.endswith('/'):
buildbot_common.ErrorExit("Target file must end in '/' when "
"using globbing to install multiple files")
for source in sources:
if dest_file.endswith('/'):
dest = os.path.join(dest_file, os.path.basename(source))
else:
dest = dest_file
dest = os.path.join(dest_root, dest)
if not os.path.isdir(os.path.dirname(dest)):
buildbot_common.MakeDir(os.path.dirname(dest))
buildbot_common.CopyFile(source, dest)
def InstallNaClHeaders(tc_dst_inc, tcname):
"""Copies NaCl headers to expected locations in the toolchain."""
InstallFiles(SRC_DIR, tc_dst_inc, NACL_HEADER_MAP[GetToolchainLibc(tcname)])
def MakeNinjaRelPath(path):
return os.path.join(os.path.relpath(OUT_DIR, SRC_DIR), path)
# TODO(ncbray): stop building and copying libraries into the SDK that are
# already provided by the toolchain.
# Mapping from libc name to the list of gyp-built libraries to install.
TOOLCHAIN_LIBS = {
'newlib' : [
'libminidump_generator.a',
'libnacl.a',
'libnacl_dyncode.a',
'libnacl_exception.a',
'libnacl_list_mappings.a',
'libnosys.a',
'libppapi.a',
'libppapi_stub.a',
'libpthread.a',
],
'glibc': [
'libminidump_generator.a',
'libminidump_generator.so',
'libnacl.a',
'libnacl_dyncode.a',
'libnacl_dyncode.so',
'libnacl_exception.a',
'libnacl_exception.so',
'libnacl_list_mappings.a',
'libnacl_list_mappings.so',
'libppapi.a',
'libppapi.so',
'libppapi_stub.a',
]
}
def GypNinjaInstall(pepperdir, toolchains):
tools_files_32 = [
['sel_ldr', 'sel_ldr_x86_32'],
['irt_core_newlib_x32.nexe', 'irt_core_x86_32.nexe'],
['irt_core_newlib_x64.nexe', 'irt_core_x86_64.nexe'],
]
arm_files = [
['elf_loader_newlib_arm.nexe', 'elf_loader_arm.nexe'],
]
tools_files_64 = []
platform = getos.GetPlatform()
# TODO(binji): dump_syms doesn't currently build on Windows. See
# http://crbug.com/245456
if platform != 'win':
tools_files_64 += [
['dump_syms', 'dump_syms'],
['minidump_dump', 'minidump_dump'],
['minidump_stackwalk', 'minidump_stackwalk']
]
tools_files_64.append(['sel_ldr', 'sel_ldr_x86_64'])
tools_files_64.append(['ncval_new', 'ncval'])
if platform == 'linux':
tools_files_32.append(['nacl_helper_bootstrap',
'nacl_helper_bootstrap_x86_32'])
tools_files_64.append(['nacl_helper_bootstrap',
'nacl_helper_bootstrap_x86_64'])
tools_files_32.append(['nonsfi_loader_newlib_x32_nonsfi.nexe',
'nonsfi_loader_x86_32'])
tools_dir = os.path.join(pepperdir, 'tools')
buildbot_common.MakeDir(tools_dir)
# Add .exe extensions to all windows tools
for pair in tools_files_32 + tools_files_64:
if platform == 'win' and not pair[0].endswith('.nexe'):
pair[0] += '.exe'
pair[1] += '.exe'
# Add ARM binaries
if platform == 'linux' and not options.no_arm_trusted:
arm_files += [
['irt_core_newlib_arm.nexe', 'irt_core_arm.nexe'],
['nacl_helper_bootstrap', 'nacl_helper_bootstrap_arm'],
['nonsfi_loader_newlib_arm_nonsfi.nexe', 'nonsfi_loader_arm'],
['sel_ldr', 'sel_ldr_arm']
]
InstallFiles(GetNinjaOutDir('x64'), tools_dir, tools_files_64)
InstallFiles(GetNinjaOutDir('ia32'), tools_dir, tools_files_32)
InstallFiles(GetNinjaOutDir('arm'), tools_dir, arm_files)
for tc in toolchains:
if tc in ('host', 'clang-newlib'):
continue
elif tc == 'pnacl':
xarches = (None, 'ia32', 'x64', 'arm')
elif tc in ('x86_glibc', 'x86_newlib'):
xarches = ('ia32', 'x64')
elif tc == 'arm_glibc':
xarches = ('arm',)
else:
raise AssertionError('unexpected toolchain value: %s' % tc)
for xarch in xarches:
src_dir = GetGypBuiltLib(tc, xarch)
dst_dir = GetOutputToolchainLib(pepperdir, tc, xarch)
libc = GetToolchainLibc(tc)
InstallFiles(src_dir, dst_dir, TOOLCHAIN_LIBS[libc])
def GypNinjaBuild_NaCl(rel_out_dir):
gyp_py = os.path.join(NACL_DIR, 'build', 'gyp_nacl')
nacl_core_sdk_gyp = os.path.join(NACL_DIR, 'build', 'nacl_core_sdk.gyp')
all_gyp = os.path.join(NACL_DIR, 'build', 'all.gyp')
out_dir_32 = MakeNinjaRelPath(rel_out_dir + '-ia32')
out_dir_64 = MakeNinjaRelPath(rel_out_dir + '-x64')
out_dir_arm = MakeNinjaRelPath(rel_out_dir + '-arm')
out_dir_clang_32 = MakeNinjaRelPath(rel_out_dir + '-clang-ia32')
out_dir_clang_64 = MakeNinjaRelPath(rel_out_dir + '-clang-x64')
out_dir_clang_arm = MakeNinjaRelPath(rel_out_dir + '-clang-arm')
GypNinjaBuild('ia32', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk', out_dir_32,
gyp_defines=['use_nacl_clang=0'])
GypNinjaBuild('x64', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk', out_dir_64,
gyp_defines=['use_nacl_clang=0'])
GypNinjaBuild('arm', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk', out_dir_arm,
gyp_defines=['use_nacl_clang=0'])
GypNinjaBuild('ia32', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk',
out_dir_clang_32, gyp_defines=['use_nacl_clang=1'])
GypNinjaBuild('x64', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk',
out_dir_clang_64, gyp_defines=['use_nacl_clang=1'])
GypNinjaBuild('arm', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk',
out_dir_clang_arm, gyp_defines=['use_nacl_clang=1'])
GypNinjaBuild('x64', gyp_py, all_gyp, 'ncval_new', out_dir_64)
def GypNinjaBuild_Breakpad(rel_out_dir):
# TODO(binji): dump_syms doesn't currently build on Windows. See
# http://crbug.com/245456
if getos.GetPlatform() == 'win':
return
gyp_py = os.path.join(SRC_DIR, 'build', 'gyp_chromium')
out_dir = MakeNinjaRelPath(rel_out_dir)
gyp_file = os.path.join(SRC_DIR, 'breakpad', 'breakpad.gyp')
build_list = ['dump_syms', 'minidump_dump', 'minidump_stackwalk']
GypNinjaBuild('x64', gyp_py, gyp_file, build_list, out_dir)
def GypNinjaBuild_PPAPI(arch, rel_out_dir, gyp_defines=None):
gyp_py = os.path.join(SRC_DIR, 'build', 'gyp_chromium')
out_dir = MakeNinjaRelPath(rel_out_dir)
gyp_file = os.path.join(SRC_DIR, 'ppapi', 'native_client',
'native_client.gyp')
GypNinjaBuild(arch, gyp_py, gyp_file, 'ppapi_lib', out_dir,
gyp_defines=gyp_defines)
def GypNinjaBuild_Pnacl(rel_out_dir, target_arch):
# TODO(binji): This will build the pnacl_irt_shim twice; once as part of the
# Chromium build, and once here. When we move more of the SDK build process
# to gyp, we can remove this.
gyp_py = os.path.join(SRC_DIR, 'build', 'gyp_chromium')
out_dir = MakeNinjaRelPath(rel_out_dir)
gyp_file = os.path.join(SRC_DIR, 'ppapi', 'native_client', 'src',
'untrusted', 'pnacl_irt_shim', 'pnacl_irt_shim.gyp')
targets = ['aot']
GypNinjaBuild(target_arch, gyp_py, gyp_file, targets, out_dir)
def GypNinjaBuild(arch, gyp_py_script, gyp_file, targets,
out_dir, gyp_defines=None):
gyp_env = dict(os.environ)
gyp_env['GYP_GENERATORS'] = 'ninja'
gyp_defines = gyp_defines or []
gyp_defines.append('nacl_allow_thin_archives=0')
if not options.no_use_sysroot:
gyp_defines.append('use_sysroot=1')
if options.mac_sdk:
gyp_defines.append('mac_sdk=%s' % options.mac_sdk)
if arch is not None:
gyp_defines.append('target_arch=%s' % arch)
if arch == 'arm':
gyp_env['GYP_CROSSCOMPILE'] = '1'
if options.no_arm_trusted:
gyp_defines.append('disable_cross_trusted=1')
if getos.GetPlatform() == 'mac':
gyp_defines.append('clang=1')
gyp_env['GYP_DEFINES'] = ' '.join(gyp_defines)
# We can't use windows path separators in GYP_GENERATOR_FLAGS since
# gyp uses shlex to parse them and treats '\' as an escape char.
gyp_env['GYP_GENERATOR_FLAGS'] = 'output_dir=%s' % out_dir.replace('\\', '/')
# Print relevant environment variables
for key, value in gyp_env.iteritems():
if key.startswith('GYP') or key in ('CC',):
print ' %s="%s"' % (key, value)
buildbot_common.Run(
[sys.executable, gyp_py_script, gyp_file, '--depth=.'],
cwd=SRC_DIR,
env=gyp_env)
NinjaBuild(targets, out_dir, arch)
def NinjaBuild(targets, out_dir, arch):
if type(targets) is not list:
targets = [targets]
out_config_dir = os.path.join(out_dir, GetConfigDir(arch))
buildbot_common.Run(['ninja', '-C', out_config_dir] + targets, cwd=SRC_DIR)
def BuildStepBuildToolchains(pepperdir, toolchains, build, clean):
buildbot_common.BuildStep('SDK Items')
if clean:
for dirname in glob.glob(os.path.join(OUT_DIR, GYPBUILD_DIR + '*')):
buildbot_common.RemoveDir(dirname)
build = True
if build:
GypNinjaBuild_NaCl(GYPBUILD_DIR)
GypNinjaBuild_Breakpad(GYPBUILD_DIR + '-x64')
if set(toolchains) & set(['x86_glibc', 'x86_newlib']):
GypNinjaBuild_PPAPI('ia32', GYPBUILD_DIR + '-ia32',
['use_nacl_clang=0'])
GypNinjaBuild_PPAPI('x64', GYPBUILD_DIR + '-x64',
['use_nacl_clang=0'])
if 'arm_glibc' in toolchains:
GypNinjaBuild_PPAPI('arm', GYPBUILD_DIR + '-arm',
['use_nacl_clang=0'] )
if 'pnacl' in toolchains:
GypNinjaBuild_PPAPI('ia32', GYPBUILD_DIR + '-clang-ia32',
['use_nacl_clang=1'])
GypNinjaBuild_PPAPI('x64', GYPBUILD_DIR + '-clang-x64',
['use_nacl_clang=1'])
GypNinjaBuild_PPAPI('arm', GYPBUILD_DIR + '-clang-arm',
['use_nacl_clang=1'])
# NOTE: For ia32, gyp builds both x86-32 and x86-64 by default.
for arch in ('ia32', 'arm'):
# Fill in the latest native pnacl shim library from the chrome build.
build_dir = GYPBUILD_DIR + '-pnacl-' + arch
GypNinjaBuild_Pnacl(build_dir, arch)
GypNinjaInstall(pepperdir, toolchains)
for toolchain in toolchains:
if toolchain not in ('host', 'clang-newlib'):
InstallNaClHeaders(GetToolchainNaClInclude(pepperdir, toolchain),
toolchain)
if 'pnacl' in toolchains:
# NOTE: For ia32, gyp builds both x86-32 and x86-64 by default.
for arch in ('ia32', 'arm'):
# Fill in the latest native pnacl shim library from the chrome build.
build_dir = GYPBUILD_DIR + '-pnacl-' + arch
if arch == 'ia32':
nacl_arches = ['x86-32', 'x86-64']
elif arch == 'arm':
nacl_arches = ['arm']
else:
buildbot_common.ErrorExit('Unknown architecture: %s' % arch)
for nacl_arch in nacl_arches:
release_build_dir = os.path.join(OUT_DIR, build_dir, 'Release',
'gen', 'tc_pnacl_translate',
'lib-' + nacl_arch)
pnacldir = GetToolchainDir(pepperdir, 'pnacl')
pnacl_translator_lib_dir = GetPNaClTranslatorLib(pnacldir, nacl_arch)
if not os.path.isdir(pnacl_translator_lib_dir):
buildbot_common.ErrorExit('Expected %s directory to exist.' %
pnacl_translator_lib_dir)
buildbot_common.CopyFile(
os.path.join(release_build_dir, 'libpnacl_irt_shim.a'),
pnacl_translator_lib_dir)
InstallNaClHeaders(GetToolchainNaClInclude(pepperdir, 'pnacl', 'x86'),
'pnacl')
InstallNaClHeaders(GetToolchainNaClInclude(pepperdir, 'pnacl', 'arm'),
'pnacl')
def MakeDirectoryOrClobber(pepperdir, dirname, clobber):
dirpath = os.path.join(pepperdir, dirname)
if clobber:
buildbot_common.RemoveDir(dirpath)
buildbot_common.MakeDir(dirpath)
return dirpath
def BuildStepUpdateHelpers(pepperdir, clobber):
buildbot_common.BuildStep('Update project helpers')
build_projects.UpdateHelpers(pepperdir, clobber=clobber)
def BuildStepUpdateUserProjects(pepperdir, toolchains,
build_experimental, clobber):
buildbot_common.BuildStep('Update examples and libraries')
filters = {}
if not build_experimental:
filters['EXPERIMENTAL'] = False
dsc_toolchains = []
for t in toolchains:
if t.startswith('x86_') or t.startswith('arm_'):
if t[4:] not in dsc_toolchains:
dsc_toolchains.append(t[4:])
elif t == 'host':
dsc_toolchains.append(getos.GetPlatform())
else:
dsc_toolchains.append(t)
filters['TOOLS'] = dsc_toolchains
# Update examples and libraries
filters['DEST'] = [
'getting_started',
'examples/api',
'examples/demo',
'examples/tutorial',
'src'
]
tree = parse_dsc.LoadProjectTree(SDK_SRC_DIR, include=filters)
build_projects.UpdateProjects(pepperdir, tree, clobber=clobber,
toolchains=dsc_toolchains)
def BuildStepMakeAll(pepperdir, directory, step_name,
deps=True, clean=False, config='Debug', args=None):
buildbot_common.BuildStep(step_name)
build_projects.BuildProjectsBranch(pepperdir, directory, clean,
deps, config, args)
def BuildStepBuildLibraries(pepperdir, directory):
BuildStepMakeAll(pepperdir, directory, 'Build Libraries Debug',
clean=True, config='Debug')
BuildStepMakeAll(pepperdir, directory, 'Build Libraries Release',
clean=True, config='Release')
# Cleanup .pyc file generated while building libraries. Without
# this we would end up shipping the pyc in the SDK tarball.
buildbot_common.RemoveFile(os.path.join(pepperdir, 'tools', '*.pyc'))
def GenerateNotice(fileroot, output_filename='NOTICE', extra_files=None):
# Look for LICENSE files
license_filenames_re = re.compile('LICENSE|COPYING|COPYRIGHT')
license_files = []
for root, _, files in os.walk(fileroot):
for filename in files:
if license_filenames_re.match(filename):
path = os.path.join(root, filename)
license_files.append(path)
if extra_files:
license_files += [os.path.join(fileroot, f) for f in extra_files]
print '\n'.join(license_files)
if not os.path.isabs(output_filename):
output_filename = os.path.join(fileroot, output_filename)
generate_notice.Generate(output_filename, fileroot, license_files)
def BuildStepVerifyFilelist(pepperdir):
buildbot_common.BuildStep('Verify SDK Files')
file_list_path = os.path.join(SCRIPT_DIR, 'sdk_files.list')
try:
print 'SDK directory: %s' % pepperdir
verify_filelist.Verify(file_list_path, pepperdir)
print 'OK'
except verify_filelist.ParseException, e:
buildbot_common.ErrorExit('Parsing sdk_files.list failed:\n\n%s' % e)
except verify_filelist.VerifyException, e:
file_list_rel = os.path.relpath(file_list_path)
verify_filelist_py = os.path.splitext(verify_filelist.__file__)[0] + '.py'
verify_filelist_py = os.path.relpath(verify_filelist_py)
pepperdir_rel = os.path.relpath(pepperdir)
msg = """\
SDK verification failed:
%s
Add/remove files from %s to fix.
Run:
./%s %s %s
to test.""" % (e, file_list_rel, verify_filelist_py, file_list_rel,
pepperdir_rel)
buildbot_common.ErrorExit(msg)
def BuildStepTarBundle(pepper_ver, tarfile):
buildbot_common.BuildStep('Tar Pepper Bundle')
buildbot_common.MakeDir(os.path.dirname(tarfile))
buildbot_common.Run([sys.executable, CYGTAR, '-C', OUT_DIR, '-cjf', tarfile,
'pepper_' + pepper_ver], cwd=NACL_DIR)
def GetManifestBundle(pepper_ver, chrome_revision, nacl_revision, tarfile,
archive_url):
with open(tarfile, 'rb') as tarfile_stream:
archive_sha1, archive_size = manifest_util.DownloadAndComputeHash(
tarfile_stream)
archive = manifest_util.Archive(manifest_util.GetHostOS())
archive.url = archive_url
archive.size = archive_size
archive.checksum = archive_sha1
bundle = manifest_util.Bundle('pepper_' + pepper_ver)
bundle.revision = int(chrome_revision)
bundle.repath = 'pepper_' + pepper_ver
bundle.version = int(pepper_ver)
bundle.description = (
'Chrome %s bundle. Chrome revision: %s. NaCl revision: %s' % (
pepper_ver, chrome_revision, nacl_revision))
bundle.stability = 'dev'
bundle.recommended = 'no'
bundle.archives = [archive]
return bundle
def Archive(filename, from_directory, step_link=True):
if buildbot_common.IsSDKBuilder():
bucket_path = 'nativeclient-mirror/nacl/nacl_sdk/'
else:
bucket_path = 'nativeclient-mirror/nacl/nacl_sdk_test/'
bucket_path += build_version.ChromeVersion()
buildbot_common.Archive(filename, bucket_path, from_directory, step_link)
def BuildStepArchiveBundle(name, pepper_ver, chrome_revision, nacl_revision,
tarfile):
buildbot_common.BuildStep('Archive %s' % name)
tarname = os.path.basename(tarfile)
tarfile_dir = os.path.dirname(tarfile)
Archive(tarname, tarfile_dir)
# generate "manifest snippet" for this archive.
archive_url = GSTORE + 'nacl_sdk/%s/%s' % (
build_version.ChromeVersion(), tarname)
bundle = GetManifestBundle(pepper_ver, chrome_revision, nacl_revision,
tarfile, archive_url)
manifest_snippet_file = os.path.join(OUT_DIR, tarname + '.json')
with open(manifest_snippet_file, 'wb') as manifest_snippet_stream:
manifest_snippet_stream.write(bundle.GetDataAsString())
Archive(tarname + '.json', OUT_DIR, step_link=False)
def BuildStepBuildPNaClComponent(version, revision):
# Sadly revision can go backwards for a given version, since when a version
# is built from master, revision will be a huge number (in the hundreds of
# thousands). Once the branch happens the revision will reset to zero.
# TODO(sbc): figure out how to compensate for this in some way such that
# revisions always go forward for a given version.
buildbot_common.BuildStep('PNaCl Component')
# Version numbers must follow the format specified in:
# https://developer.chrome.com/extensions/manifest/version
# So ensure that rev_major/rev_minor don't overflow and ensure there
# are no leading zeros.
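# Worked example (illustrative): revision '123456' yields rev_major=12
# and rev_minor=3456, so version becomes '0.<version>.12.3456'; a short
# revision such as '789' yields '0.<version>.0.789'.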
if len(revision) > 4:
rev_minor = int(revision[-4:])
rev_major = int(revision[:-4])
version = "0.%s.%s.%s" % (version, rev_major, rev_minor)
else:
version = "0.%s.0.%s" % (version, revision)
buildbot_common.Run(['./make_pnacl_component.sh',
'pnacl_multicrx_%s.zip' % revision,
version], cwd=SCRIPT_DIR)
def BuildStepArchivePNaClComponent(revision):
buildbot_common.BuildStep('Archive PNaCl Component')
Archive('pnacl_multicrx_%s.zip' % revision, OUT_DIR)
def BuildStepArchiveSDKTools():
buildbot_common.BuildStep('Build SDK Tools')
build_updater.BuildUpdater(OUT_DIR)
buildbot_common.BuildStep('Archive SDK Tools')
Archive('sdk_tools.tgz', OUT_DIR, step_link=False)
Archive('nacl_sdk.zip', OUT_DIR, step_link=False)
def BuildStepBuildAppEngine(pepperdir, chrome_revision):
"""Build the projects found in src/gonacl_appengine/src"""
buildbot_common.BuildStep('Build GoNaCl AppEngine Projects')
cmd = ['make', 'upload', 'REVISION=%s' % chrome_revision]
env = dict(os.environ)
env['NACL_SDK_ROOT'] = pepperdir
env['NACLPORTS_NO_ANNOTATE'] = "1"
buildbot_common.Run(cmd, env=env, cwd=GONACL_APPENGINE_SRC_DIR)
def main(args):
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--qemu', help='Add qemu for ARM.',
action='store_true')
parser.add_argument('--tar', help='Force the tar step.',
action='store_true')
parser.add_argument('--archive', help='Force the archive step.',
action='store_true')
parser.add_argument('--release', help='PPAPI release version.',
dest='release', default=None)
parser.add_argument('--build-app-engine',
help='Build AppEngine demos.', action='store_true')
parser.add_argument('--experimental',
help='build experimental examples and libraries', action='store_true',
dest='build_experimental')
parser.add_argument('--skip-toolchain', help='Skip toolchain untar',
action='store_true')
parser.add_argument('--no-clean', dest='clean', action='store_false',
help="Don't clean gypbuild directories")
parser.add_argument('--mac-sdk',
help='Set the mac-sdk (e.g. 10.6) to use when building with ninja.')
parser.add_argument('--no-arm-trusted', action='store_true',
help='Disable building of ARM trusted components (sel_ldr, etc).')
parser.add_argument('--no-use-sysroot', action='store_true',
help='Disable building against sysroot.')
# To setup bash completion for this command first install optcomplete
# and then add this line to your .bashrc:
# complete -F _optcomplete build_sdk.py
try:
import optcomplete
optcomplete.autocomplete(parser)
except ImportError:
pass
global options
options = parser.parse_args(args)
buildbot_common.BuildStep('build_sdk')
if buildbot_common.IsSDKBuilder():
options.archive = True
# TODO(binji): re-enable app_engine build when the linux builder stops
# breaking when trying to git clone from github.
# See http://crbug.com/412969.
options.build_app_engine = False
options.tar = True
# NOTE: order matters here. This will be the order that is specified in the
# Makefiles; the first toolchain will be the default.
toolchains = ['pnacl', 'x86_glibc', 'arm_glibc', 'clang-newlib', 'host']
print 'Building: ' + ' '.join(toolchains)
platform = getos.GetPlatform()
if options.archive and not options.tar:
parser.error('Incompatible arguments with archive.')
chrome_version = int(build_version.ChromeMajorVersion())
chrome_revision = build_version.ChromeRevision()
nacl_revision = build_version.NaClRevision()
pepper_ver = str(chrome_version)
pepper_old = str(chrome_version - 1)
pepperdir = os.path.join(OUT_DIR, 'pepper_' + pepper_ver)
pepperdir_old = os.path.join(OUT_DIR, 'pepper_' + pepper_old)
tarname = 'naclsdk_%s.tar.bz2' % platform
tarfile = os.path.join(OUT_DIR, tarname)
if options.release:
pepper_ver = options.release
print 'Building PEPPER %s at %s' % (pepper_ver, chrome_revision)
if 'NACL_SDK_ROOT' in os.environ:
# We don't want the currently configured NACL_SDK_ROOT to have any effect
# on the build.
del os.environ['NACL_SDK_ROOT']
if platform == 'linux':
# Linux-only: make sure the debian/stable sysroot image is installed
install_script = os.path.join(SRC_DIR, 'build', 'linux', 'sysroot_scripts',
'install-sysroot.py')
buildbot_common.Run([sys.executable, install_script, '--arch=arm'])
buildbot_common.Run([sys.executable, install_script, '--arch=i386'])
buildbot_common.Run([sys.executable, install_script, '--arch=amd64'])
if not options.skip_toolchain:
BuildStepCleanPepperDirs(pepperdir, pepperdir_old)
BuildStepMakePepperDirs(pepperdir, ['include', 'toolchain', 'tools'])
BuildStepDownloadToolchains(toolchains)
BuildStepUntarToolchains(pepperdir, toolchains)
if platform == 'linux':
buildbot_common.Move(os.path.join(pepperdir, 'toolchain', 'arm_trusted'),
os.path.join(OUT_DIR, 'arm_trusted'))
if platform == 'linux':
# Linux-only: Copy arm libraries from the arm_trusted package. These are
# needed to be able to run sel_ldr_arm under qemu.
arm_libs = [
'lib/arm-linux-gnueabihf/librt.so.1',
'lib/arm-linux-gnueabihf/libpthread.so.0',
'lib/arm-linux-gnueabihf/libgcc_s.so.1',
'lib/arm-linux-gnueabihf/libc.so.6',
'lib/arm-linux-gnueabihf/ld-linux-armhf.so.3',
'lib/arm-linux-gnueabihf/libm.so.6',
'usr/lib/arm-linux-gnueabihf/libstdc++.so.6'
]
arm_lib_dir = os.path.join(pepperdir, 'tools', 'lib', 'arm_trusted', 'lib')
buildbot_common.MakeDir(arm_lib_dir)
for arm_lib in arm_libs:
arm_lib = os.path.join(OUT_DIR, 'arm_trusted', arm_lib)
buildbot_common.CopyFile(arm_lib, arm_lib_dir)
buildbot_common.CopyFile(os.path.join(OUT_DIR, 'arm_trusted', 'qemu-arm'),
os.path.join(pepperdir, 'tools'))
BuildStepBuildToolchains(pepperdir, toolchains,
not options.skip_toolchain,
options.clean)
BuildStepUpdateHelpers(pepperdir, True)
BuildStepUpdateUserProjects(pepperdir, toolchains,
options.build_experimental, True)
BuildStepCopyTextFiles(pepperdir, pepper_ver, chrome_revision, nacl_revision)
# Ship with libraries prebuilt, so run that first.
BuildStepBuildLibraries(pepperdir, 'src')
GenerateNotice(pepperdir)
# Verify the SDK contains what we expect.
BuildStepVerifyFilelist(pepperdir)
if options.tar:
BuildStepTarBundle(pepper_ver, tarfile)
if platform == 'linux':
BuildStepBuildPNaClComponent(pepper_ver, chrome_revision)
if options.build_app_engine and platform == 'linux':
BuildStepBuildAppEngine(pepperdir, chrome_revision)
if options.qemu:
qemudir = os.path.join(NACL_DIR, 'toolchain', 'linux_arm-trusted')
oshelpers.Copy(['-r', qemudir, pepperdir])
# Archive the results on Google Cloud Storage.
if options.archive:
BuildStepArchiveBundle('build', pepper_ver, chrome_revision, nacl_revision,
tarfile)
# Only archive sdk_tools/naclports/pnacl_component on linux.
if platform == 'linux':
BuildStepArchiveSDKTools()
BuildStepArchivePNaClComponent(chrome_revision)
return 0
if __name__ == '__main__':
try:
sys.exit(main(sys.argv[1:]))
except KeyboardInterrupt:
buildbot_common.ErrorExit('build_sdk: interrupted')
| bsd-3-clause | 1,889,471,415,100,197,600 | 34.797244 | 79 | 0.664889 | false | 3.260421 | false | false | false |
moodpulse/l2 | refprocessor/processor.py | 1 | 1703 | from typing import Tuple, Union, List
from appconf.manager import SettingManager
from refprocessor.age_parser import AgeRight
from refprocessor.common import ValueRange, RANGE_IN
from refprocessor.result_parser import ResultRight
class RefProcessor:
def __init__(self, ref: dict, age: List[int]):
actual_key, actual_ref, actual_raw_ref = RefProcessor.get_actual_ref(ref, age)
self.key = actual_key
self.ref = actual_ref
self.raw_ref = actual_raw_ref
@staticmethod
def get_actual_ref(ref: dict, age: List[int]) -> Union[Tuple[str, ResultRight, str], Tuple[None, None, None]]:
for k in ref:
age_rights = AgeRight(k)
if age_rights.test(age):
return k, ResultRight(ref[k]), ref[k]
return None, None, None
def get_active_ref(self, raw_ref=True, single=False):
if raw_ref:
if single:
show_only_needed_ref = SettingManager.get("show_only_needed_ref", default='True', default_type='b')
if not show_only_needed_ref or not self.raw_ref:
return None
show_full_needed_ref = SettingManager.get("show_full_needed_ref", default='False', default_type='b')
if show_full_needed_ref:
return {self.key: self.raw_ref}
return {'Все': self.raw_ref}  # 'Все' is Russian for 'All'
return self.raw_ref
if isinstance(self.ref, ResultRight):
return self.ref
return ValueRange((0, ")"), (0, ")"))
def calc(self, value):
if isinstance(self.ref, ResultRight):
return self.ref.test(value)
return ResultRight.RESULT_MODE_NORMAL, RANGE_IN
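# Minimal usage sketch (hypothetical values; the dict key format must
# match whatever AgeRight parses):
#   proc = RefProcessor({'some-age-key': '10 - 20'}, [25, 0])
#   proc.get_active_ref()  # raw ref dict, or None when hidden by settings
#   proc.calc('15')        # (mode, range) tuple from ResultRight.test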
| mit | 6,272,185,253,824,169,000 | 36.777778 | 116 | 0.606471 | false | 3.71179 | false | false | false |
TacticalGoat/reddit | AutoContributor/autocontributor.py | 1 | 2306 | #/u/GoldenSights
import praw # simple interface to the reddit API, also handles rate limiting of requests
import time
import sqlite3
'''USER CONFIGURATION'''
APP_ID = ""
APP_SECRET = ""
APP_URI = ""
APP_REFRESH = ""
# https://www.reddit.com/comments/3cm1p8/how_to_make_your_bot_use_oauth2/
USERAGENT = ""
#This is a short description of what the bot does. For example "/u/GoldenSights' Newsletter bot"
SUBREDDIT = "GoldTesting"
#The subreddit you are acting on.
SUBJECTLINE = ['submission']
#If the modmail subject line contains one of these keywords, the sender will be added as a contributor
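#e.g. a modmail titled 'Submission request' matches the 'submission'
#keyword, since both sides are lower-cased before comparison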
MAXPOSTS = 100
#The number of modmails to collect at once. 100 can be fetched with a single request
WAIT = 30
#This is how many seconds you will wait between cycles. The bot is completely inactive during this time.
'''All done!'''
sql = sqlite3.connect('sql.db')
print('Loaded SQL Database')
cur = sql.cursor()
cur.execute('CREATE TABLE IF NOT EXISTS oldposts(ID TEXT)')
print('Loaded Users table')
sql.commit()
try:
import bot
USERAGENT = bot.aG
except ImportError:
pass
WAITS = str(WAIT)
print('Logging in.')
r = praw.Reddit(USERAGENT)
r.set_oauth_app_info(APP_ID, APP_SECRET, APP_URI)
r.refresh_access_information(APP_REFRESH)
def scanmessages():
print('Getting ' + SUBREDDIT + ' modmail')
subreddit = r.get_subreddit(SUBREDDIT)
modmail = list(subreddit.get_mod_mail(limit=MAXPOSTS))
for message in modmail:
cur.execute('SELECT * FROM oldposts WHERE ID=?', [message.fullname])
if not cur.fetchone():
print(message.fullname)
try:
mauthor = message.author.name
msubject = message.subject.lower()
if any(keyword.lower() in msubject for keyword in SUBJECTLINE):
print('\tApproving ' + mauthor)
subreddit.add_contributor(mauthor)
message.mark_as_read()
except AttributeError:
print('Failed to fetch username')
cur.execute('INSERT INTO oldposts VALUES(?)', [message.fullname])
sql.commit()
while True:
try:
scanmessages()
except Exception as e:
print('ERROR: ' + str(e))
sql.commit()
print('Running again in ' + WAITS + ' seconds \n_________\n')
time.sleep(WAIT)
| mit | -7,222,148,781,160,823,000 | 28.948052 | 104 | 0.652645 | false | 3.575194 | false | false | false |
spectrumone/django-outlook-api | python_tutorial/tutorial/outlookservice.py | 1 | 4799 | import requests
import uuid
import json
outlook_api_endpoint = 'https://outlook.office.com/api/v2.0{0}'
# Generic API Sending
def make_api_call(method, url, token, payload = None, parameters = None):
# Send these headers with all API calls
headers = { 'User-Agent' : 'django-tutorial/1.0',
'Authorization' : 'Bearer {0}'.format(token),
'Accept' : 'application/json'}
# Use these headers to instrument calls. Makes it easier
# to correlate requests and responses in case of problems
# and is a recommended best practice.
request_id = str(uuid.uuid4())
instrumentation = { 'client-request-id' : request_id,
'return-client-request-id' : 'true' }
headers.update(instrumentation)
response = None
# Fall back to a hard-coded sample event only when the caller supplies
# no payload (post_my_events relies on it); the original tutorial code
# overwrote the `payload` argument unconditionally, which broke every
# other POST/PATCH caller.
payload = payload or {
"Subject": "Discuss the Calendar REST API",
"Body": {
"ContentType": "HTML",
"Content": "I think it will meet our requirements!"
},
"Start": {
"DateTime": "2014-04-04T18:00:00",
"TimeZone": "Pacific Standard Time"
},
"End": {
"DateTime": "2014-04-04T19:00:00",
"TimeZone": "Pacific Standard Time"
},
"Attendees": [
{
"EmailAddress": {
"Address": "[email protected]",
"Name": "Janet Schorr"
},
"Type": "Required"
}
]
}
if (method.upper() == 'GET'):
response = requests.get(url, headers = headers, params = parameters)
elif (method.upper() == 'DELETE'):
response = requests.delete(url, headers = headers, params = parameters)
elif (method.upper() == 'PATCH'):
headers.update({ 'Content-Type' : 'application/json' })
response = requests.patch(url, headers = headers, data = json.dumps(payload), params = parameters)
elif (method.upper() == 'POST'):
headers.update({ 'Content-Type' : 'application/json' })
response = requests.post(url, headers = headers, data = json.dumps(payload), params = parameters)
return response
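# Minimal usage sketch (assumes `token` holds a valid OAuth2 access
# token for the Outlook API):
#   url = outlook_api_endpoint.format('/Me')
#   r = make_api_call('GET', url, token)
#   if r.status_code == requests.codes.ok:
#       print(r.json())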
def get_my_messages(access_token):
get_messages_url = outlook_api_endpoint.format('/Me/Messages')
# Use OData query parameters to control the results
# - Only first 10 results returned
# - Only return the ReceivedDateTime, Subject, and From fields
# - Sort the results by the ReceivedDateTime field in descending order
query_parameters = {'$top': '10',
'$select': 'ReceivedDateTime,Subject,From',
'$orderby': 'ReceivedDateTime DESC'}
r = make_api_call('GET', get_messages_url, access_token, parameters = query_parameters)
if (r.status_code == requests.codes.ok):
return r.json()
else:
return "{0}: {1}".format(r.status_code, r.text)
def get_my_events(access_token):
get_events_url = outlook_api_endpoint.format('/Me/Events')
# Use OData query parameters to control the results
# - Only first 10 results returned
# - Only return the Subject, Start, and End fields
# - Sort the results by the Start field in ascending order
query_parameters = {'$top': '10',
'$select': 'Subject,Start,End',
'$orderby': 'Start/DateTime ASC'}
r = make_api_call('GET', get_events_url, access_token, parameters = query_parameters)
if (r.status_code == requests.codes.ok):
return r.json()
else:
return "{0}: {1}".format(r.status_code, r.text)
def post_my_events(access_token):
post_events_url = outlook_api_endpoint.format('/Me/Events')
r = make_api_call('POST', post_events_url, access_token)
if r.ok:  # a successful create returns 201 Created, which a strict 200 check would reject
return r.json()
else:
return "{0}: {1}".format(r.status_code, r.text)
def get_my_contacts(access_token):
get_contacts_url = outlook_api_endpoint.format('/Me/Contacts')
# Use OData query parameters to control the results
# - Only first 10 results returned
# - Only return the GivenName, Surname, and EmailAddresses fields
# - Sort the results by the GivenName field in ascending order
query_parameters = {'$top': '10',
'$select': 'GivenName,Surname,EmailAddresses',
'$orderby': 'GivenName ASC'}
r = make_api_call('GET', get_contacts_url, access_token, parameters = query_parameters)
if (r.status_code == requests.codes.ok):
return r.json()
else:
return "{0}: {1}".format(r.status_code, r.text)
| mit | 156,802,696,262,218,100 | 36.787402 | 106 | 0.574703 | false | 3.933607 | false | false | false |
frankban/UbuntuPaste | ubuntupaste.py | 1 | 6417 | # This software is licensed under the GNU Affero General Public License
# version 3 (see the file LICENSE).
import itertools
import os
import pwd
import threading
import urllib
import urllib2
import webbrowser
import sublime
import sublime_plugin
class UserInterface(object):
"""User interface for this plugin."""
def __init__(self, command_name, view):
self.command_name = command_name.title()
self.view = view
self.count = itertools.count()
def _get_content(self, contents):
return '{0}: {1}'.format(self.command_name, ' '.join(contents))
def message(self, *contents):
"""Display a message in the status bar."""
sublime.status_message(self._get_content(contents))
def status(self, *contents):
"""Add a status to the view, using contents as value."""
self.view.set_status(self.command_name, self._get_content(contents))
def progress(self, url):
"""Show pasting progress."""
dots = '.' * (self.count.next() % 4)
self.status('Pasting to', url, '[', dots.ljust(3), ']')
def error(self, *contents):
"""Display an error in the status bar."""
self.message('ERROR:', *contents)
def success(self, result, copy_to_clipboard, open_in_browser):
"""Paste succeded."""
contents = ['URL:', result, '|']
if copy_to_clipboard:
contents.append('Copied to your clipboard!')
if open_in_browser:
contents.append('Opened in your browser!')
self.message(*contents)
def done(self):
"""Erase the status messages."""
self.view.erase_status(self.command_name)
class Settings(object):
"""Store and validate plugin settings."""
def __init__(self, global_settings, local_settings):
self._global_settings = global_settings
self._local_settings = local_settings
self.error = None
self.options = ()
def _get_poster(self):
"""Get the current system user name."""
return os.getenv('USER', pwd.getpwuid(os.geteuid()).pw_name)
def _get_syntax(self, syntax_map, default):
"""Return the syntax to be used by the paster."""
syntax_file = self._global_settings.get('syntax')
if syntax_file is None:
return default
syntax = os.path.splitext(os.path.basename(syntax_file))[0]
return syntax_map.get(syntax.lower(), default)
def are_valid(self):
"""Validate and set up options."""
settings = self._local_settings
url = settings.get('url')
if url is None:
self.error = 'Invalid URL.'
return False
copy_to_clipboard = settings.get('copy_to_clipboard', True)
open_in_browser = settings.get('open_in_browser', False)
if not (copy_to_clipboard or open_in_browser):
self.error = 'You need to either copy or open the URL.'
return False
poster = settings.get('poster')
if not poster:
poster = self._get_poster()
sep = settings.get('sep', '\n\n # ---\n\n')
syntax_default = settings.get('syntax_default', 'text')
syntax_guess = settings.get('syntax_guess', True)
if syntax_guess:
syntax_map = settings.get('syntax_map', {})
syntax = self._get_syntax(syntax_map, syntax_default)
else:
syntax = syntax_default
self.options = (
url, copy_to_clipboard, open_in_browser, poster, sep, syntax
)
return True
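# Illustrative UbuntuPaste.sublime-settings content (keys mirror the
# get() calls above; values are hypothetical defaults):
#   {
#       "url": "http://pastebin.ubuntu.com/",
#       "copy_to_clipboard": true,
#       "open_in_browser": false,
#       "sep": "\n\n # ---\n\n",
#       "syntax_default": "text",
#       "syntax_guess": true,
#       "syntax_map": {"python": "python"}
#   }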
class Paster(threading.Thread):
"""Paste code snippets to ubuntu pastebin."""
def __init__(self, url, **kwargs):
self.url = url
self.data = kwargs
self.error = None
self.result = None
threading.Thread.__init__(self)
def run(self):
try:
request = urllib2.Request(
self.url, urllib.urlencode(self.data),
headers={'User-Agent': 'SublimeText2'})
response = urllib2.urlopen(request, timeout=5)
except urllib2.HTTPError as err:
self.error = 'HTTP error {0}.'.format(err.code)
except urllib2.URLError as err:
self.error = 'URL error {0}.'.format(err.reason)
else:
self.result = response.url
class UbuntupasteCommand(sublime_plugin.TextCommand):
"""Paste code snippets on http://pastebin.ubuntu.com/."""
def __init__(self, *args, **kwargs):
self.ui = None
self._is_enabled = True
super(UbuntupasteCommand, self).__init__(*args, **kwargs)
def is_enabled(self):
return self._is_enabled
def get_content(self, sep):
"""Return the contents of current selections.
If no region is selected, return all the text in the current view.
"""
view = self.view
regions = [i for i in view.sel() if not i.empty()]
if not regions:
regions = [sublime.Region(0, view.size())]
return sep.join(view.substr(region) for region in regions)
def run(self, edit):
self._is_enabled = False
self.ui = UserInterface(self.name(), self.view)
settings = Settings(
self.view.settings(),
sublime.load_settings('UbuntuPaste.sublime-settings'))
if settings.are_valid():
self.handle(*settings.options)
else:
self.ui.error(settings.error)
def handle(
self, url, copy_to_clipboard, open_in_browser, poster, sep, syntax):
paster = Paster(
url, content=self.get_content(sep), poster=poster, syntax=syntax)
self.ui.progress(url)
paster.start()
self.wait(paster, copy_to_clipboard, open_in_browser)
def wait(self, paster, *args):
if not paster.is_alive():
return self.done(paster, *args)
self.ui.progress(paster.url)
sublime.set_timeout(lambda: self.wait(paster, *args), 200)
def done(self, paster, copy_to_clipboard, open_in_browser):
result = paster.result
if result:
if copy_to_clipboard:
sublime.set_clipboard(result)
if open_in_browser:
webbrowser.open(result)
self.ui.success(result, copy_to_clipboard, open_in_browser)
else:
self.ui.error(paster.error)
self.ui.done()
self._is_enabled = True
| agpl-3.0 | 5,063,318,384,990,150,000 | 32.773684 | 77 | 0.59171 | false | 3.870326 | false | false | false |
MontrealCorpusTools/polyglot-server | iscan/annotator/models.py | 1 | 4133 | from django.db import models
from polyglotdb import CorpusContext
# Create your models here.
class Annotation(models.Model):
ITEM_TYPE_CHOICES = (('U', 'Utterance'),
('W', 'Word'),
('Y', 'Syllable'),
('P', 'Phone'))
corpus = models.ForeignKey('iscan.Corpus', on_delete=models.CASCADE)
item_type = models.CharField(max_length=1, choices=ITEM_TYPE_CHOICES, default='P')
label = models.CharField(max_length=100)
save_user = models.BooleanField(default=False)
def __str__(self):
return '{}'.format(self.label)
def check_hierarchy(self):
a_type = self.get_item_type_display().lower()
with CorpusContext(self.corpus.config) as c:
if not c.hierarchy.has_subannotation_type(self.label):
properties = []
if self.save_user:
properties =[('user', str)]
for field in self.fields.all():
if field.annotation_choice == 'N':
t = float
elif field.annotation_choice == 'B':
t = bool
else:
t = str
properties.append((field.label, t))
c.hierarchy.add_subannotation_type(c, a_type, self.label, properties=properties)
def add_property(self, field):
props = []
if field.annotation_choice == 'N':
t = float
elif field.annotation_choice == 'B':
t = bool
else:
t = str
props.append((field.label, t))
with CorpusContext(self.corpus.config) as c:
c.hierarchy.add_subannotation_properties(c, self.label, props)
print(c.hierarchy.subannotations)
print(c.hierarchy.subannotation_properties)
def remove_property(self, field):
props = []
props.append(field.label)
with CorpusContext(self.corpus.config) as c:
c.hierarchy.remove_subannotation_properties(c, self.label, props)
print(c.hierarchy.subannotations)
print(c.hierarchy.subannotation_properties)
def save(self, *args, **kwargs):
a_type = self.get_item_type_display().lower()
s_type = self.label
with CorpusContext(self.corpus.config) as c:
if not c.hierarchy.has_subannotation_type(s_type):
properties = []
if self.save_user:
properties =[('user', str)]
c.hierarchy.add_subannotation_type(c, a_type, s_type, properties=properties)
super(Annotation, self).save(*args, **kwargs)
print(c.hierarchy.subannotations)
print(c.hierarchy.subannotation_properties)
def delete(self, using=None, keep_parents=False):
with CorpusContext(self.corpus.config) as c:
c.hierarchy.remove_subannotation_type(c, self.label)
super(Annotation, self).delete(using=using, keep_parents=keep_parents)
class AnnotationField(models.Model):
FIELD_CHOICES = (('C', 'Choice field'),
('S', 'String'),
('B', 'Boolean'),
('N', 'Numeric'))
annotation = models.ForeignKey(Annotation, on_delete=models.CASCADE, related_name='fields')
annotation_choice = models.CharField(max_length=1, choices=FIELD_CHOICES, default='C')
label = models.CharField(max_length=100)
def __str__(self):
return '{} {}'.format(self.annotation, self.label)
def save(self, *args, **kwargs):
super(AnnotationField, self).save(*args, **kwargs)
self.annotation.add_property(self)
def delete(self, using=None, keep_parents=False):
self.annotation.remove_property(self)
super(AnnotationField, self).delete(using=using, keep_parents=keep_parents)
class AnnotationChoice(models.Model):
annotation = models.ForeignKey(AnnotationField, on_delete=models.CASCADE, related_name='choices')
choice = models.CharField(max_length=100)
def __str__(self):
return '{} = {}'.format(self.annotation, self.choice)
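# Illustrative usage (assumes an existing Corpus instance `corpus`):
#   ann = Annotation.objects.create(corpus=corpus, item_type='P',
#                                   label='voicing', save_user=True)
#   AnnotationField.objects.create(annotation=ann, annotation_choice='B',
#                                  label='voiced')
# Saving the field registers a boolean 'voiced' property on the
# 'voicing' subannotation type in the corpus hierarchy.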
| mit | -4,415,763,708,759,667,700 | 37.990566 | 101 | 0.585289 | false | 4.036133 | false | false | false |
ningirsu/stepmania-server | smserver/smutils/smpacket/smpacket.py | 1 | 34459 | """
The ``SMpacket`` module
========================
Provides easy handling of the StepMania protocol.
:Example:
>>> from smserver.smutils.smpacket import smcommand
>>> from smserver.smutils.smpacket import smpacket
>>> # Create a new packet instance
>>> packet = SMPacket.new(smcommand.SMServerCommand.NSCCM, message="test")
>>> print(packet)
<SMPacketServerNSCCM message="test">
>>> # Binary encode your packet
>>> packet.binary
b'\\x00\\x00\\x00\\x06\\x87test\\x00'
>>> # Decode binary data
>>> packet2 = SMPacket.from_("binary", packet.binary)
>>> print(packet2)
<SMPacketServerNSCCM message="test">
>>> packet = SMPacket.new(smcommand.SMServerCommand.NSCPing)
>>> # JSON encode your packet
>>> packet.json
'{"_command": 128}'
>>> # Decode JSON data
>>> packet2 = SMPacket.from_("json", packet.json)
>>> print(packet2)
<SMPacketServerNSCPing >
"""
import json
from smserver.smutils.smpacket import smcommand
from smserver.smutils.smpacket import smencoder
class _SMPacketMetaclass(type):
"""Metaclass that implements PEP 487 protocol"""
def __init__(cls, name, bases, attrs, **kw):
super().__init__(name, bases, attrs, **kw)
parent_class = super(cls, cls)
if hasattr(parent_class, '__init_subclass_custom__'):
parent_class.__init_subclass_custom__(cls, **kw) #pylint: disable=no-member
class SMPacket(metaclass=_SMPacketMetaclass):
""" Main class for declare/parse packet """
_command_type = smcommand.SMCommand
_payload = []
_subclasses = {}
command = None
def __init__(self, **kwargs):
self.command = self.command  # pin the class-level command onto the instance
if "_command" in kwargs:
kwargs.pop("_command")
self.opts = kwargs
def __init_subclass_custom__(cls, **_kwargs): #pylint: disable=no-self-argument
command = cls.command
if not command:
return
if command in cls._subclasses:
raise ValueError("Command already defined")
cls._subclasses[command] = cls
def __len__(self):
return 1 + len(self.payload)
def __str__(self):
return "<%s %s>" % (
self.__class__.__name__,
" ".join(['%s="%s"' % (k, v) for k, v in self.opts.items()]))
def __repr__(self):
return "<%s %s>" % (
self.__class__.__name__,
" ".join(['%s="%s"' % (k, v) for k, v in self.opts.items()]))
def __getitem__(self, value):
return self.opts[value]
def __setitem__(self, key, value):
self.opts[key] = value
def get(self, value, default=None):
return self.opts.get(value, default)
@classmethod
def new(cls, command, **kwargs):
"""
Return an instance with the corresponding command.
If no command is found, return None
:Example:
>>> from smserver.smutils.smpacket import *
>>> print(SMPacket.new(smcommand.SMServerCommand.NSCCM, message="msg"))
<SMPacketServerNSCCM message="msg">
"""
if command not in cls._subclasses:
return None
return cls._subclasses[command](**kwargs)
@classmethod
def get_class(cls, command):
"""
Get the class with the corresponding command
:Example:
>>> from smserver.smutils.smpacket import *
>>> print(SMPacket.get_class(smcommand.SMServerCommand.NSCCM))
<class 'smserver.smutils.smpacket.smpacket.SMPacketServerNSCCM'>
"""
return cls._subclasses.get(command, None)
@property
def binarycommand(self):
"""
Return the command in a binary string
:Example:
>>> from smserver.smutils.smpacket import *
>>> packet = SMPacket.new(smcommand.SMServerCommand.NSCCM, message="msg")
>>> print(packet.binarycommand)
b'\\x87'
"""
return self.command.value.to_bytes(1, byteorder='big')
@property
def binarysize(self):
"""
Return the size of the packet as a 4-byte string.
:Example:
>>> from smserver.smutils.smpacket import *
>>> packet = SMPacket.new(smcommand.SMServerCommand.NSCCM, message="msg")
>>> print(packet.binarysize)
b'\\x00\\x00\\x00\\x05'
"""
return len(self).to_bytes(4, byteorder='big')
@property
def data(self):
"""
Return the command + payload in a binary string
:Example:
>>> from smserver.smutils.smpacket import *
>>> packet = SMPacket.new(smcommand.SMServerCommand.NSCCM, message="msg")
>>> print(packet.data)
b'\\x87msg\\x00'
"""
return self.binarycommand + self.payload
@property
def binary(self):
"""
Return the full binary encoded packet (size + command + payload)
:Example:
>>> from smserver.smutils.smpacket import *
>>> packet = SMPacket.new(smcommand.SMServerCommand.NSCCM, message="msg")
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x05\\x87msg\\x00'
"""
return self.binarysize + self.data
@property
def payload(self):
"""
Return the payload encoded in binary
:Example:
>>> from smserver.smutils.smpacket import *
>>> packet = SMPacket.new(smcommand.SMServerCommand.NSCCM, message="msg")
>>> print(packet.payload)
b'msg\\x00'
"""
return smencoder.BinaryEncoder.encode(self.opts, self._payload)
@property
def json(self):
"""
Return the JSON encoded packet
:Example:
>>> from smserver.smutils.smpacket import *
>>> packet = SMPacket.new(smcommand.SMServerCommand.NSCPing)
>>> print(packet.json)
{"_command": 128}
"""
return smencoder.JSONEncoder.encode(self.opts, self._payload, command=self.command.value)
@classmethod
def from_payload(cls, payload):
"""
Decode the given binary payload
:Example:
>>> from smserver.smutils.smpacket import *
>>> payload_data = b'msg\\x00'
>>> print(SMPacketServerNSCCM.from_payload(payload_data))
<SMPacketServerNSCCM message="msg">
"""
return cls(
**smencoder.BinaryEncoder.decode(payload, cls._payload)[1]
)
@classmethod
def from_json(cls, payload):
"""
Decode a JSON encoded packet
:Example:
>>> from smserver.smutils.smpacket import *
>>> json_data = '{"message": "msg"}'
>>> print(SMPacketServerNSCCM.from_json(json_data))
<SMPacketServerNSCCM message="msg">
"""
return cls(
**smencoder.JSONEncoder.decode(payload, cls._payload)
)
def to_(self, encoding):
"""
Encode the packet to the specified format (json or binary)
"""
return {
"json": self.json,
"binary": self.binary
}[encoding]
@classmethod
def from_(cls, encoding, data):
"""
Decode the packet from the specified format (json or binary)
"""
return {
"json": cls.parse_json,
"binary": cls.parse_binary
}[encoding](data)
@classmethod
def parse_json(cls, data):
""" Parse a JSON packet """
try:
opts = json.loads(data)
except ValueError:
return None
command = cls._command_type.get(opts.get("_command", -1))
if not command:
return None
return cls.get_class(command).from_json(data)
@classmethod
def parse_data(cls, data):
""" Parse a binary packet """
if not data:
return None
command = cls._command_type.get(data[0])
if not command:
return None
return cls.get_class(command).from_payload(data[1:])
@classmethod
def parse_binary(cls, binary):
""" Parse a binary payload """
if len(binary) < 4:
return None
return cls.parse_data(binary[4:])
class SMOPacketClient(SMPacket):
_command_type = smcommand.SMOClientCommand
class SMOPacketServer(SMPacket):
_command_type = smcommand.SMOServerCommand
class SMOPacketClientLogin(SMOPacketClient):
command = smcommand.SMOClientCommand.LOGIN
_payload = [
(smencoder.SMPayloadType.INT, "player_number", None),
(smencoder.SMPayloadType.INT, "encryption", None),
(smencoder.SMPayloadType.NT, "username", None),
(smencoder.SMPayloadType.NT, "password", None)
]
class SMOPacketClientEnterRoom(SMOPacketClient):
command = smcommand.SMOClientCommand.ENTERROOM
_payload = [
(smencoder.SMPayloadType.INT, "enter", None),
(smencoder.SMPayloadType.NT, "room", None),
(smencoder.SMPayloadType.NT, "password", None)
]
class SMOPacketClientCreateRoom(SMOPacketClient):
command = smcommand.SMOClientCommand.CREATEROOM
_payload = [
(smencoder.SMPayloadType.INT, "type", None),
(smencoder.SMPayloadType.NT, "title", None),
(smencoder.SMPayloadType.NT, "description", None),
(smencoder.SMPayloadType.NT, "password", None)
]
class SMOPacketClientRoomInfo(SMOPacketClient):
command = smcommand.SMOClientCommand.ROOMINFO
_payload = [
(smencoder.SMPayloadType.NT, "room", None)
]
class SMOPacketServerLogin(SMOPacketServer):
command = smcommand.SMOServerCommand.LOGIN
_payload = [
(smencoder.SMPayloadType.INT, "approval", None),
(smencoder.SMPayloadType.NT, "text", None)
]
class SMOPacketServerRoomUpdate(SMOPacketServer):
command = smcommand.SMOServerCommand.ROOMUPDATE
_payload = [
(smencoder.SMPayloadType.INT, "type", None),
(smencoder.SMPayloadType.MAP, "room_title", ("type", {
0: (smencoder.SMPayloadType.NT, None, None),
})),
(smencoder.SMPayloadType.MAP, "room_description", ("type", {
0: (smencoder.SMPayloadType.NT, None, None),
})),
(smencoder.SMPayloadType.MAP, "room_type", ("type", {
0: (smencoder.SMPayloadType.INT, None, 1),
})),
(smencoder.SMPayloadType.MAP, "subroom", ("type", {
0: (smencoder.SMPayloadType.INT, None, 1),
})),
(smencoder.SMPayloadType.MAP, "nb_rooms", ("type", {
1: (smencoder.SMPayloadType.INT, None, 1),
})),
(smencoder.SMPayloadType.MAP, "rooms", ("type", {
1: (smencoder.SMPayloadType.LIST, None, ("nb_rooms", [
(smencoder.SMPayloadType.NT, "title", None),
(smencoder.SMPayloadType.NT, "description", None),
])),
})),
(smencoder.SMPayloadType.MAP, "room_status", ("type", {
1: (smencoder.SMPayloadType.INTLIST, None, (1, "nb_rooms")),
})),
(smencoder.SMPayloadType.MAP, "room_flags", ("type", {
1: (smencoder.SMPayloadType.INTLIST, None, (1, "nb_rooms")),
})),
]
class SMOPacketServerGeneralInfo(SMOPacketServer):
command = smcommand.SMOServerCommand.GENERALINFO
_payload = [
(smencoder.SMPayloadType.INT, "format", None),
]
class SMOPacketServerRoomInfo(SMOPacketServer):
command = smcommand.SMOServerCommand.ROOMINFO
_payload = [
(smencoder.SMPayloadType.NT, "song_title", None),
(smencoder.SMPayloadType.NT, "song_subtitle", None),
(smencoder.SMPayloadType.NT, "song_artist", None),
(smencoder.SMPayloadType.INT, "num_players", None),
(smencoder.SMPayloadType.INT, "max_players", None),
(smencoder.SMPayloadType.NTLIST, "players", "num_players"),
]
class SMPacketClientNSCPing(SMPacket):
"""
Client command 000. (Ping)
This command will cause server to respond with a PingR Command
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketClientNSCPing()
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x01\\x00'
"""
command = smcommand.SMClientCommand.NSCPing
_payload = []
class SMPacketClientNSCPingR(SMPacket):
"""
Client command 001. (Ping response)
This command is used to respond to Ping Command.
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketClientNSCPingR()
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x01\\x01'
"""
command = smcommand.SMClientCommand.NSCPingR
_payload = []
class SMPacketClientNSCHello(SMPacket):
"""
Client command 002. (Hello)
This is the first packet from a client to server.
:param int version: Client protocol version
:param str name: Name of the stepmania build
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketClientNSCHello(
... name="stepmania",
... version=128
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x0c\\x02\\x80stepmania\\x00'
"""
command = smcommand.SMClientCommand.NSCHello
_payload = [
(smencoder.SMPayloadType.INT, "version", None),
(smencoder.SMPayloadType.NT, "name", None)
]
class SMPacketClientNSCGSR(SMPacket):
"""
Client command 003 (Game Start Request)
This command is called once after most loading is done, and again
immediately before the sound starts.
The server has to respond with a SMPacketServerNSCGSR; otherwise the
client will freeze.
:param int first_player_feet: Primary player feet (0 for no player)
:param int second_player_feet: Secondary player feet (0 for no player)
:param int first_player_difficulty: Primary player difficulty (0=Beginner, 1=easy, etc.)
:param int second_player_difficulty: Secondary player difficulty (0=Beginner, 1=easy, etc.)
:param int start_position: (0 is pre-sync, 1 is for sync)
:param int reserved: ignored
:param str song_title: Title of the song to play
:param str song_subtitle: Subtitle of the song to play
:param str song_artist: Artist of the song to play
:param str course_title: Course Title
:param str song_options: Song option in string format
:param str first_player_options: Primary player's option
:param str second_player_options: Secondary player's option
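The example below is an illustrative sketch: every field value is
arbitrary and only shows how a Game Start Request is assembled.
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketClientNSCGSR(
...     first_player_feet=8, second_player_feet=0,
...     first_player_difficulty=3, second_player_difficulty=0,
...     start_position=0, reserved=0,
...     song_title="Title", song_subtitle="", song_artist="Artist",
...     course_title="", song_options="",
...     first_player_options="", second_player_options="",
... )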
"""
command = smcommand.SMClientCommand.NSCGSR
_payload = [
(smencoder.SMPayloadType.MSN, "first_player_feet", None),
(smencoder.SMPayloadType.LSN, "second_player_feet", None),
(smencoder.SMPayloadType.MSN, "first_player_difficulty", None),
(smencoder.SMPayloadType.LSN, "second_player_difficulty", None),
(smencoder.SMPayloadType.MSN, "start_position", None),
(smencoder.SMPayloadType.LSN, "reserved", None),
(smencoder.SMPayloadType.NT, "song_title", None),
(smencoder.SMPayloadType.NT, "song_subtitle", None),
(smencoder.SMPayloadType.NT, "song_artist", None),
(smencoder.SMPayloadType.NT, "course_title", None),
(smencoder.SMPayloadType.NT, "song_options", None),
(smencoder.SMPayloadType.NT, "first_player_options", None),
(smencoder.SMPayloadType.NT, "second_player_options", None),
]
class SMPacketClientNSCGON(SMPacket):
"""
Client command 004 (Game Over Notice)
This command is sent when end of game is encounter.
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketClientNSCGON()
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x01\\x04'
"""
command = smcommand.SMClientCommand.NSCGON
class SMPacketClientNSCGSU(SMPacket):
"""
Client command 005 (Game Status update)
Update game info for each step in the game
:param int player_id: player # (0 or 1)
:param int step_id: (1: hitMine, 2: AvoidMine, ...)
:param int grade: Projected Grade (0: AAAA, 1: AAA, ...)
:param int reserved: ignored
:param int score: Actual score
:param int combo: Actual combo
:param int health: Actual health
:param int offset: Offset from the note (32767=miss)
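The doctest below is illustrative: the values are arbitrary, and no
binary output is asserted because it depends on the nibble packing of
the MSN/LSN fields.
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketClientNSCGSU(
...     player_id=0, step_id=1, grade=0, reserved=0,
...     score=4, combo=5, health=6, offset=7,
... )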
"""
command = smcommand.SMClientCommand.NSCGSU
_payload = [
(smencoder.SMPayloadType.MSN, "player_id", None),
(smencoder.SMPayloadType.LSN, "step_id", None),
(smencoder.SMPayloadType.MSN, "grade", None),
(smencoder.SMPayloadType.LSN, "reserved", None),
(smencoder.SMPayloadType.INT, "score", 4),
(smencoder.SMPayloadType.INT, "combo", 2),
(smencoder.SMPayloadType.INT, "health", 2),
(smencoder.SMPayloadType.INT, "offset", 2)
]
class SMPacketClientNSCSU(SMPacket):
"""
Client command 006 (Style Update)
This is sent when a profile is chosen. It also indicates the number
of players in the local client (1 or 2).
:param int nb_players: Number of players in the client (1 or 2)
:param int player_id: Player ID (0 or 1)
:param str player_name: Player name
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketClientNSCSU(
... nb_players=2,
... player_id=0,
... player_name="profile1",
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x0c\\x06\\x02\\x00profile1\\x00'
"""
command = smcommand.SMClientCommand.NSCSU
_payload = [
(smencoder.SMPayloadType.INT, "nb_players", None),
(smencoder.SMPayloadType.INT, "player_id", None),
(smencoder.SMPayloadType.NT, "player_name", None),
]
class SMPacketClientNSCCM(SMPacket):
"""
Client command 007 (Chat Message)
The user typed a message for general chat.
:param str message: The message sent by the client.
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketClientNSCCM(message="Client message")
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x10\\x07Client message\\x00'
"""
command = smcommand.SMClientCommand.NSCCM
_payload = [
(smencoder.SMPayloadType.NT, "message", None),
]
class SMPacketClientNSCRSG(SMPacket):
"""
Client command 008 (Request Start Game)
Request Start Game and tell the server about the existence/non-existence of the song:
The user selected a song on a Net-enabled selection screen
:param int usage: Usage for this message
:param str song_title: Song title
:param str song_subtitle: Song subtitle
:param str song_artist: Song artist
:Example:
>>> # Client select the song ('Title', by 'Artist').
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketClientNSCRSG(
... usage=2,
... song_title="Title",
... song_artist="Artist",
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x10\\x08\\x02Title\\x00Artist\\x00\\x00'
"""
command = smcommand.SMClientCommand.NSCRSG
_payload = [
(smencoder.SMPayloadType.INT, "usage", 1),
(smencoder.SMPayloadType.NT, "song_title", None),
(smencoder.SMPayloadType.NT, "song_artist", None),
(smencoder.SMPayloadType.NT, "song_subtitle", None),
]
class SMPacketClientNSCCUUL(SMPacket):
"""
Client command 009 (reserved)
"""
command = smcommand.SMClientCommand.NSCCUUL
class SMPacketClientNSSCSMS(SMPacket):
"""
Client command 010 (User status)
Indicate where the user is
:param int action: Int enum indicating where the user is
Action available:
* 0: exited ScreenNetSelectMusic
* 1: entered ScreenNetSelectMusic
* 2: Not Sent
* 3: entered options screen
* 4: exited the evaluation screen
* 5: entered evaluation screen
* 6: exited ScreenNetRoom
* 7: entered ScreenNetRoom
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> # Client enter in room selection
>>> packet = smpacket.SMPacketClientNSSCSMS(
... action=7,
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x02\\n\\x07'
"""
command = smcommand.SMClientCommand.NSSCSMS
_payload = [
(smencoder.SMPayloadType.INT, "action", None),
]
class SMPacketClientNSCUOpts(SMPacket):
"""
Client command 011 (User options)
User has changed player's options
:param str player_0: Player 0 options
:param str player_1: Player 1 options
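The doctest below is illustrative; it assumes NSCUOpts encodes as
command id 11 (0x0b) with the null-terminated string encoding used by
the other examples.
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketClientNSCUOpts(player_0="x", player_1="y")
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x05\\x0bx\\x00y\\x00'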
"""
command = smcommand.SMClientCommand.NSCUOpts
_payload = [
(smencoder.SMPayloadType.NT, "player_0", None),
(smencoder.SMPayloadType.NT, "player_1", None),
]
class SMPacketClientNSSMONL(SMPacket):
"""
Client command 012 (SMOnline Packet)
The SMLan packet 12 is a wrapper for the SMOnline packet.
:param packet: The SMOPacket to include
:type packet: SMOPacketClient
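The example below is an illustrative sketch; the login values are
arbitrary.
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> login = smpacket.SMOPacketClientLogin(
...     player_number=0, encryption=0, username="user", password="pass")
>>> packet = smpacket.SMPacketClientNSSMONL(packet=login)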
"""
command = smcommand.SMClientCommand.NSSMONL
_payload = [
(smencoder.SMPayloadType.PACKET, "packet", SMOPacketClient)
]
class SMPacketClientNSCFormatted(SMPacket):
"""
Client command 013 (reserved)
"""
command = smcommand.SMClientCommand.NSCFormatted
class SMPacketClientNSCAttack(SMPacket):
"""
Client command 014 (reserved)
"""
command = smcommand.SMClientCommand.NSCAttack
class SMPacketClientXMLPacket(SMPacket):
"""
Client command 15 (XMLPacket)
This packet contains data in XML format.
:param str xml: XML string
"""
command = smcommand.SMClientCommand.XMLPacket
_payload = [
(smencoder.SMPayloadType.NT, "xml", None),
]
class SMPacketServerNSCPing(SMPacket):
"""
Server command 128 (Ping)
This command will cause client to respond with a PingR command
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketServerNSCPing()
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x01\\x80'
"""
command = smcommand.SMServerCommand.NSCPing
class SMPacketServerNSCPingR(SMPacket):
"""
Server command 129 (PingR)
This command is used to respond to a Ping command.
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketServerNSCPingR()
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x01\\x81'
"""
command = smcommand.SMServerCommand.NSCPingR
class SMPacketServerNSCHello(SMPacket):
"""
Server command 130 (Hello)
This command introduces the server (in response to the Client Hello
command).
:param int version: The server protocol version (always 128)
:param str name: Name of the server
:param int key: Random key, used for password hashing
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketServerNSCHello(
... version=128,
... name="MyServer",
... key=999999999
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x0f\\x82\\x80MyServer\\x00;\\x9a\\xc9\\xff'
"""
command = smcommand.SMServerCommand.NSCHello
_payload = [
(smencoder.SMPayloadType.INT, "version", None),
(smencoder.SMPayloadType.NT, "name", None),
(smencoder.SMPayloadType.INT, "key", 4)
]
class SMPacketServerNSCGSR(SMPacket):
"""
Server command 131 (Allow Start)
This will cause the client to start the game
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketServerNSCGSR()
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x01\\x83'
"""
command = smcommand.SMServerCommand.NSCGSR
class SMPacketServerNSCGON(SMPacket):
"""
Server command 132 (Game over stats)
This packet is sent in response to the game over packet. It
contains information regarding how well each player did.
:param int nb_players: Number of player stats in this packet (size of the following lists)
:param list ids: Player's IDs (calculated from the SMPacketServerNSCUUL)
:param list score: Player's score
:param list grade: Player's grade
:param list difficulty: Player's difficulty
:param list flawless: NB of flawless notes
:param list perfect: NB of perfect notes
:param list great: NB of great notes
:param list good: NB of good notes
:param list bad: NB of bad notes
:param list miss: NB of missed notes
:param list held: NB of held notes
:param list max_combo: Player's max combo
:param list options: Player's options
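The example below is an illustrative sketch reporting stats for a
single player; every value is arbitrary.
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketServerNSCGON(
...     nb_players=1, ids=[0], score=[1000], grade=[2], difficulty=[3],
...     flawless=[10], perfect=[20], great=[30], good=[4], bad=[2],
...     miss=[1], held=[5], max_combo=[42], options=[""],
... )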
"""
command = smcommand.SMServerCommand.NSCGON
_payload = [
(smencoder.SMPayloadType.INT, "nb_players", 1),
(smencoder.SMPayloadType.INTLIST, "ids", (1, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "score", (4, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "grade", (1, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "difficulty", (1, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "flawless", (2, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "perfect", (2, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "great", (2, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "good", (2, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "bad", (2, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "miss", (2, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "held", (2, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "max_combo", (2, "nb_players")),
(smencoder.SMPayloadType.NTLIST, "options", "nb_players"),
]
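
    # Illustrative sketch (not part of the original source): building a
    # game-over stats packet for a single player. Every field value below is
    # hypothetical.
    #
    # packet = SMPacketServerNSCGON(
    #     nb_players=1, ids=[0], score=[123456], grade=[2], difficulty=[3],
    #     flawless=[120], perfect=[80], great=[10], good=[2], bad=[1],
    #     miss=[0], held=[5], max_combo=[150], options=["default"],
    # )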
class SMPacketServerNSCGSU(SMPacket):
"""
Server command 133 (Scoreboard update)
This will update the client's scoreboard.
:param int section: Which section to update (0: names, 1:combos, 2: grades)
    :param int nb_players: Nb of players in this packet
    :param list options: Int list containing names, combos or grades
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketServerNSCGSU(
    ...     section=1, # Update the current combo
... nb_players=2, # 2 users in this packet
... options=[12, 5] # List containing the combos
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x07\\x85\\x01\\x02\\x00\\x0c\\x00\\x05'
"""
command = smcommand.SMServerCommand.NSCGSU
_payload = [
(smencoder.SMPayloadType.INT, "section", 1),
(smencoder.SMPayloadType.INT, "nb_players", 1),
(smencoder.SMPayloadType.MAP, "options", ("section", {
0: (smencoder.SMPayloadType.INTLIST, None, (1, "nb_players")),
1: (smencoder.SMPayloadType.INTLIST, None, (2, "nb_players")),
2: (smencoder.SMPayloadType.INTLIST, None, (1, "nb_players")),
}))
]
class SMPacketServerNSCSU(SMPacket):
"""
Server command 134 (System Message)
Send a system message to user
:param str message: The message to send
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketServerNSCSU(message="System message")
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x10\\x86System message\\x00'
"""
command = smcommand.SMServerCommand.NSCSU
_payload = [
(smencoder.SMPayloadType.NT, "message", None)
]
class SMPacketServerNSCCM(SMPacket):
"""
Server command 135 (Chat Message)
Add a chat message to the chat window on some StepMania screens.
:param str message: The message to add
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketServerNSCSU(message="Client message")
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x10\\x86Client message\\x00'
"""
command = smcommand.SMServerCommand.NSCCM
_payload = [
(smencoder.SMPayloadType.NT, "message", None)
]
class SMPacketServerNSCRSG(SMPacket):
"""
Server command 136 (Request Start Game)
Tell client to start song/ask if client has song
:param int usage: Usage of this message
:param str song_title: Song title
:param str song_artist: Song artist
:param str song_subtitle: Song subtitle
Usage available:
* 0: See if client has song
* 1: See if client has song, if so, scroll to song
* 2: See if client has song, if so, scroll to song, and play that song
* 3: Blindly start song
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketServerNSCRSG(
... usage=0, # Check song presence
... song_title="title",
... song_artist="artist",
... song_subtitle="subtitle",
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x18\\x88\\x00title\\x00artist\\x00subtitle\\x00'
"""
command = smcommand.SMServerCommand.NSCRSG
_payload = [
(smencoder.SMPayloadType.INT, "usage", 1),
(smencoder.SMPayloadType.NT, "song_title", None),
(smencoder.SMPayloadType.NT, "song_artist", None),
(smencoder.SMPayloadType.NT, "song_subtitle", None),
]
class SMPacketServerNSCCUUL(SMPacket):
"""
Server command 137 (Update user list)
This sends all the users currently connected
:param int max_players: NB max of players (max 255)
:param int nb_players: NB of player's in this packet
:param list players: List containing status and name for each user
"""
command = smcommand.SMServerCommand.NSCCUUL
_payload = [
(smencoder.SMPayloadType.INT, "max_players", 1),
(smencoder.SMPayloadType.INT, "nb_players", 1),
(smencoder.SMPayloadType.LIST, "players", ("nb_players", [
(smencoder.SMPayloadType.INT, "status", 1),
(smencoder.SMPayloadType.NT, "name", None),
])
)
]
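
    # Illustrative sketch (not part of the original source): a user-list
    # packet for two connected players. This assumes the LIST payload type
    # accepts a list of dicts keyed by the sub-payload field names.
    #
    # packet = SMPacketServerNSCCUUL(
    #     max_players=255,
    #     nb_players=2,
    #     players=[{"status": 1, "name": "Alice"},
    #              {"status": 0, "name": "Bob"}],
    # )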
class SMPacketServerNSSCSMS(SMPacket):
"""
Server command 138
Force change to Networking select music screen.
:param str gametype: Set specified gametype
:param str style: Set specified style
"""
command = smcommand.SMServerCommand.NSSCSMS
_payload = [
(smencoder.SMPayloadType.NT, "gametype", None),
(smencoder.SMPayloadType.NT, "style", None),
]
class SMPacketServerNSCUOpts(SMPacket):
"""
Server command 139 (reserved)
"""
command = smcommand.SMServerCommand.NSCUOpts
class SMPacketServerNSSMONL(SMPacket):
"""
Server command 140 (SMOnline Packet)
The SMLan packet 140 is a wrapper for the SMOnline packet.
:param packet: The SMOPacket to include
:type packet: SMOPacketServer
"""
command = smcommand.SMServerCommand.NSSMONL
_payload = [
(smencoder.SMPayloadType.PACKET, "packet", SMOPacketServer)
]
class SMPacketServerNSCFormatted(SMPacket):
"""
Server command 141 (Formatted information packet)
Send formatted information regarding the server back to the player.
:param str server_name: Server name
:param int server_port: Port the server is listening on
:param int nb_players: Number of players connected
"""
command = smcommand.SMServerCommand.NSCFormatted
_payload = [
(smencoder.SMPayloadType.NT, "server_name", None),
(smencoder.SMPayloadType.INT, "server_port", 2),
(smencoder.SMPayloadType.INT, "nb_players", 2),
]
class SMPacketServerNSCAttack(SMPacket):
"""
Server command 142 (Attack Client)
:param int player: Player number (0 or 1)
:param int time: Duration of the attack (in ms)
:param attack: Text describing modifiers
:type attack: str or smserver.smutils.smattack.SMAttack
    The list of available attacks is in the smattack module.
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> from smserver.smutils import smattack
>>> packet = smpacket.SMPacketServerNSCAttack(
... player=0, # Send the attack to the player 0
... time=1000, # The attack will last 1 second
    ...     attack='drunk', # Send a drunk attack
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x0c\\x8e\\x00\\x00\\x00\\x03\\xe8drunk\\x00'
>>> packet = smpacket.SMPacketServerNSCAttack(
... player=0,
... time=1000,
... attack=smattack.SMAttack.Drunk, # Use an Enum value
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x0c\\x8e\\x00\\x00\\x00\\x03\\xe8drunk\\x00'
"""
    def __init__(self, player=0, time=1000, attack=None):
        if attack is not None and not isinstance(attack, str):
            attack = attack.value
        SMPacket.__init__(self, player=player, time=time, attack=attack)
command = smcommand.SMServerCommand.NSCAttack
_payload = [
(smencoder.SMPayloadType.INT, "player", 1),
(smencoder.SMPayloadType.INT, "time", 4),
(smencoder.SMPayloadType.NT, "attack", None),
]
class SMPacketServerXMLPacket(SMPacket):
"""
Server command 143 (XMLPacket)
This packet contains data in XML format.
:param str xml: XML string
"""
command = smcommand.SMServerCommand.XMLPacket
_payload = [
(smencoder.SMPayloadType.NT, "xml", None),
]
| mit | -3,107,368,309,541,007,000 | 29.712121 | 99 | 0.605212 | false | 3.478248 | false | false | false |
isazi/Transpose | analysis/manage.py | 1 | 2009 | #!/usr/bin/env python
# Copyright 2014 Alessio Sclocco <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def get_tables(queue):
"""Get a list of the tables"""
queue.execute("SHOW TABLES")
return queue.fetchall()
def create_table(queue, table):
"""Create a table to store auto-tuning results for transpose."""
queue.execute("CREATE table " + table + "(id INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT, M INTEGER NOT NULL, N INTEGER NOT NULL, itemsPerBlock INTEGER NOT NULL, GBs FLOAT UNSIGNED NOT NULL, time FLOAT UNSIGNED NOT NULL, time_err FLOAT UNSIGNED NOT NULL, cov FLOAT UNSIGNED NOT NULL)")
def delete_table(queue, table):
"""Delete table."""
queue.execute("DROP table " + table)
def load_file(queue, table, input_file):
"""Load input_file into a table in the database."""
for line in input_file:
if (line[0] != "#") and (line[0] != "\n"):
items = line.split(sep=" ")
queue.execute("INSERT INTO " + table + " VALUES (NULL, " + items[0] + ", " + items[1] + ", " + items[2] + ", " + items[3] + ", " + items[4] + ", " + items[5] + ", " + items[6].rstrip("\n") + ")")
def print_results(confs):
"""Print the result tuples."""
for conf in confs:
for item in conf:
print(item, end=" ")
print()
def get_M_range(queue, table, N):
"""Return the M in the scenario."""
queue.execute("SELECT DISTINCT M FROM " + table + " WHERE (N = " + N + ") ORDER BY M")
return queue.fetchall()
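
# Illustrative usage sketch (not part of the original source), assuming the
# `queue` argument is a MySQLdb cursor; names and values are hypothetical:
#
#     import MySQLdb
#     connection = MySQLdb.connect(host="localhost", db="tuning")
#     cursor = connection.cursor()
#     create_table(cursor, "transpose_results")
#     with open("results.dat") as input_file:
#         load_file(cursor, "transpose_results", input_file)
#     print_results(get_M_range(cursor, "transpose_results", "2048"))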
| apache-2.0 | 2,591,675,039,704,562,000 | 41.744681 | 289 | 0.649079 | false | 3.555752 | false | false | false |
Emergen/zivios-agent | modules/ntp.py | 1 | 2447 | """
* Copyright (c) 2008 Zivios, LLC.
*
* This file is part of Zivios.
*
* Zivios is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Zivios is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Zivios. If not, see <http://www.gnu.org/licenses/>.
*
* @package ZiviosAgent
* @copyright Copyright (c) 2008 Zivios, LLC. (http://www.zivios.org)
* @license http://www.zivios.org/legal/license
* @version $Id: Exception.php 908 2008-08-25 11:03:00Z fkhan $
* @subpackage Core
"""
from twisted.web import xmlrpc
import logging
import os
import popen2
import re
import string
import time
import datetime
from twisted.python import log
import ZiviosAgent
class ntp(ZiviosAgent.ZiviosAgent):
def xmlrpc_addService(self):
print 'ntp addService function called'
def xmlrpc_serviceStatus(self):
response,regexcode,exitcode = self.command("statusntpcommand")
return (exitcode==0)
def xmlrpc_stopService(self):
response,regexcode,exitcode = self.command("stopntpcommand")
return (exitcode==0)
def xmlrpc_startService(self):
response,regexcode,exitcode = self.command("startntpcommand")
return (exitcode==0)
def xmlrpc_currentTime(self):
now = datetime.datetime.now()
return now.ctime()
def xmlrpc_getTimezone(self):
tz,tzm = time.tzname
return tzm;
def xmlrpc_getsyncstatus(self):
#sanitizing output!
response,regexcode,exitcode = self.command("ntpq");
resp = response.split('\n')
if (len(resp) <= 2):
return -1
del resp[0:2]
length = len(resp)
del resp[length-1]
retarray = []
for a in resp:
a = a.lstrip()
a = a.rstrip()
joinarray = re.split('\s+',a)
retarray.append(joinarray)
return retarray
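
    # For reference, a typical `ntpq -p` listing looks like this (all values
    # illustrative):
    #
    #      remote           refid      st t when poll reach  delay  offset jitter
    # ==========================================================================
    # *ntp1.example.o 10.0.0.1         2 u   12   64  377    0.521  -0.103  0.044
    #
    # The code above drops the two header lines and the trailing empty entry,
    # then splits each remaining row on whitespace into a list of columns.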
def xmlrpc_getGmtOffset(self):
return time.timezone/3600;
| gpl-3.0 | -4,665,748,381,308,129,000 | 28.841463 | 71 | 0.644054 | false | 3.718845 | false | false | false |
ancafarcas/superdesk-core | superdesk/datalayer.py | 1 | 4241 | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import superdesk
from eve.io.base import DataLayer
from eve.io.mongo import Mongo
from eve.utils import config, ParsedRequest
from eve_elastic import Elastic, InvalidSearchString # noqa
from flask import current_app
from superdesk.lock import lock, unlock
from superdesk.json_utils import SuperdeskJSONEncoder
class SuperdeskDataLayer(DataLayer):
"""Superdesk Data Layer.
Implements eve data layer interface, is used to make eve work with superdesk service layer.
It handles app initialization and later it forwards eve calls to respective service.
"""
serializers = {}
serializers.update(Mongo.serializers)
serializers.update({'datetime': Elastic.serializers['datetime']})
def init_app(self, app):
app.data = self # app.data must be set for locks to work
self.mongo = Mongo(app)
self.driver = self.mongo.driver
self.storage = self.driver
self.elastic = Elastic(app, serializer=SuperdeskJSONEncoder(), skip_index_init=True, retry_on_timeout=True)
def pymongo(self, resource=None, prefix=None):
return self.mongo.pymongo(resource, prefix)
def init_elastic(self, app):
"""Init elastic index.
It will create index and put mapping. It should run only once so locks are in place.
Thus mongo must be already setup before running this.
"""
with app.app_context():
if lock('elastic', expire=10):
try:
self.elastic.init_index(app)
finally:
unlock('elastic')
def find(self, resource, req, lookup):
return superdesk.get_resource_service(resource).get(req=req, lookup=lookup)
def find_all(self, resource, max_results=1000):
req = ParsedRequest()
req.max_results = max_results
return self._backend(resource).find(resource, req, None)
def find_one(self, resource, req, **lookup):
return superdesk.get_resource_service(resource).find_one(req=req, **lookup)
def find_one_raw(self, resource, _id):
return self._backend(resource).find_one_raw(resource, _id)
def find_list_of_ids(self, resource, ids, client_projection=None):
return self._backend(resource).find_list_of_ids(resource, ids, client_projection)
def insert(self, resource, docs, **kwargs):
return superdesk.get_resource_service(resource).create(docs, **kwargs)
def update(self, resource, id_, updates, original):
return superdesk.get_resource_service(resource).update(id=id_, updates=updates, original=original)
def update_all(self, resource, query, updates):
datasource = self.datasource(resource)
driver = self._backend(resource).driver
collection = driver.db[datasource[0]]
return collection.update(query, {'$set': updates}, multi=True)
def replace(self, resource, id_, document, original):
return superdesk.get_resource_service(resource).replace(id=id_, document=document, original=original)
def remove(self, resource, lookup=None):
if lookup is None:
lookup = {}
return superdesk.get_resource_service(resource).delete(lookup=lookup)
def is_empty(self, resource):
return self._backend(resource).is_empty(resource)
def _search_backend(self, resource):
if resource.endswith(current_app.config['VERSIONS']):
return
datasource = self.datasource(resource)
backend = config.SOURCES.get(datasource[0], {}).get('search_backend', None)
return getattr(self, backend) if backend is not None else None
def _backend(self, resource):
datasource = self.datasource(resource)
backend = config.SOURCES.get(datasource[0], {'backend': 'mongo'}).get('backend', 'mongo')
return getattr(self, backend)
def get_mongo_collection(self, resource):
return self.mongo.pymongo('users').db[resource]
| agpl-3.0 | 5,146,778,410,217,942,000 | 37.908257 | 115 | 0.677906 | false | 4.054493 | false | false | false |
AusTac/parma | b3/parsers/et.py | 1 | 7934 | # BigBrotherBot(B3) (www.bigbrotherbot.net)
# Copyright (C) 2005 Michael "ThorN" Thornton
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
__author__ = 'ThorN'
__version__ = '0.0.1'
import re, string
import b3
from b3.parsers.q3a.abstractParser import AbstractParser
import PunkBuster
class EtParser(AbstractParser):
gameName = 'et'
privateMsg = False
_settings = {}
_settings['line_length'] = 65
_settings['min_wrap_length'] = 90
_commands = {}
_commands['message'] = 'qsay %s %s ^8[pm]^7 %s'
_commands['say'] = 'qsay %s %s'
_commands['set'] = 'set %s %s'
_commands['kick'] = 'clientkick %s %s'
_commands['ban'] = 'banid %s %s'
_commands['tempban'] = 'clientkick %s %s'
_eventMap = {
'warmup' : b3.events.EVT_GAME_WARMUP,
'restartgame' : b3.events.EVT_GAME_ROUND_END
}
# remove the time off of the line
_lineClear = re.compile(r'^(?:[0-9:.]+\s?)?')
_lineFormats = (
#1579:03ConnectInfo: 0: E24F9B2702B9E4A1223E905BF597FA92: ^w[^2AS^w]^2Lead: 3: 3: 24.153.180.106:2794
re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>(?P<cid>[0-9]+):\s*(?P<pbid>[0-9A-Z]{32}):\s*(?P<name>[^:]+):\s*(?P<num1>[0-9]+):\s*(?P<num2>[0-9]+):\s*(?P<ip>[0-9.]+):(?P<port>[0-9]+))$', re.IGNORECASE),
#1536:17sayc: 0: ^w[^2AS^w]^2Lead: sorry...
#1536:34sayteamc: 17: ^1[^7DP^1]^4Timekiller: ^4ammo ^2here !!!!!
re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>(?P<cid>[0-9]+):\s*(?P<name>.+):\s+(?P<text>.*))$', re.IGNORECASE),
#1536:37Kill: 1 18 9: ^1klaus killed ^1[pura]fox.nl by MOD_MP40
re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>(?P<cid>[0-9]+)\s(?P<acid>[0-9]+)\s(?P<aweap>[0-9]+):\s*(?P<text>.*))$', re.IGNORECASE),
re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>(?P<cid>[0-9]+):\s*(?P<text>.*))$', re.IGNORECASE),
re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>(?P<cid>[0-9]+)\s(?P<text>.*))$', re.IGNORECASE),
re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>.*)$', re.IGNORECASE)
)
PunkBuster = None
def startup(self):
# add the world client
client = self.clients.newBaseClient()
client.name = 'World'
client.cid = -1
client.guid = self.gameName + ':WORLD'
client.maxLevel = -1
client.hide = True
self.clients.update(client)
self.PunkBuster = PunkBuster.PunkBuster(self)
def message(self, client, text):
try:
if client == None:
self.say(text)
elif client.cid == None:
pass
else:
lines = []
for line in self.getWrap(text, self._settings['line_length'], self._settings['min_wrap_length']):
lines.append('qsay %s ^8[%s^8]^7 %s' % (self.msgPrefix, client.exactName, line))
self.writelines(lines)
except:
pass
# join
#1579:03ConnectInfo: 0: E24F9B2702B9E4A1223E905BF597FA92: ^w[^2AS^w]^2Lead: 3: 3: 24.153.180.106:2794
def OnConnectinfo(self, action, data, match=None):
guid = match.group('pbid')
client = self.clients.getByCID(match.group('cid'))
if client:
if client.guid == guid:
# this is the same player
if client.exactName != match.group('name'):
client.exactName = match.group('name')
client.setName(self.stripColors(client.exactName))
return b3.events.Event(b3.events.EVT_CLIENT_JOIN, None, client)
else:
# disconnect the existing client
self.verbose('disconnect the existing client %s %s => %s %s', match.group('cid'), guid, client.cid, client)
client.disconnect()
client = self.clients.newBaseClient()
client.cid = match.group('cid')
#if match.group('guid') == '0':
# client.guid = None
#else:
client.pbid = client.guid = self.gameName + ':' + guid
client.ip = match.group('ip')
client.exactName = match.group('name')
client.name = self.stripColors(client.exactName)
self.clients.update(client)
#1579:03ClientUserinfoChangedGUID: 0 E24F9B2702B9E4A1223E905BF597FA92 n\^w[^2AS^w]^2Lead\t\3\c\3\r\0\m\0000000\s\0000000\dn\\dr\0\w\3\lw\3\sw\7\mu\0\ref\0
def OnClientuserinfochangedguid(self, action, data, match=None):
client = self.clients.getByCID(match.group('cid'))
cid, pbid, data = string.split(data, ' ', 2)
bclient = self.parseUserInfo(cid + ' ' + data)
if bclient:
self.clients.update(bclient, client)
def OnGib(self, action, data, match=None):
#1538:42Gib: 5 10 1: ^0Apache Death gibbed ^,^t^9^8that ^2guy by MOD_MACHINEGUN
victim = self.clients.getByCID(match.group('cid'))
if not victim:
self.debug('No victim')
#self.OnJ(action, data, match)
return None
attacker = self.clients.getByCID(match.group('acid'))
if not attacker:
self.debug('No attacker')
return None
event = b3.events.EVT_CLIENT_GIB
if attacker.cid == victim.cid:
event = b3.events.EVT_CLIENT_GIB_SELF
elif attacker.team != b3.TEAM_UNKNOWN and attacker.team == victim.team:
event = b3.events.EVT_CLIENT_GIB_TEAM
return b3.events.Event(event, (100, match.group('aweap'), ''), attacker, victim)
def OnKill(self, action, data, match=None):
#1536:37Kill: 1 18 9: ^1klaus killed ^1[pura]fox.nl by MOD_MP40
victim = self.clients.getByCID(match.group('cid'))
if not victim:
self.debug('No victim')
#self.OnJ(action, data, match)
return None
attacker = self.clients.getByCID(match.group('acid'))
if not attacker:
self.debug('No attacker')
return None
event = b3.events.EVT_CLIENT_KILL
if attacker.cid == victim.cid:
event = b3.events.EVT_CLIENT_SUICIDE
elif attacker.team != b3.TEAM_UNKNOWN and attacker.team == victim.team:
event = b3.events.EVT_CLIENT_KILL_TEAM
return b3.events.Event(event, (100, match.group('aweap'), ''), attacker, victim)
def OnSayteamc(self, action, data, match=None):
#1536:34sayteamc: 17: ^1[^7DP^1]^4Timekiller: ^4ammo ^2here !!!!!
client = self.clients.getByCID(match.group('cid'))
if not client:
self.debug('No client - attempt join')
#self.OnJ(action, data, match)
#client = self.clients.getByCID(match.group('cid'))
#if not client:
return None
return b3.events.Event(b3.events.EVT_CLIENT_TEAM_SAY, match.group('text'), client)
def OnSayc(self, action, data, match=None):
#1536:17sayc: 0: ^w[^2AS^w]^2Lead: sorry...
client = self.clients.getByCID(match.group('cid'))
if not client:
self.debug('No client - attempt join')
#self.OnJ(action, data, match)
#client = self.clients.getByCID(match.group('cid'))
#if not client:
return None
        return b3.events.Event(b3.events.EVT_CLIENT_SAY, match.group('text'), client)
| gpl-2.0 | -8,859,111,680,739,127,000 | 38.874372 | 209 | 0.584069 | false | 3.090767 | false | false | false |
skakri/django-unstructured | wiki/core/permissions.py | 1 | 3004 | from wiki.conf import settings
###############################
# TARGET PERMISSION HANDLING #
###############################
#
# All functions are:
# can_something(target, user)
# => True/False
#
# All functions can be replaced by pointing their relevant
# settings variable in wiki.conf.settings to a callable(target, user)
def can_read(target, user):
if callable(settings.CAN_READ):
return settings.CAN_READ(target, user)
else:
# Deny reading access to deleted entities if user has no delete access
is_deleted = target.current_revision and target.deleted
if is_deleted and not target.can_delete(user):
return False
# Check access for other users...
if user.is_anonymous() and not settings.ANONYMOUS:
return False
elif target.other_read:
return True
elif user.is_anonymous():
return False
if user == target.owner:
return True
if target.group_read:
if target.group and user.groups.filter(id=target.group.id).exists():
return True
if target.can_moderate(user):
return True
return False
def can_write(target, user):
if callable(settings.CAN_WRITE):
return settings.CAN_WRITE(target, user)
# Check access for other users...
if user.is_anonymous() and not settings.ANONYMOUS_WRITE:
return False
elif target.other_write:
return True
elif user.is_anonymous():
return False
if user == target.owner:
return True
if target.group_write:
if target.group and user and user.groups.filter(id=target.group.id).exists():
return True
if target.can_moderate(user):
return True
return False
def can_assign(target, user):
if callable(settings.CAN_ASSIGN):
return settings.CAN_ASSIGN(target, user)
return not user.is_anonymous() and user.has_perm('wiki.assign')
def can_assign_owner(target, user):
if callable(settings.CAN_ASSIGN_OWNER):
return settings.CAN_ASSIGN_OWNER(target, user)
return False
def can_change_permissions(target, user):
if callable(settings.CAN_CHANGE_PERMISSIONS):
return settings.CAN_CHANGE_PERMISSIONS(target, user)
return (
not user.is_anonymous() and (
target.owner == user or
user.has_perm('wiki.assign')
)
)
def can_delete(target, user):
if callable(settings.CAN_DELETE):
return settings.CAN_DELETE(target, user)
return not user.is_anonymous() and target.can_write(user)
def can_moderate(target, user):
if callable(settings.CAN_MODERATE):
return settings.CAN_MODERATE(target, user)
return not user.is_anonymous() and user.has_perm('wiki.moderate')
def can_admin(target, user):
if callable(settings.CAN_ADMIN):
return settings.CAN_ADMIN(target, user)
return not user.is_anonymous() and user.has_perm('wiki.admin')
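
# Illustrative usage sketch (not part of the original source): gating a view,
# assuming `article` is a target instance wired to these permission checks.
#
#     from django.core.exceptions import PermissionDenied
#
#     def article_detail(request, article):
#         if not can_read(article, request.user):
#             raise PermissionDenied
#         ...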
| gpl-3.0 | 412,424,334,553,834,300 | 31.652174 | 85 | 0.632823 | false | 3.963061 | false | false | false |
mobify/iterstuff | iterstuff/recipes.py | 1 | 4131 | from __future__ import absolute_import
from iterstuff.lookahead import Lookahead
def repeatable_takewhile(predicate, iterable):
"""
Return successive entries from an iterable as long as the
predicate evaluates to true for each entry.
Like itertools.takewhile, but does not consume the first
element of the iterable that fails the predicate test.
:param predicate: a single-element callable that returns True
for elements that satisfy a condition, False for those that
do not.
:param iterable: must be a Lookahead
"""
# Assert that the iterable is a Lookahead. The act of wrapping
# an iterable in a Lookahead consumes the first element, so we
# cannot do the wrapping inside this function.
if not isinstance(iterable, Lookahead):
raise TypeError("The iterable parameter must be a Lookahead")
# Use 'peek' to check if the next element will satisfy the
# predicate, and yield while this is True, or until we reach
# the end of the iterable.
while (not iterable.atend) and predicate(iterable.peek):
yield iterable.next()
def batch(iterable, size):
"""
Yield iterables for successive slices of `iterable`, each containing
up to `size` items, with the last being less than `size` if there are
not sufficient items in `iterable`. Pass over the input iterable once
only. Yield iterables, not lists.
@note: each output iterable must be consumed in full before the next
one is yielded. So list(batch(xrange(10), 3)) won't work as expected,
because the iterables are not consumed.
@param iterable: an input iterable.
@param size: the maximum number of items yielded by any output iterable.
"""
# Wrap an enumeration of the iterable in a Lookahead so that it
# yields (count, element) tuples
it = Lookahead(enumerate(iterable))
while not it.atend:
# Set the end_count using the count value
# of the next element.
end_count = it.peek[0] + size
# Yield a generator that will then yield up to
# 'size' elements from 'it'.
yield (
element
for counter, element in repeatable_takewhile(
# t[0] is the count part of each element
lambda t: t[0] < end_count,
it
)
)
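
# Example of correct consumption (not part of the original source): each
# yielded iterable must be drained before the next one is requested.
#
#     for chunk in batch(xrange(10), 3):
#         print list(chunk)   # [0, 1, 2], then [3, 4, 5], [6, 7, 8], [9]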
def chunked(i, f=lambda _x: _x):
"""
Given an iterable i, apply f over it to extract a value from
each element and yield successive iterables where the result
of f for all elements is the same.
In simpler language, if i is an iterable sorted on some key, yield
chunks of that list where the key value is the same, each chunk being
a separate iterable.
Note that this function yields B{iterators}, not lists, and they refer
back to the iterator passed in, so each B{must} be consumed completely
before the next one is requested.
@param i: an iterable.
@param f: a function to be applied to each element of the iterable to
extract the key.
"""
# Build a generator that return tuples of (element, key-of-element),
# so that we only apply the key method to each element once.
it = Lookahead((_x, f(_x)) for _x in i)
def takechunk():
"""
A generator closure that will yield values while the keys remain
the same. Note that we cannot use L{itertools.takewhile} for this,
because that takes elements and B{then} checks the predicate, so
successive calls to itertools.takewhile for the same generator will
skip elements.
"""
while True:
# Always yield the first element: if we're at the end of the
# generator, this will raise StopIteration and we're done.
(_x, key) = it.next()
yield _x
# Check the lookahead's peek value to see if we should break now.
# We also break when we're at the end of the generator.
if it.atend or key != it.peek[1]:
break
# Yield successive instances of takechunk.
while not it.atend:
yield takechunk()
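
# Example (not part of the original source), grouping by identity, the
# default key:
#
#     for group in chunked([1, 1, 2, 3, 3]):
#         print list(group)   # [1, 1], then [2], then [3, 3]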
| mit | -8,346,311,089,307,318,000 | 36.554545 | 77 | 0.658678 | false | 4.404051 | false | false | false |
mvpoland/django-smsgateway | smsgateway/views.py | 1 | 1834 | from django import forms
from django.http import Http404
from django.conf import settings
from django.shortcuts import render
from django.contrib.admin.views.decorators import staff_member_required
from smsgateway import send, __version__
from smsgateway.backends import get_backend
accounts = getattr(settings, 'SMSGATEWAY_ACCOUNTS', {})
class BackendDebugForm(forms.Form):
account = forms.ChoiceField(choices=[(k, k) for k in list(accounts.keys()) if k != '__default__'])
recipients = forms.CharField(help_text='Separate multiple recipients with a semicolon (;).')
message = forms.CharField(widget=forms.widgets.Textarea())
signature = forms.CharField()
@staff_member_required
def backend_debug(request):
"""
A form to let you send an SMS for debugging purposes.
"""
context = {}
if request.method == 'POST':
form = BackendDebugForm(request.POST)
if form.is_valid():
success = send(
form.cleaned_data['recipients'].split(';'),
form.cleaned_data['message'],
form.cleaned_data['signature'],
form.cleaned_data['account']
)
if success:
context.update({'message': 'Text message sent'})
else:
context.update({'message': 'Sending failed'})
else:
form = BackendDebugForm()
context.update({
'form': form,
'version': __version__,
})
return render(request, 'smsgateway/backend_debug.html', context)
def backend_handle_incoming(request, backend_name):
"""
Call the backend's handle_incoming method.
"""
if backend_name == 'debug':
return backend_debug(request)
b = get_backend(backend_name)
if b is None:
raise Http404
return b.handle_incoming(request)
| bsd-3-clause | 7,037,312,430,688,589,000 | 29.566667 | 102 | 0.632497 | false | 4.196796 | false | false | false |
wcota/dynSIS-py | dynamics.py | 1 | 8344 | #!/usr/bin/env python
# ! ## File: dynamics.py
# ! ## See README.md for more information and use
# !-----------------------------------------------------------------------------
# ! SIS epidemic model algorithm based on the article
# ! Computer Physics Communications 219C (2017) pp. 303-312
# ! "Optimized Gillespie algorithms for the simulation of
# ! Markovian epidemic processes on large and heterogeneous networks"
# ! Copyright (C) 2017 Wesley Cota, Silvio C. Ferreira
# !
# ! Please cite the above cited paper (available at <http://dx.doi.org/10.1016/j.cpc.2017.06.007> )
# ! as reference to our code.
# !
# ! This program is free software: you can redistribute it and/or modify
# ! it under the terms of the GNU General Public License as published by
# ! the Free Software Foundation, either version 3 of the License, or
# ! (at your option) any later version.
# !
# ! This program is distributed in the hope that it will be useful,
# ! but WITHOUT ANY WARRANTY; without even the implied warranty of
# ! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# ! GNU General Public License for more details.
# !
# ! You should have received a copy of the GNU General Public License
# ! along with this program. If not, see <http://www.gnu.org/licenses/>.
# !-----------------------------------------------------------------------------
# ! Author : Wesley Cota
# ! Email : [email protected]
# ! Date : 27 Mar 2017
# ! Version : 1.0
# !-----------------------------------------------------------------------------
# ! See README.md for more details
# ! This code is available at <https://github.com/wcota/dynSIS-py>
# ! For performance, see <https://github.com/wcota/dynSIS> (Fortran implementation)
# ! For NetworkX library, see <https://github.com/wcota/dynSIS-networkx> (NetworkX implementation)
from network import *
from tools import *
from math import log
import sys
print( '################################################################################',
'######### Optimized Gillespie algorithms for the simulation of Markovian ######',
'####### epidemic processes on large and heterogeneous networks: SIS-OGA. #######',
'##============ Copyright (C) 2017 Wesley Cota, Silvio C. Ferreira ============##',
'##===== Paper available at <http://dx.doi.org/10.1016/j.cpc.2017.06.007> =====##',
'##======= The codes are available at <https://github.com/wcota/dynSIS> =======##',
'##======== Please cite the above cited paper as reference to our code ========##',
'##=== This code is under GNU General Public License. Please see README.md. ===##',
'################################################################################',
'',
sep='\n')
# READING PARAMETERS
if len(sys.argv) < 3:
print_error('You must enter input and output names as arguments!')
fnInput = sys.argv[1]
fnOutput = sys.argv[2]
print_info('Reading dynamical parameters...')
dynp_sam = int(input('How many dynamics samples? '))
dynp_lb = float(input('Value of infection rate lambda (mu is defined as equal to 1): '))
dynp_tmax = int(input('Maximum time steps (it stops if the absorbing state is reached): '))
dynp_pINI = float(input('Fraction of infected vertices on the network as initial condition (randomly \
chosen for each sample): '))
# / READING PARAMETERS
# LOADING NETWORK
print_info('Loading network to memory...')
netw = readEdges(fnInput)
print_info('Everything ok!')
# / LOADING NETWORK
# PREPARING THE NECESSARY THINGS
net_kmax = max(netw.k) # Used in the rejection probability
avg_rho = np.zeros(dynp_tmax, np.float64) # Accumulator for rho at each time t, averaged over samples
avg_t = np.zeros(dynp_tmax, np.float64)
avg_sam = np.zeros(dynp_tmax, np.int) # number of samples for each time t
avg_samSurv = np.zeros(dynp_tmax, np.int) # and of surviving ones
dyn_VI = np.zeros(netw.size, np.int) # list V^I
dyn_sig = np.zeros(netw.size, np.int) # sigma
# / PREPARING THE NECESSARY THINGS
# RUNNING DYNAMICS
print_info('Running dynamics...', True)
dyn_dt_pos_max = 0 # Auxiliary
for sam in range(1,dynp_sam+1):
print_info('Sample #'+str(sam), True)
# Initial conditions
print_info('Initial condition...')
dyn_sig[:] = 0.0
dyn_VI[:] = 0.0
dyn_NI = 0 # N_I
dyn_Nk = 0 # N_k
# Sort vertices and apply the initial condition
for i in range(0, int(netw.size*dynp_pINI)):
while True:
ver = np.random.randint(0,netw.size)
if dyn_sig[ver] == 0:
dyn_VI[dyn_NI] = ver
dyn_NI += 1
dyn_sig[ver] = 1
dyn_Nk += netw.k[ver]
break
# Run dynamics
dyn_t = 0
dyn_dt = 0.0
dyn_dt_pos = 1
print_info('Running...')
while dyn_t <= dynp_tmax and dyn_NI > 0:
# SIS-OGA ALGORITHM
# Calculate the total rate
dyn_R = (dyn_NI + 1.0*dynp_lb * dyn_Nk)
# Select the time step
rnd = max(np.random.uniform(),1e-12) # Avoid u = 0
dyn_dt = -log(rnd) / dyn_R
# Update the time
dyn_t += dyn_dt
# Probability m to heal
dyn_m = 1.0*dyn_NI / dyn_R
if np.random.uniform() < dyn_m: # Select a random occupied vertex and heal.
pos_inf = np.random.randint(0,dyn_NI)
ver = dyn_VI[pos_inf]
# Then, heal it
dyn_sig[ver] = 0
dyn_Nk -= netw.k[ver]
dyn_NI -= 1
dyn_VI[pos_inf] = dyn_VI[dyn_NI]
else: # If not, try to infect: w = 1 - m
# Select the infected vertex i with prob. proportional to k_i
while True:
pos_inf = np.random.randint(0,dyn_NI)
ver = dyn_VI[pos_inf]
if np.random.uniform() < 1.0*netw.k[ver] / (1.0*net_kmax):
break
# Select one of its neighbors
pos_nei = np.random.randint(netw.ini[ver], netw.ini[ver] + netw.k[ver])
ver = netw.con[pos_nei]
if dyn_sig[ver] == 0: # if not a phantom process, infect
dyn_sig[ver] = 1
dyn_Nk += netw.k[ver]
dyn_VI[dyn_NI] = ver # Add one element to list
dyn_NI += 1 # Increase by 1 the list
        # Save the dynamics once per unit of time
while (dyn_t >= dyn_dt_pos): # Save data
avg_rho[dyn_dt_pos - 1] += 1.0*dyn_NI/netw.size
avg_t[dyn_dt_pos - 1] += dyn_t
avg_sam[dyn_dt_pos - 1] += 1
if dyn_NI != 0:
avg_samSurv[dyn_dt_pos - 1] += 1
dyn_dt_pos_max = max(dyn_dt_pos,dyn_dt_pos_max) # The maximum t with non-null rho
dyn_dt_pos += 1
    # If an absorbing state is reached (dyn_NI == 0), the while loop above exits
# Write output file
flOutput = open(fnOutput, 'wt')
print( '## ***** Algorithm used: Optimized Gillespie Algorithm for SIS (SIS-OGA, Python) *****',
'#@ Network file: '+fnInput,
'#@ Number of nodes: '+str(netw.size),
'#@ Number of edges: '+str(netw.skk),
'#@ Samples: '+str(dynp_sam),
'#! Infection rate (lambda): '+str(dynp_lb),
'#! Maximum time steps: '+str(dynp_tmax),
'#! Fraction of infected vertices (initial condition): '+str(dynp_pINI),
sep='\n',
file=flOutput)
for dt_pos in range(0,dyn_dt_pos_max):
print(1.0*avg_t[dt_pos]/avg_sam[dt_pos], 1.0*avg_rho[dt_pos]/(1.0*sam),
file=flOutput)
# If you use /avg_samSurv[dt_pos] instead of /(1.0*sam) to write avg_rho (2nd column), you have
# QS analysis :)
flOutput.close()
# / RUNNING DYNAMICS
print_info('')
print_info('Everything ok!',True)
print_info('Input file (edges list): '+ fnInput)
print_info('Output file: '+ fnOutput)
print_info('')
print_info('*****Algorithm used: Optimized Gillespie Algorithm for SIS (SIS-OGA, Python)*****')
print_info('Codes available at <https://github.com/wcota/dynSIS>.')
| gpl-3.0 | 8,997,114,562,609,873,000 | 40.103448 | 104 | 0.539909 | false | 3.400163 | false | false | false |
NMTHydro/Recharge | utils/TAW_optimization_subroutine/create_geo_info_file.py | 1 | 2526 | # ===============================================================================
# Copyright 2018 gabe-parrish
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= standard library imports ========================
import os
import gdal
import sys
import yaml
# ============= local library imports ===========================
def extract_geo_info(geotiff_path):
""""""
gdal.AllRegister()
# open the raster datasource
datasource_obj = gdal.Open(geotiff_path)
if datasource_obj is None:
print "Can't open the datasource from {}".format(geotiff_path)
sys.exit(1)
# get the size of image (for reading)
rows = datasource_obj.RasterYSize
cols = datasource_obj.RasterXSize
# x - cols, y - rows
dimensions = (cols, rows)
# get the projection
proj = datasource_obj.GetProjection()
    # get georeference info to eventually calculate the offset:
transform = datasource_obj.GetGeoTransform()
geo_dict = {'geotransform': transform, 'dimensions': dimensions, 'projection': proj}
return geo_dict
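
# For a hypothetical 30 m resolution raster, the returned dict would resemble
# (all values illustrative):
#
# {'geotransform': (354000.0, 30.0, 0.0, 3980000.0, 0.0, -30.0),
#  'dimensions': (400, 300),
#  'projection': '<WKT projection string>'}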
def main(sample_file, output_path, filename):
"""
Taking a ETRM domain and saving the pertinent geo information to a text or yml file
    :param sample_file: filepath to geotiff representing the ETRM model domain for the TAW optimization
:return:
"""
geo_dict = extract_geo_info(sample_file)
# write_raster(array, geotransform, output_path, output_filename, dimensions, projection)
yml_file = os.path.join(output_path, filename)
with open(yml_file, 'w') as w_file:
yaml.dump(geo_dict, w_file)
if __name__ == "__main__":
sample_geotiff_file_path = '/Volumes/Seagate_Expansion_Drive/ETRM_espanola_aoi_inputs/statics/taw_reduced.tif'
output_path = '/Volumes/Seagate_Expansion_Drive/taw_optimization_work_folder'
    main(sample_file=sample_geotiff_file_path, output_path=output_path, filename='geo_info_espanola.yml')
| apache-2.0 | -6,306,104,769,306,550,000 | 33.148649 | 114 | 0.644101 | false | 3.934579 | false | false | false |
gurneyalex/odoo | addons/mrp/wizard/mrp_product_produce.py | 3 | 8943 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime
from odoo import api, fields, models, _
from odoo.exceptions import UserError
from odoo.tools import float_compare
class MrpProductProduce(models.TransientModel):
_name = "mrp.product.produce"
_description = "Record Production"
_inherit = ["mrp.abstract.workorder"]
@api.model
def default_get(self, fields):
res = super(MrpProductProduce, self).default_get(fields)
production = self.env['mrp.production']
production_id = self.env.context.get('default_production_id') or self.env.context.get('active_id')
if production_id:
production = self.env['mrp.production'].browse(production_id)
if production.exists():
serial_finished = (production.product_id.tracking == 'serial')
todo_uom = production.product_uom_id.id
todo_quantity = self._get_todo(production)
if serial_finished:
todo_quantity = 1.0
if production.product_uom_id.uom_type != 'reference':
todo_uom = self.env['uom.uom'].search([('category_id', '=', production.product_uom_id.category_id.id), ('uom_type', '=', 'reference')]).id
if 'production_id' in fields:
res['production_id'] = production.id
if 'product_id' in fields:
res['product_id'] = production.product_id.id
if 'product_uom_id' in fields:
res['product_uom_id'] = todo_uom
if 'serial' in fields:
res['serial'] = bool(serial_finished)
if 'qty_producing' in fields:
res['qty_producing'] = todo_quantity
if 'consumption' in fields:
res['consumption'] = production.bom_id.consumption
return res
serial = fields.Boolean('Requires Serial')
product_tracking = fields.Selection(related="product_id.tracking")
is_pending_production = fields.Boolean(compute='_compute_pending_production')
move_raw_ids = fields.One2many(related='production_id.move_raw_ids', string="PO Components")
move_finished_ids = fields.One2many(related='production_id.move_finished_ids')
raw_workorder_line_ids = fields.One2many('mrp.product.produce.line',
'raw_product_produce_id', string='Components')
finished_workorder_line_ids = fields.One2many('mrp.product.produce.line',
'finished_product_produce_id', string='By-products')
production_id = fields.Many2one('mrp.production', 'Manufacturing Order',
required=True, ondelete='cascade')
@api.depends('qty_producing')
def _compute_pending_production(self):
""" Compute if it exits remaining quantity once the quantity on the
current wizard will be processed. The purpose is to display or not
button 'continue'.
"""
for product_produce in self:
remaining_qty = product_produce._get_todo(product_produce.production_id)
product_produce.is_pending_production = remaining_qty - product_produce.qty_producing > 0.0
def continue_production(self):
""" Save current wizard and directly opens a new. """
self.ensure_one()
self._record_production()
action = self.production_id.open_produce_product()
action['context'] = {'default_production_id': self.production_id.id}
return action
def action_generate_serial(self):
self.ensure_one()
product_produce_wiz = self.env.ref('mrp.view_mrp_product_produce_wizard', False)
self.finished_lot_id = self.env['stock.production.lot'].create({
'product_id': self.product_id.id,
'company_id': self.production_id.company_id.id
})
return {
'name': _('Produce'),
'type': 'ir.actions.act_window',
'view_mode': 'form',
'res_model': 'mrp.product.produce',
'res_id': self.id,
'view_id': product_produce_wiz.id,
'target': 'new',
}
def do_produce(self):
""" Save the current wizard and go back to the MO. """
self.ensure_one()
self._record_production()
self._check_company()
return {'type': 'ir.actions.act_window_close'}
def _get_todo(self, production):
""" This method will return remaining todo quantity of production. """
main_product_moves = production.move_finished_ids.filtered(lambda x: x.product_id.id == production.product_id.id)
todo_quantity = production.product_qty - sum(main_product_moves.mapped('quantity_done'))
todo_quantity = todo_quantity if (todo_quantity > 0) else 0
return todo_quantity
def _record_production(self):
        # Check that all the product_produce lines have a move_id (the user can
        # add products to consume directly in the wizard)
for line in self._workorder_line_ids():
if not line.move_id:
# Find move_id that would match
if line.raw_product_produce_id:
moves = line.raw_product_produce_id.move_raw_ids
else:
moves = line.finished_product_produce_id.move_finished_ids
move_id = moves.filtered(lambda m: m.product_id == line.product_id and m.state not in ('done', 'cancel'))
if not move_id:
# create a move to assign it to the line
production = line._get_production()
if line.raw_product_produce_id:
values = {
'name': production.name,
'reference': production.name,
'product_id': line.product_id.id,
'product_uom': line.product_uom_id.id,
'location_id': production.location_src_id.id,
'location_dest_id': self.product_id.property_stock_production.id,
'raw_material_production_id': production.id,
'group_id': production.procurement_group_id.id,
'origin': production.name,
'state': 'confirmed',
'company_id': production.company_id.id,
}
else:
values = production._get_finished_move_value(line.product_id.id, 0, line.product_uom_id.id)
move_id = self.env['stock.move'].create(values)
line.move_id = move_id.id
# because of an ORM limitation (fields on transient models are not
# recomputed by updates in non-transient models), the related fields on
# this model are not recomputed by the creations above
self.invalidate_cache(['move_raw_ids', 'move_finished_ids'])
# Save product produce lines data into stock moves/move lines
for wizard in self:
quantity = wizard.qty_producing
if float_compare(quantity, 0, precision_rounding=self.product_uom_id.rounding) <= 0:
raise UserError(_("The production order for '%s' has no quantity specified.") % self.product_id.display_name)
self._update_finished_move()
self._update_moves()
self.production_id.filtered(lambda mo: mo.state == 'confirmed').write({
'date_start': datetime.now(),
})
class MrpProductProduceLine(models.TransientModel):
_name = 'mrp.product.produce.line'
_inherit = ["mrp.abstract.workorder.line"]
_description = "Record production line"
raw_product_produce_id = fields.Many2one('mrp.product.produce', 'Component in Produce wizard')
finished_product_produce_id = fields.Many2one('mrp.product.produce', 'Finished Product in Produce wizard')
@api.model
def _get_raw_workorder_inverse_name(self):
return 'raw_product_produce_id'
@api.model
def _get_finished_workoder_inverse_name(self):
return 'finished_product_produce_id'
def _get_final_lots(self):
product_produce_id = self.raw_product_produce_id or self.finished_product_produce_id
return product_produce_id.finished_lot_id | product_produce_id.finished_workorder_line_ids.mapped('lot_id')
def _get_production(self):
product_produce_id = self.raw_product_produce_id or self.finished_product_produce_id
return product_produce_id.production_id
@api.onchange('lot_id')
def _onchange_lot_id(self):
""" When the user is encoding a produce line for a tracked product, we apply some logic to
help him. This onchange will automatically switch `qty_done` to 1.0.
"""
if self.product_id.tracking == 'serial':
if self.lot_id:
self.qty_done = 1
else:
self.qty_done = 0
| agpl-3.0 | -4,611,204,496,198,219,300 | 45.82199 | 158 | 0.603377 | false | 4.055782 | false | false | false |
makinacorpus/reportlab-ecomobile | src/reportlab/graphics/charts/doughnut.py | 1 | 13260 | #Copyright ReportLab Europe Ltd. 2000-2004
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/graphics/charts/doughnut.py
# doughnut chart
__version__=''' $Id$ '''
__doc__="""Doughnut chart
Produces a circular chart like the doughnut charts produced by Excel.
Can handle multiple series (which produce concentric 'rings' in the chart).
"""
import copy
from math import sin, cos, pi
from types import ListType, TupleType
from reportlab.lib import colors
from reportlab.lib.validators import isColor, isNumber, isListOfNumbersOrNone,\
isListOfNumbers, isColorOrNone, isString,\
isListOfStringsOrNone, OneOf, SequenceOf,\
isBoolean, isListOfColors,\
isNoneOrListOfNoneOrStrings,\
isNoneOrListOfNoneOrNumbers,\
isNumberOrNone
from reportlab.lib.attrmap import *
from reportlab.pdfgen.canvas import Canvas
from reportlab.graphics.shapes import Group, Drawing, Line, Rect, Polygon, Ellipse, \
Wedge, String, SolidShape, UserNode, STATE_DEFAULTS
from reportlab.graphics.widgetbase import Widget, TypedPropertyCollection, PropHolder
from reportlab.graphics.charts.piecharts import AbstractPieChart, WedgeProperties, _addWedgeLabel
from reportlab.graphics.charts.textlabels import Label
from reportlab.graphics.widgets.markers import Marker
class SectorProperties(WedgeProperties):
"""This holds descriptive information about the sectors in a doughnut chart.
It is not to be confused with the 'sector itself'; this just holds
a recipe for how to format one, and does not allow you to hack the
angles. It can format a genuine Sector object for you with its
format method.
"""
_attrMap = AttrMap(BASE=WedgeProperties,
)
class Doughnut(AbstractPieChart):
_attrMap = AttrMap(
x = AttrMapValue(isNumber, desc='X position of the chart within its container.'),
y = AttrMapValue(isNumber, desc='Y position of the chart within its container.'),
        width = AttrMapValue(isNumber, desc='width of doughnut bounding box. Need not be same as height.'),
        height = AttrMapValue(isNumber, desc='height of doughnut bounding box. Need not be same as width.'),
data = AttrMapValue(None, desc='list of numbers defining sector sizes; need not sum to 1'),
labels = AttrMapValue(isListOfStringsOrNone, desc="optional list of labels to use for each data point"),
startAngle = AttrMapValue(isNumber, desc="angle of first slice; like the compass, 0 is due North"),
direction = AttrMapValue(OneOf('clockwise', 'anticlockwise'), desc="'clockwise' or 'anticlockwise'"),
slices = AttrMapValue(None, desc="collection of sector descriptor objects"),
simpleLabels = AttrMapValue(isBoolean, desc="If true(default) use String not super duper WedgeLabel"),
)
def __init__(self):
self.x = 0
self.y = 0
self.width = 100
self.height = 100
self.data = [1,1]
self.labels = None # or list of strings
self.startAngle = 90
self.direction = "clockwise"
self.simpleLabels = 1
self.slices = TypedPropertyCollection(SectorProperties)
self.slices[0].fillColor = colors.darkcyan
self.slices[1].fillColor = colors.blueviolet
self.slices[2].fillColor = colors.blue
self.slices[3].fillColor = colors.cyan
def demo(self):
d = Drawing(200, 100)
dn = Doughnut()
dn.x = 50
dn.y = 10
dn.width = 100
dn.height = 80
dn.data = [10,20,30,40,50,60]
dn.labels = ['a','b','c','d','e','f']
dn.slices.strokeWidth=0.5
dn.slices[3].popout = 10
dn.slices[3].strokeWidth = 2
dn.slices[3].strokeDashArray = [2,2]
dn.slices[3].labelRadius = 1.75
dn.slices[3].fontColor = colors.red
dn.slices[0].fillColor = colors.darkcyan
dn.slices[1].fillColor = colors.blueviolet
dn.slices[2].fillColor = colors.blue
dn.slices[3].fillColor = colors.cyan
dn.slices[4].fillColor = colors.aquamarine
dn.slices[5].fillColor = colors.cadetblue
dn.slices[6].fillColor = colors.lightcoral
d.add(dn)
return d
def normalizeData(self, data=None):
from operator import add
sum = float(reduce(add,data,0))
return abs(sum)>=1e-8 and map(lambda x,f=360./sum: f*x, data) or len(data)*[0]
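        # e.g. normalizeData([10, 20, 30]) -> [60.0, 120.0, 180.0]:
        # slice sizes rescaled to degrees summing to 360.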
def makeSectors(self):
# normalize slice data
if type(self.data) in (ListType, TupleType) and type(self.data[0]) in (ListType, TupleType):
#it's a nested list, more than one sequence
normData = []
n = []
for l in self.data:
t = self.normalizeData(l)
normData.append(t)
n.append(len(t))
self._seriesCount = max(n)
else:
normData = self.normalizeData(self.data)
n = len(normData)
self._seriesCount = n
#labels
if self.labels is None:
labels = []
if type(n) not in (ListType,TupleType):
labels = [''] * n
else:
for m in n:
labels = list(labels) + [''] * m
else:
labels = self.labels
#there's no point in raising errors for less than enough labels if
#we silently create all for the extreme case of no labels.
if type(n) not in (ListType,TupleType):
i = n-len(labels)
if i>0:
labels = list(labels) + [''] * i
else:
tlab = 0
for m in n:
tlab += m
i = tlab-len(labels)
if i>0:
labels = list(labels) + [''] * i
xradius = self.width/2.0
yradius = self.height/2.0
centerx = self.x + xradius
centery = self.y + yradius
if self.direction == "anticlockwise":
whichWay = 1
else:
whichWay = -1
g = Group()
sn = 0
startAngle = self.startAngle #% 360
styleCount = len(self.slices)
if type(self.data[0]) in (ListType, TupleType):
#multi-series doughnut
iradius = (self.height/5.0)/len(self.data)
for series in normData:
i = 0
for angle in series:
endAngle = (startAngle + (angle * whichWay)) #% 360
if abs(startAngle-endAngle)>=1e-5:
if startAngle < endAngle:
a1 = startAngle
a2 = endAngle
else:
a1 = endAngle
a2 = startAngle
#if we didn't use %stylecount here we'd end up with the later sectors
#all having the default style
sectorStyle = self.slices[i%styleCount]
# is it a popout?
cx, cy = centerx, centery
if sectorStyle.popout != 0:
# pop out the sector
averageAngle = (a1+a2)/2.0
aveAngleRadians = averageAngle * pi/180.0
popdistance = sectorStyle.popout
cx = centerx + popdistance * cos(aveAngleRadians)
cy = centery + popdistance * sin(aveAngleRadians)
if type(n) in (ListType,TupleType):
theSector = Wedge(cx, cy, xradius+(sn*iradius)-iradius, a1, a2, yradius=yradius+(sn*iradius)-iradius, radius1=yradius+(sn*iradius)-(2*iradius))
else:
theSector = Wedge(cx, cy, xradius, a1, a2, yradius=yradius, radius1=iradius)
theSector.fillColor = sectorStyle.fillColor
theSector.strokeColor = sectorStyle.strokeColor
theSector.strokeWidth = sectorStyle.strokeWidth
theSector.strokeDashArray = sectorStyle.strokeDashArray
g.add(theSector)
startAngle = endAngle
text = self.getSeriesName(i,'')
if text:
averageAngle = (a1+a2)/2.0
aveAngleRadians = averageAngle*pi/180.0
labelRadius = sectorStyle.labelRadius
labelX = centerx + (0.5 * self.width * cos(aveAngleRadians) * labelRadius)
labelY = centery + (0.5 * self.height * sin(aveAngleRadians) * labelRadius)
g.add(_addWedgeLabel(self,text,averageAngle,labelX,labelY,sectorStyle))
i += 1
sn += 1
else:
i = 0
#single series doughnut
iradius = self.height/5.0
for angle in normData:
endAngle = (startAngle + (angle * whichWay)) #% 360
if abs(startAngle-endAngle)>=1e-5:
if startAngle < endAngle:
a1 = startAngle
a2 = endAngle
else:
a1 = endAngle
a2 = startAngle
#if we didn't use %stylecount here we'd end up with the later sectors
#all having the default style
sectorStyle = self.slices[i%styleCount]
# is it a popout?
cx, cy = centerx, centery
if sectorStyle.popout != 0:
# pop out the sector
averageAngle = (a1+a2)/2.0
aveAngleRadians = averageAngle * pi/180.0
popdistance = sectorStyle.popout
cx = centerx + popdistance * cos(aveAngleRadians)
cy = centery + popdistance * sin(aveAngleRadians)
if n > 1:
theSector = Wedge(cx, cy, xradius, a1, a2, yradius=yradius, radius1=iradius)
elif n==1:
theSector = Wedge(cx, cy, xradius, a1, a2, yradius=yradius, iradius=iradius)
theSector.fillColor = sectorStyle.fillColor
theSector.strokeColor = sectorStyle.strokeColor
theSector.strokeWidth = sectorStyle.strokeWidth
theSector.strokeDashArray = sectorStyle.strokeDashArray
g.add(theSector)
# now draw a label
if labels[i] != "":
averageAngle = (a1+a2)/2.0
aveAngleRadians = averageAngle*pi/180.0
labelRadius = sectorStyle.labelRadius
labelX = centerx + (0.5 * self.width * cos(aveAngleRadians) * labelRadius)
labelY = centery + (0.5 * self.height * sin(aveAngleRadians) * labelRadius)
theLabel = String(labelX, labelY, labels[i])
theLabel.textAnchor = "middle"
theLabel.fontSize = sectorStyle.fontSize
theLabel.fontName = sectorStyle.fontName
theLabel.fillColor = sectorStyle.fontColor
g.add(theLabel)
startAngle = endAngle
i += 1
return g
def draw(self):
g = Group()
g.add(self.makeSectors())
return g
def sample1():
"Make up something from the individual Sectors"
d = Drawing(400, 400)
g = Group()
s1 = Wedge(centerx=200, centery=200, radius=150, startangledegrees=0, endangledegrees=120, radius1=100)
s1.fillColor=colors.red
s1.strokeColor=None
d.add(s1)
s2 = Wedge(centerx=200, centery=200, radius=150, startangledegrees=120, endangledegrees=240, radius1=100)
s2.fillColor=colors.green
s2.strokeColor=None
d.add(s2)
s3 = Wedge(centerx=200, centery=200, radius=150, startangledegrees=240, endangledegrees=260, radius1=100)
s3.fillColor=colors.blue
s3.strokeColor=None
d.add(s3)
s4 = Wedge(centerx=200, centery=200, radius=150, startangledegrees=260, endangledegrees=360, radius1=100)
s4.fillColor=colors.gray
s4.strokeColor=None
d.add(s4)
return d
def sample2():
"Make a simple demo"
d = Drawing(400, 400)
dn = Doughnut()
dn.x = 50
dn.y = 50
dn.width = 300
dn.height = 300
dn.data = [10,20,30,40,50,60]
d.add(dn)
return d
def sample3():
"Make a more complex demo"
d = Drawing(400, 400)
dn = Doughnut()
dn.x = 50
dn.y = 50
dn.width = 300
dn.height = 300
dn.data = [[10,20,30,40,50,60], [10,20,30,40]]
dn.labels = ['a','b','c','d','e','f']
d.add(dn)
return d
if __name__=='__main__':
from reportlab.graphics.renderPDF import drawToFile
d = sample1()
drawToFile(d, 'doughnut1.pdf')
d = sample2()
drawToFile(d, 'doughnut2.pdf')
d = sample3()
drawToFile(d, 'doughnut3.pdf')
| bsd-3-clause | 7,873,950,994,067,001,000 | 36.994269 | 167 | 0.556259 | false | 3.9 | false | false | false |
zentralopensource/zentral | zentral/utils/rison.py | 1 | 2938 | # from https://github.com/pifantastic/python-rison
# encode a json payload in rison
# used in kibana urls
import re
IDCHAR_PUNCTUATION = '_-./~'
NOT_IDCHAR = ''.join([c for c in (chr(i) for i in range(127))
if not (c.isalnum() or c in IDCHAR_PUNCTUATION)])
# Additionally, we need to distinguish ids and numbers by first char.
NOT_IDSTART = '-0123456789'
# Regexp string matching a valid id.
IDRX = ('[^' + NOT_IDSTART + NOT_IDCHAR + '][^' + NOT_IDCHAR + ']*')
# Regexp to check for valid rison ids.
ID_OK_RE = re.compile('^' + IDRX + '$', re.M)
class Encoder(object):
def __init__(self):
pass
@staticmethod
def encoder(v):
if isinstance(v, list):
return Encoder.list
elif isinstance(v, str):
return Encoder.string
elif isinstance(v, bool):
return Encoder.bool
elif isinstance(v, (float, int)):
return Encoder.number
elif isinstance(v, type(None)):
return Encoder.none
elif isinstance(v, dict):
return Encoder.dict
else:
raise AssertionError('Unable to encode type: {0}'.format(type(v)))
@staticmethod
def encode(v):
encoder = Encoder.encoder(v)
return encoder(v)
@staticmethod
def list(x):
a = ['!(']
b = None
for i in range(len(x)):
v = x[i]
f = Encoder.encoder(v)
if f:
v = f(v)
if isinstance(v, str):
if b:
a.append(',')
a.append(v)
b = True
a.append(')')
return ''.join(a)
@staticmethod
def number(v):
return str(v).replace('+', '')
@staticmethod
def none(_):
return '!n'
@staticmethod
def bool(v):
return '!t' if v else '!f'
@staticmethod
def string(v):
if v == '':
return "''"
if ID_OK_RE.match(v):
return v
def replace(match):
if match.group(0) in ["'", '!']:
return '!' + match.group(0)
return match.group(0)
v = re.sub(r'([\'!])', replace, v)
return "'" + v + "'"
@staticmethod
def dict(x):
a = ['(']
b = None
ks = sorted(x.keys())
for i in ks:
v = x[i]
f = Encoder.encoder(v)
if f:
v = f(v)
if isinstance(v, str):
if b:
a.append(',')
a.append(Encoder.string(i))
a.append(':')
a.append(v)
b = True
a.append(')')
return ''.join(a)
def dumps(o):
    # None is neither a dict nor a list, so a separate "o is None" test is redundant
    if not isinstance(o, (dict, list)):
        raise TypeError('object must be a dict or a list')
return Encoder.encode(o)
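# --- Illustrative usage (not part of the original module) ---------------------
# A quick sanity check of dumps() on a kibana-style payload. The expected
# output below is an assumption derived from the rison grammar implemented
# above, not from the upstream project.
if __name__ == '__main__':
    query = {'columns': ['_source'], 'index': 'events-*', 'interval': 'auto'}
    # keys are emitted sorted; bare identifiers stay unquoted, others are quoted
    print(dumps(query))  # expected: (columns:!(_source),index:'events-*',interval:auto)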
| apache-2.0 | 1,428,220,077,565,753,000 | 23.483333 | 78 | 0.469367 | false | 3.790968 | false | false | false |
quodlibetor/dedupe | setup.py | 1 | 1174 | from __future__ import with_statement
import distribute_setup
distribute_setup.use_setuptools()
from setuptools import setup
import os
with open(os.path.join(os.path.dirname(__file__),"README.rst"), 'r') as fh:
long_desc = fh.read()
VERSION = "0.1.2"
setup(name="dedupe",
version=VERSION,
description="A thing to detect duplicate music",
long_description=long_desc,
author="Brandon W Maister",
author_email="[email protected]",
url="http://bitbucket.org/quodlibetor/dedupe",
py_modules=['dedupe', 'distribute_setup', 'setup'],
entry_points= {'console_scripts': [
'dedupe = dedupe:main'
]},
install_requires=['mutagen', 'argparse'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.5", # min
"Operating System :: OS Independent", # I think?
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
]
)
| gpl-3.0 | 8,757,693,658,711,555,000 | 31.611111 | 83 | 0.61414 | false | 3.811688 | false | true | false |
RITct/Rita | app/secret_sauce/seqtoseq_model.py | 1 | 11243 | import random
import torch
import torch.nn as nn
from torch.autograd import Variable
from torch import optim
import torch.nn.functional as F
import pickle as pk
use_cuda = torch.cuda.is_available()
SOS_token = 0
EOS_token = 1
MAX_LENGTH = 80
class Dataset:
def __init__(self, name):
self.name = name
self.word2index = {}
self.word2count = {}
self.index2word = {0: "SOS", 1: "EOS"}
self.n_words = 2 # Count SOS and EOS
def addSentence(self, sentence):
for word in sentence.split(' '):
self.addWord(word)
def addWord(self, word):
if word not in self.word2index:
self.word2index[word] = self.n_words
self.word2count[word] = 1
self.index2word[self.n_words] = word
self.n_words += 1
else:
self.word2count[word] += 1
def dataclean(training_data):
input_data = Dataset('input')
output_data = Dataset('ouput')
for pair in training_data:
input_data.addSentence(pair[0])
output_data.addSentence(pair[1])
return input_data, output_data, input_data.n_words, output_data.n_words
class EncoderRNN(nn.Module):
def __init__(self, input_size, hidden_size, n_layers=1):
super(EncoderRNN, self).__init__()
self.n_layers = n_layers
self.hidden_size = hidden_size
if use_cuda:
self.embedding = nn.Embedding(input_size, hidden_size).cuda()
self.gru = nn.GRU(hidden_size, hidden_size).cuda()
else:
self.embedding = nn.Embedding(input_size, hidden_size)
self.gru = nn.GRU(hidden_size, hidden_size)
def forward(self, input, hidden):
embedded = self.embedding(input).view(1, 1, -1)
output = embedded
for i in range(self.n_layers):
output, hidden = self.gru(output, hidden)
output = output.cuda() if use_cuda else output
            hidden = hidden.cuda() if use_cuda else hidden  # was "else output", which clobbered the hidden state
return output, hidden
def initHidden(self):
result = Variable(torch.zeros(1, 1, self.hidden_size))
if use_cuda:
return result.cuda()
return result
class AttnDecoderRNN(nn.Module):
def __init__(self, hidden_size, output_size, n_layers=1, dropout_p=0.1, max_length=MAX_LENGTH):
super(AttnDecoderRNN, self).__init__()
self.hidden_size = hidden_size
self.output_size = output_size
self.n_layers = n_layers
self.dropout_p = dropout_p
self.max_length = max_length
if use_cuda:
self.embedding = nn.Embedding(self.output_size, self.hidden_size).cuda()
self.attn = nn.Linear(self.hidden_size * 2, self.max_length).cuda()
self.attn_combine = nn.Linear(self.hidden_size * 2, self.hidden_size).cuda()
self.dropout = nn.Dropout(self.dropout_p).cuda()
self.gru = nn.GRU(self.hidden_size, self.hidden_size).cuda()
self.out = nn.Linear(self.hidden_size, self.output_size).cuda()
else:
self.embedding = nn.Embedding(self.output_size, self.hidden_size)
self.attn = nn.Linear(self.hidden_size * 2, self.max_length)
self.attn_combine = nn.Linear(self.hidden_size * 2, self.hidden_size)
self.dropout = nn.Dropout(self.dropout_p)
self.gru = nn.GRU(self.hidden_size, self.hidden_size)
self.out = nn.Linear(self.hidden_size, self.output_size)
def forward(self, input, hidden, encoder_output, encoder_outputs):
embedded = self.embedding(input).view(1, 1, -1)
embedded = self.dropout(embedded)
attn_weights = F.softmax(
self.attn(torch.cat((embedded[0], hidden[0]), 1)))
attn_weights = attn_weights.cuda() if use_cuda else attn_weights
attn_applied = torch.bmm(attn_weights.unsqueeze(0),
encoder_outputs.unsqueeze(0))
attn_applied = attn_applied.cuda() if use_cuda else attn_applied
output = torch.cat((embedded[0], attn_applied[0]), 1)
output = output.cuda() if use_cuda else output
output = self.attn_combine(output).unsqueeze(0)
for i in range(self.n_layers):
output = F.relu(output)
output = output.cuda() if use_cuda else output
output, hidden = self.gru(output, hidden)
output = F.log_softmax(self.out(output[0]))
output = output.cuda() if use_cuda else output
return output, hidden, attn_weights
def initHidden(self):
result = Variable(torch.zeros(1, 1, self.hidden_size))
if use_cuda:
return result.cuda()
return result
def indexesFromSentence(lang, sentence):
out = []
for word in sentence.split(' '):
if word not in lang.word2index:
continue
k = lang.word2index[word]
out.append(k)
return out
def variableFromSentence(lang, sentence):
indexes = indexesFromSentence(lang, sentence)
indexes.append(EOS_token)
result = Variable(torch.LongTensor(indexes).view(-1, 1))
if use_cuda:
return result.cuda()
return result
def variablesFromPair(pair, input_lang, output_lang):
input_variable = variableFromSentence(input_lang, pair[0])
target_variable = variableFromSentence(output_lang, pair[1])
return (input_variable, target_variable)
teacher_forcing_ratio = 0.5
def train(input_variable, target_variable, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, max_length=MAX_LENGTH):
encoder_hidden = encoder.initHidden()
encoder_optimizer.zero_grad()
decoder_optimizer.zero_grad()
input_length = input_variable.size()[0]
target_length = target_variable.size()[0]
encoder_outputs = Variable(torch.zeros(max_length, encoder.hidden_size))
encoder_outputs = encoder_outputs.cuda() if use_cuda else encoder_outputs
loss = 0
for ei in range(input_length):
encoder_output, encoder_hidden = encoder(
input_variable[ei], encoder_hidden)
encoder_outputs[ei] = encoder_output[0][0]
decoder_input = Variable(torch.LongTensor([[SOS_token]]))
decoder_input = decoder_input.cuda() if use_cuda else decoder_input
decoder_hidden = encoder_hidden
use_teacher_forcing = True if random.random() < teacher_forcing_ratio else False
if use_teacher_forcing:
# Teacher forcing: Feed the target as the next input
for di in range(target_length):
decoder_output, decoder_hidden, decoder_attention = decoder(
decoder_input, decoder_hidden, encoder_output, encoder_outputs)
loss += criterion(decoder_output[0], target_variable[di])
decoder_input = target_variable[di] # Teacher forcing
else:
# Without teacher forcing: use its own predictions as the next input
for di in range(target_length):
decoder_output, decoder_hidden, decoder_attention = decoder(
decoder_input, decoder_hidden, encoder_output, encoder_outputs)
topv, topi = decoder_output.data.topk(1)
ni = topi[0][0]
decoder_input = Variable(torch.LongTensor([[ni]]))
loss += criterion(decoder_output[0], target_variable[di])
if ni == EOS_token:
break
loss.backward()
encoder_optimizer.step()
decoder_optimizer.step()
return loss.data[0] / target_length
def seqtoseq_train(n_iters, training_data,print_every=1000, learning_rate=0.01, tfl=False):
print_loss_total = 0
hidden_size = 256
in_lang, out_lang, inwords, outwords = dataclean(training_data)
metadata = open('app/brain/seqtoseq_meta.pkl', 'wb')
pk.dump([in_lang, out_lang], metadata)
if tfl == False:
encoder = EncoderRNN(inwords, hidden_size)
decoder = AttnDecoderRNN(hidden_size, outwords, dropout_p=0.1)
else:
encoder = torch.load('app/brain/encoder.pt')
decoder = torch.load('app/brain/decoder.pt')
if use_cuda:
encoder = encoder.cuda()
decoder = decoder.cuda()
encoder_optimizer = optim.SGD(encoder.parameters(), lr=learning_rate)
decoder_optimizer = optim.SGD(decoder.parameters(), lr=learning_rate)
training_data = [variablesFromPair(random.choice(training_data),in_lang,out_lang)
for i in range(n_iters)]
criterion = nn.NLLLoss()
if use_cuda:
criterion = criterion.cuda()
for iter in range(1, n_iters + 1):
training_pair = training_data[iter - 1]
input_variable = training_pair[0]
target_variable = training_pair[1]
loss = train(input_variable, target_variable, encoder,
decoder, encoder_optimizer, decoder_optimizer, criterion)
print_loss_total += loss
        # crude progress proxy derived from the loss, not a true accuracy metric
        accuracy = 100-(loss*100)
if accuracy < 0:
accuracy = 0
if iter%1000 == 0:
print(accuracy,"%")
torch.save(encoder, 'app/brain/encoder.pt')
torch.save(decoder, 'app/brain/decoder.pt')
def evaluate(encoder, decoder, input_lang, output_lang, sentence, max_length=MAX_LENGTH):
input_variable = variableFromSentence(input_lang, sentence)
input_length = input_variable.size()[0]
encoder_hidden = encoder.initHidden()
encoder_outputs = Variable(torch.zeros(max_length, encoder.hidden_size))
encoder_outputs = encoder_outputs.cuda() if use_cuda else encoder_outputs
for ei in range(input_length):
encoder_output, encoder_hidden = encoder(input_variable[ei],
encoder_hidden)
encoder_outputs[ei] = encoder_outputs[ei] + encoder_output[0][0]
decoder_input = Variable(torch.LongTensor([[SOS_token]])) # SOS
decoder_input = decoder_input.cuda() if use_cuda else decoder_input
decoder_hidden = encoder_hidden
decoded_words = []
decoder_attentions = torch.zeros(max_length, max_length)
for di in range(max_length):
decoder_output, decoder_hidden, decoder_attention = decoder(
decoder_input, decoder_hidden, encoder_output, encoder_outputs)
decoder_attentions[di] = decoder_attention.data
topv, topi = decoder_output.data.topk(1)
ni = topi[0][0]
if ni == EOS_token:
decoded_words.append('<EOS>')
break
else:
decoded_words.append(output_lang.index2word[ni])
decoder_input = Variable(torch.LongTensor([[ni]]))
decoder_input = decoder_input.cuda() if use_cuda else decoder_input
return decoded_words, decoder_attentions[:di + 1]
def reply_predict(sentence):
try:
encoder = torch.load('app/brain/encoder.pt')
decoder = torch.load('app/brain/decoder.pt')
with open('app/brain/seqtoseq_meta.pkl','rb') as pickle_file:
meta = pk.load(pickle_file)
input_lang = meta[0]
output_lang = meta[1]
output_words, attentions = evaluate(encoder, decoder, input_lang, output_lang, sentence)
output_sentence = ' '.join(output_words)
return output_sentence.split("<EOS>")[0]
except(KeyError):
return random.choice(["sorry i didnt get that","no idea", "i may be malfunctioning", "sorry this is a prototype"])
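# --- Minimal usage sketch (illustrative, not from the original project) -------
# Trains on a toy corpus, then queries the persisted model. Assumptions: the
# app/brain/ directory exists and is writable; real use needs far more data
# and iterations than shown here.
if __name__ == '__main__':
    toy_pairs = [("hello", "hi there"), ("how are you", "i am fine")]
    seqtoseq_train(10, toy_pairs)  # writes encoder.pt / decoder.pt / seqtoseq_meta.pkl
    print(reply_predict("hello"))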
| bsd-3-clause | -6,247,866,893,398,659,000 | 35.385113 | 133 | 0.631771 | false | 3.626774 | false | false | false |
nonZero/demos-python | src/examples/short/object_oriented/static_method_6.py | 1 | 1045 | #!/usr/bin/python2
'''
An example for using class methods to keep per class properties.
Once set, subclass properties shadows properties on the base class.
'''
from __future__ import print_function
class Book(object):
num = 0
def __init__(self, title):
self.title = title
self.id = self.increment_num()
print('Created:', self)
@classmethod
def increment_num(cls):
cls.num += 1
return cls.num
def __str__(self):
return '<{} #{}: {}>'.format(self.__class__.__name__, self.id, self.title)
b1 = Book('Guinness Book of Records')
b2 = Book('The Bible')
print('Book.num:', Book.num)
print('b1.num:', b1.num)
print()
class FictionBook(Book):
num = 0 # Removing me voids warranty
print('Book.num:', Book.num)
print('FictionBook.num:', FictionBook.num)
print()
b3 = FictionBook('Sherlock Holmes')
b4 = FictionBook('Danny Din')
b5 = FictionBook('Kofiko')
print()
print('Book.num:', Book.num)
print('FictionBook.num:', FictionBook.num)
print()
b6 = Book('Britannica')
| gpl-3.0 | -7,061,124,976,053,368,000 | 19.096154 | 82 | 0.636364 | false | 3.091716 | false | false | false |
YtvwlD/yarfi | etc/yarfi/services/console_setup.py | 1 | 1300 | # YARFI - Yet Another Replacement For Init
# Copyright (C) 2014 Niklas Sombert
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from subprocess import Popen
from yarfi.ServicesAndTargets import Service as Srv
class Service(Srv):
def __init__(self):
self.description = "set the console font and keyboard layout"
self.depends = ["system", "udev"]
self.conflicts = []
self.respawn = True
self.status_ = ""
self.process = None
def start(self):
self.process = Popen(["/bin/setupcon"]) #use --force? (and --save?)
def status(self):
if self.status_ == "stopped":
return ("stopped")
if self.process:
			# poll() returning non-None means setupcon has exited; the one-shot
			# console setup is then treated as done, i.e. the service "runs"
			if self.process.poll() is not None:
				self.status_ = "running"
				return ("running")
cydenix/OpenGLCffi | OpenGLCffi/EGL/EXT/EXT/output_base.py | 1 | 1086 | from OpenGLCffi.EGL import params
@params(api='egl', prms=['dpy', 'attrib_list', 'layers', 'max_layers', 'num_layers'])
def eglGetOutputLayersEXT(dpy, attrib_list, layers, max_layers, num_layers):
pass
@params(api='egl', prms=['dpy', 'attrib_list', 'ports', 'max_ports', 'num_ports'])
def eglGetOutputPortsEXT(dpy, attrib_list, ports, max_ports, num_ports):
pass
@params(api='egl', prms=['dpy', 'layer', 'attribute', 'value'])
def eglOutputLayerAttribEXT(dpy, layer, attribute):
pass
@params(api='egl', prms=['dpy', 'layer', 'attribute', 'value'])
def eglQueryOutputLayerAttribEXT(dpy, layer, attribute):
pass
@params(api='egl', prms=['dpy', 'layer', 'name'])
def eglQueryOutputLayerStringEXT(dpy, layer, name):
pass
@params(api='egl', prms=['dpy', 'port', 'attribute', 'value'])
def eglOutputPortAttribEXT(dpy, port, attribute):
pass
@params(api='egl', prms=['dpy', 'port', 'attribute', 'value'])
def eglQueryOutputPortAttribEXT(dpy, port, attribute):
pass
@params(api='egl', prms=['dpy', 'port', 'name'])
def eglQueryOutputPortStringEXT(dpy, port, name):
pass
| mit | 6,723,218,600,509,176,000 | 25.487805 | 85 | 0.689687 | false | 2.749367 | false | false | false |
Edraak/circleci-edx-platform | common/djangoapps/edraak_i18n/management/commands/i18n_edraak_theme_push.py | 1 | 2382 | # * Handling merge/forks of UserProfile.meta
from django.core.management.base import BaseCommand
from django.conf import settings
import os
from subprocess import call
import polib
class Command(BaseCommand):
help = '''Run theme's ./scripts/edraak_i18n_theme_push.sh'''
@staticmethod
def remove_ignored_messages(theme_root):
theme_pofile = theme_root / 'conf/locale/en/LC_MESSAGES/edraak-platform-2015-theme.po'
theme_po = polib.pofile(theme_pofile)
# `reversed()` is used to allow removing from the bottom
# instead of changing the index and introducing bugs
for entry in reversed(theme_po):
if 'edraak-ignore' in entry.comment.lower():
theme_po.remove(entry)
print 'Removed ignored translation: ', entry.msgid, '=>', entry.msgstr
theme_po.save()
@staticmethod
def generate_pofile(theme_root):
mako_pofile_relative = 'conf/locale/en/LC_MESSAGES/mako.po'
mako_pofile = theme_root / mako_pofile_relative
if not mako_pofile.dirname().exists():
os.makedirs(mako_pofile.dirname())
open(mako_pofile, 'w').close() # Make sure the file exists and empty
call([
'pybabel',
'-q', 'extract',
'--mapping=conf/locale/babel_mako.cfg',
'--add-comments', 'Translators:',
'--keyword', 'interpolate',
'.',
'--output={}'.format(mako_pofile_relative),
], cwd=theme_root)
call(['i18n_tool', 'segment', '--config', 'conf/locale/config.yaml', 'en'], cwd=theme_root)
if mako_pofile.exists():
mako_pofile.unlink()
@staticmethod
def transifex_push(theme_root):
call(['tx', 'push', '-l', 'en', '-s', '-r', 'edraak.edraak-platform-2015-theme'], cwd=theme_root)
def handle(self, *args, **options):
if settings.FEATURES.get('USE_CUSTOM_THEME', False) and settings.THEME_NAME:
theme_root = settings.ENV_ROOT / "themes" / settings.THEME_NAME
self.generate_pofile(theme_root)
self.remove_ignored_messages(theme_root)
self.transifex_push(theme_root)
else:
print "Error: theme files not found."
print "Are you sure the config is correct? Press <Enter> to continue without theme i18n..."
raw_input()
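# Typical invocation (assumption: run from the edx-platform root with the
# Edraak theme checked out and the transifex client configured):
#
#   ./manage.py lms --settings=<env> i18n_edraak_theme_push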
| agpl-3.0 | -8,983,956,020,593,395,000 | 36.21875 | 105 | 0.607473 | false | 3.745283 | false | false | false |
erijo/py-svndump | svndump/record.py | 1 | 5100 | # Copyright (c) 2012 Erik Johansson <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
from .section import *
class Record(object):
def __init__(self, headers):
super(Record, self).__init__()
self.headers = headers
def discard(self):
pass
def write(self, stream):
self.headers.write(stream)
@staticmethod
def read(stream):
headers = HeaderSection.read(stream)
if headers is None:
return None
if NodeRecord.NODE_PATH_HEADER in headers:
return NodeRecord.read(headers, stream)
elif RevisionRecord.REVISION_NUMBER_HEADER in headers:
return RevisionRecord.read(headers, stream)
elif VersionStampRecord.VERSION_HEADER in headers:
return VersionStampRecord.read(headers, stream)
elif UuidRecord.UUID_HEADER in headers:
return UuidRecord.read(headers, stream)
stream.error("unknown record");
class VersionStampRecord(Record):
VERSION_HEADER = "SVN-fs-dump-format-version"
def __init__(self, headers):
super(VersionStampRecord, self).__init__(headers)
@staticmethod
def read(headers, stream):
return VersionStampRecord(headers)
class UuidRecord(Record):
UUID_HEADER = "UUID"
def __init__(self, headers):
super(UuidRecord, self).__init__(headers)
@staticmethod
def read(headers, stream):
return UuidRecord(headers)
class RevisionRecord(Record):
REVISION_NUMBER_HEADER = "Revision-number"
PROP_CONTENT_LENGTH = "Prop-content-length"
CONTENT_LENGTH = "Content-length"
def __init__(self, headers, properties):
super(RevisionRecord, self).__init__(headers)
self.properties = properties
def write(self, stream):
prop_length = self.properties.dump_length()
self.headers[self.PROP_CONTENT_LENGTH] = prop_length
self.headers[self.CONTENT_LENGTH] = prop_length
super(RevisionRecord, self).write(stream)
self.properties.write(stream)
stream.writeline()
@staticmethod
def read(headers, stream):
properties = PropertySection.read(stream)
return RevisionRecord(headers, properties)
class NodeRecord(Record):
NODE_PATH_HEADER = "Node-path"
NODE_KIND = "Node-kind"
NODE_ACTION = "Node-action"
NODE_COPYFROM_REV = "Node-copyfrom-rev"
NODE_COPYFROM_PATH = "Node-copyfrom-path"
TEXT_COPY_SOURCE_MD5 = "Text-copy-source-md5"
TEXT_CONTENT_MD5 = "Text-content-md5"
TEXT_CONTENT_LENGTH = "Text-content-length"
PROP_CONTENT_LENGTH = "Prop-content-length"
CONTENT_LENGTH = "Content-length"
# New in version 3
TEXT_DELTA = "Text-delta"
PROP_DELTA = "Prop-delta"
TEXT_DELTA_BASE_MD5 = "Text-delta-base-md5"
TEXT_DELTA_BASE_SHA1 = "Text-delta-base-sha1"
TEXT_COPY_SOURCE_SHA1 = "Text-copy-source-sha1"
TEXT_CONTENT_SHA1 = "Text-content-sha1"
def __init__(self, headers, properties, content):
super(NodeRecord, self).__init__(headers)
self.properties = properties
self.content = content
def discard(self):
if self.content is not None:
self.content.discard()
def write(self, stream):
prop_length = 0
if self.properties is not None:
prop_length = self.properties.dump_length()
self.headers[self.PROP_CONTENT_LENGTH] = prop_length
text_length = 0
if self.content is not None:
text_length = self.content.dump_length()
self.headers[self.TEXT_CONTENT_LENGTH] = text_length
if self.properties is not None or self.content is not None:
self.headers[self.CONTENT_LENGTH] = prop_length + text_length
super(NodeRecord, self).write(stream)
if self.properties is not None:
self.properties.write(stream)
if self.content is not None:
self.content.write(stream)
stream.writeline()
stream.writeline()
@staticmethod
def read(headers, stream):
properties = None
if NodeRecord.PROP_CONTENT_LENGTH in headers:
properties = PropertySection.read(stream)
content = None
if NodeRecord.TEXT_CONTENT_LENGTH in headers:
content = Content.read(
stream, headers[NodeRecord.TEXT_CONTENT_LENGTH])
return NodeRecord(headers, properties, content)
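# --- Illustrative helper (not part of the original module) --------------------
# Streams every record from a dump and writes it back unchanged. It only
# relies on the Record API above plus the package's stream wrapper; the
# function name is an assumption for demonstration purposes.
def _copy_dump(in_stream, out_stream):
    """Copy records one by one; Record.read returns None at end of input."""
    while True:
        record = Record.read(in_stream)
        if record is None:
            break
        record.write(out_stream)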
| gpl-3.0 | -8,833,452,493,659,131,000 | 30.875 | 73 | 0.65902 | false | 3.950426 | false | false | false |
izapolsk/integration_tests | cfme/utils/appliance/services.py | 1 | 3411 | import attr
from cfme.utils.appliance.plugin import AppliancePlugin
from cfme.utils.appliance.plugin import AppliancePluginException
from cfme.utils.log import logger_wrap
from cfme.utils.quote import quote
from cfme.utils.wait import wait_for
class SystemdException(AppliancePluginException):
pass
@attr.s
class SystemdService(AppliancePlugin):
unit_name = attr.ib(type=str)
@logger_wrap('SystemdService command runner: {}')
def _run_service_command(
self,
command,
expected_exit_code=None,
unit_name=None,
log_callback=None
):
"""Wrapper around running the command and raising exception on unexpected code
Args:
command: string command for systemd (stop, start, restart, etc)
expected_exit_code: the exit code to expect, otherwise raise
unit_name: optional unit name, defaults to self.unit_name attribute
log_callback: logger to log against
Raises:
SystemdException: When expected_exit_code is not matched
"""
unit = self.unit_name if unit_name is None else unit_name
with self.appliance.ssh_client as ssh:
cmd = 'systemctl {} {}'.format(quote(command), quote(unit))
log_callback('Running {}'.format(cmd))
result = ssh.run_command(cmd,
container=self.appliance.ansible_pod_name)
if expected_exit_code is not None and result.rc != expected_exit_code:
# TODO: Bring back address
msg = 'Failed to {} {}\nError: {}'.format(
command, self.unit_name, result.output)
if log_callback:
log_callback(msg)
else:
self.logger.error(msg)
raise SystemdException(msg)
return result
def stop(self, log_callback=None):
return self._run_service_command(
'stop',
expected_exit_code=0,
log_callback=log_callback
)
def start(self, log_callback=None):
return self._run_service_command(
'start',
expected_exit_code=0,
log_callback=log_callback
)
def restart(self, log_callback=None):
return self._run_service_command(
'restart',
expected_exit_code=0,
log_callback=log_callback
)
def enable(self, log_callback=None):
return self._run_service_command(
'enable',
expected_exit_code=0,
log_callback=log_callback
)
@property
def enabled(self):
return self._run_service_command('is-enabled').rc == 0
@property
def is_active(self):
return self._run_service_command('is-active').rc == 0
@property
def running(self):
return self._run_service_command("status").rc == 0
def wait_for_running(self, timeout=600):
result, wait = wait_for(
lambda: self.running,
num_sec=timeout,
fail_condition=False,
delay=5,
)
return result
def daemon_reload(self, log_callback=None):
"""Call daemon-reload, no unit name for this"""
return self._run_service_command(
command='daemon-reload',
expected_exit_code=0,
unit_name='',
log_callback=log_callback
)
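# Usage sketch (illustrative; 'appliance' comes from the surrounding test
# framework and the unit name is an assumption):
#
#   service = SystemdService(appliance=appliance, unit_name='evmserverd')
#   service.restart()
#   service.wait_for_running()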
| gpl-2.0 | 2,346,904,480,859,109,000 | 29.72973 | 86 | 0.586045 | false | 4.23201 | false | false | false |
ttrifonov/horizon | horizon/horizon/dashboards/syspanel/users/tables.py | 1 | 4566 | import logging
from django import shortcuts
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from horizon import api
from horizon import tables
LOG = logging.getLogger(__name__)
class CreateUserLink(tables.LinkAction):
name = "create"
verbose_name = _("Create User")
url = "horizon:syspanel:users:create"
attrs = {
"class": "ajax-modal btn small",
}
class EditUserLink(tables.LinkAction):
name = "edit"
verbose_name = _("Edit")
url = "horizon:syspanel:users:update"
attrs = {
"class": "ajax-modal",
}
class EnableUsersAction(tables.Action):
name = "enable"
verbose_name = _("Enable")
verbose_name_plural = _("Enable Users")
def allowed(self, request, user):
return not user.enabled
def handle(self, data_table, request, object_ids):
failures = 0
enabled = []
for obj_id in object_ids:
try:
api.keystone.user_update_enabled(request, obj_id, True)
enabled.append(obj_id)
except Exception, e:
failures += 1
messages.error(request, _("Error enabling user: %s") % e)
LOG.exception("Error enabling user.")
if failures:
messages.info(request, _("Enabled the following users: %s")
% ", ".join(enabled))
else:
messages.success(request, _("Successfully enabled users: %s")
% ", ".join(enabled))
return shortcuts.redirect('horizon:syspanel:users:index')
class DisableUsersAction(tables.Action):
name = "disable"
verbose_name = _("Disable")
verbose_name_plural = _("Disable Users")
def allowed(self, request, user):
return user.enabled
def handle(self, data_table, request, object_ids):
failures = 0
disabled = []
for obj_id in object_ids:
if obj_id == request.user.id:
messages.info(request, _('You cannot disable the user you are '
'currently logged in as.'))
continue
try:
api.keystone.user_update_enabled(request, obj_id, False)
disabled.append(obj_id)
except Exception, e:
failures += 1
messages.error(request, _("Error disabling user: %s") % e)
LOG.exception("Error disabling user.")
if failures:
messages.info(request, _("Disabled the following users: %s")
% ", ".join(disabled))
else:
if disabled:
messages.success(request, _("Successfully disabled users: %s")
% ", ".join(disabled))
return shortcuts.redirect('horizon:syspanel:users:index')
class DeleteUsersAction(tables.DeleteAction):
data_type_singular = _("User")
data_type_plural = _("Users")
def allowed(self, request, datum):
if datum and datum.id == request.user.id:
return False
return True
def delete(self, request, obj_id):
api.keystone.user_delete(request, obj_id)
class UserFilterAction(tables.FilterAction):
def filter(self, table, users, filter_string):
""" Really naive case-insensitive search. """
# FIXME(gabriel): This should be smarter. Written for demo purposes.
q = filter_string.lower()
def comp(user):
if q in user.name.lower() or q in user.email.lower():
return True
return False
return filter(comp, users)
class UsersTable(tables.DataTable):
STATUS_CHOICES = (
("true", True),
("false", False)
)
id = tables.Column(_('id'))
name = tables.Column(_('name'))
email = tables.Column(_('email'))
# Default tenant is not returned from Keystone currently.
#default_tenant = tables.Column(_('default_tenant'),
# verbose_name="Default Project")
enabled = tables.Column(_('enabled'),
status=True,
status_choices=STATUS_CHOICES)
class Meta:
name = "users"
verbose_name = _("Users")
row_actions = (EditUserLink, EnableUsersAction, DisableUsersAction,
DeleteUsersAction)
table_actions = (UserFilterAction, CreateUserLink, DeleteUsersAction)
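# Wiring sketch (illustrative, not part of this file): the panel's view would
# typically render this table via a DataTableView; names are assumptions.
#
#   class IndexView(tables.DataTableView):
#       table_class = UsersTable
#       template_name = 'syspanel/users/index.html'
#
#       def get_data(self):
#           return api.keystone.user_list(self.request)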
| apache-2.0 | -7,072,450,925,408,468,000 | 31.382979 | 79 | 0.56417 | false | 4.445959 | false | false | false |
OliverWalter/amdtk | amdtk/models/mixture.py | 1 | 3497 |
"""Mixture of distributions/densities."""
import numpy as np
from scipy.misc import logsumexp
from .discrete_latent_model import DiscreteLatentModel
from .dirichlet import Dirichlet
class MixtureStats(object):
"""Sufficient statistics for :class:BayesianMixture`.
Methods
-------
__getitem__(key)
Index operator.
__add__(stats)
Addition operator.
__iadd__(stats)
In-place addition operator.
"""
def __init__(self, P_Z):
self.__stats = P_Z.sum(axis=0)
def __getitem__(self, key):
if type(key) is not int:
raise TypeError()
if key < 0 or key > 2:
raise IndexError
return self.__stats
def __add__(self, other):
        # build zero statistics of matching dimension; __init__ expects an
        # array with a .sum method, so passing len() directly would fail
        new_stats = MixtureStats(np.zeros((1, len(self.__stats))))
new_stats += self
new_stats += other
return new_stats
def __iadd__(self, other):
self.__stats += other.__stats
return self
class BayesianMixture(DiscreteLatentModel):
"""Bayesian mixture of probability distributions (or densities).
The prior is a Dirichlet density.
Attributes
----------
prior : :class:`Dirichlet`
Prior density.
posterior : :class:`Dirichlet`
Posterior density.
Methods
-------
expLogLikelihood(X)
Expected value of the log-likelihood of the data given the
model.
KLPosteriorPrior()
KL divergence between the posterior and the prior densities.
updatePosterior(mixture_stats, pdf_stats)
Update the parameters of the posterior distribution according to
the accumulated statistics.
"""
def __init__(self, alphas, components):
super().__init__(components)
self.prior = Dirichlet(alphas)
self.posterior = Dirichlet(alphas.copy())
def expLogLikelihood(self, X, weight=1.0):
"""Expected value of the log-likelihood of the data given the
model.
Parameters
----------
X : numpy.ndarray
Data matrix of N frames with D dimensions.
weight : float
Scaling weight for the log-likelihood
Returns
-------
E_llh : numpy.ndarray
The expected value of the log-likelihood for each frame.
E_log_P_Z: numpy.ndarray
Probability distribution of the latent states given the
data.
"""
E_log_weights = self.posterior.expLogPi()
E_log_p_X = np.zeros((X.shape[0], self.k))
for i, pdf in enumerate(self.components):
E_log_p_X[:, i] += E_log_weights[i]
E_log_p_X[:, i] += pdf.expLogLikelihood(X)
E_log_p_X[:, i] *= weight
log_norm = logsumexp(E_log_p_X, axis=1)
E_log_P_Z = (E_log_p_X.T - log_norm).T
return log_norm, E_log_P_Z
def KLPosteriorPrior(self):
"""KL divergence between the posterior and the prior densities.
Returns
-------
KL : float
KL divergence.
"""
KL = 0
for component in self.components:
KL += component.KLPosteriorPrior()
return KL + self.posterior.KL(self.prior)
def updatePosterior(self, mixture_stats):
"""Update the parameters of the posterior distribution.
Parameters
----------
mixture_stats : :class:MixtureStats
Statistics of the mixture weights.
"""
self.posterior = self.prior.newPosterior(mixture_stats)
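# --- Accumulation sketch (illustrative, not from the original project) --------
# MixtureStats sums responsibilities over frames and batches combine with
# '+='. The shape below is an assumption: P_Z is (n_frames, n_components).
if __name__ == '__main__':
    P_Z_batch1 = np.ones((4, 3)) / 3.0
    P_Z_batch2 = np.ones((2, 3)) / 3.0
    stats = MixtureStats(P_Z_batch1)
    stats += MixtureStats(P_Z_batch2)
    print(stats[0])  # per-component soft counts; they sum to the 6 frames seen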
| bsd-2-clause | 1,982,866,318,469,026,800 | 26.535433 | 72 | 0.582213 | false | 4.005727 | false | false | false |
davidbgk/udata | udata/search/commands.py | 1 | 8335 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
import sys
import signal
from contextlib import contextmanager
from datetime import datetime
from flask import current_app
from flask_script import prompt_bool
from udata.commands import submanager, IS_INTERACTIVE
from udata.search import es, adapter_catalog
from elasticsearch.helpers import reindex as es_reindex, streaming_bulk
log = logging.getLogger(__name__)
m = submanager(
'search',
help='Search/Indexation related operations',
description='Handle search and indexation related operations'
)
TIMESTAMP_FORMAT = '%Y-%m-%d-%H-%M'
DEPRECATION_MSG = '{cmd} command will be removed in udata 1.4, use index command instead'
def default_index_name():
'''Build a time based index name'''
return '-'.join([es.index_name, datetime.now().strftime(TIMESTAMP_FORMAT)])
def iter_adapters():
'''Iter over adapter in predictable way'''
adapters = adapter_catalog.values()
return sorted(adapters, key=lambda a: a.model.__name__)
def iter_qs(qs, adapter):
'''Safely iterate over a DB QuerySet yielding ES documents'''
for obj in qs.no_dereference().timeout(False):
if adapter.is_indexable(obj):
try:
doc = adapter.from_model(obj).to_dict(include_meta=True)
yield doc
except Exception as e:
model = adapter.model.__name__
log.error('Unable to index %s "%s": %s', model, str(obj.id),
str(e), exc_info=True)
def iter_for_index(docs, index_name):
'''Iterate over ES documents ensuring a given index'''
for doc in docs:
doc['_index'] = index_name
yield doc
def index_model(index_name, adapter):
    '''Index all objects for a given model'''
model = adapter.model
log.info('Indexing {0} objects'.format(model.__name__))
qs = model.objects
if hasattr(model.objects, 'visible'):
qs = qs.visible()
if adapter.exclude_fields:
qs = qs.exclude(*adapter.exclude_fields)
docs = iter_qs(qs, adapter)
docs = iter_for_index(docs, index_name)
for ok, info in streaming_bulk(es.client, docs, raise_on_error=False):
if not ok:
log.error('Unable to index %s "%s": %s', model.__name__,
info['index']['_id'], info['index']['error'])
def disable_refresh(index_name):
'''
Disable refresh to optimize indexing
See: https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-update-settings.html#bulk
''' # noqa
es.indices.put_settings(index=index_name, body={
'index': {
'refresh_interval': '-1'
}
})
def enable_refresh(index_name):
'''
Enable refresh and force merge. To be used after indexing.
See: https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-update-settings.html#bulk
''' # noqa
refresh_interval = current_app.config['ELASTICSEARCH_REFRESH_INTERVAL']
es.indices.put_settings(index=index_name, body={
'index': {'refresh_interval': refresh_interval}
})
es.indices.forcemerge(index=index_name)
def set_alias(index_name, delete=True):
'''
Properly end an indexation by creating an alias.
Previous alias is deleted if needed.
'''
log.info('Creating alias "{0}" on index "{1}"'.format(
es.index_name, index_name))
if es.indices.exists_alias(name=es.index_name):
alias = es.indices.get_alias(name=es.index_name)
previous_indices = alias.keys()
if index_name not in previous_indices:
es.indices.put_alias(index=index_name, name=es.index_name)
for index in previous_indices:
if index != index_name:
es.indices.delete_alias(index=index, name=es.index_name)
if delete:
es.indices.delete(index=index)
else:
es.indices.put_alias(index=index_name, name=es.index_name)
@contextmanager
def handle_error(index_name, keep=False):
'''
Handle errors while indexing.
In case of error, properly log it, remove the index and exit.
If `keep` is `True`, index is not deleted.
'''
# Handle keyboard interrupt
signal.signal(signal.SIGINT, signal.default_int_handler)
signal.signal(signal.SIGTERM, signal.default_int_handler)
has_error = False
try:
yield
except KeyboardInterrupt:
print('') # Proper warning message under the "^C" display
log.warning('Interrupted by signal')
has_error = True
except Exception as e:
log.error(e)
has_error = True
if has_error:
if not keep:
log.info('Removing index %s', index_name)
es.indices.delete(index=index_name)
sys.exit(-1)
@m.option('-t', '--type', dest='doc_type', required=True,
help='Only reindex a given type')
def reindex(doc_type):
'''[DEPRECATED] Reindex models'''
log.warn(DEPRECATION_MSG.format(cmd='reindex'))
index([doc_type], force=True, keep=False)
@m.option('-n', '--name', default=None, help='Optional index name')
@m.option('-d', '--delete', default=False, action='store_true',
help='Delete previously aliased indices')
@m.option('-f', '--force', default=False, action='store_true',
help='Do not prompt on deletion')
@m.option('-k', '--keep', default=False, action='store_true',
help='Keep index in case of error')
def init(name=None, delete=False, force=False, keep=False):
'''[DEPRECATED] Initialize or rebuild the search index'''
log.warn(DEPRECATION_MSG.format(cmd='init'))
index(name=name, force=force, keep=not delete)
@m.option(dest='models', nargs='*', metavar='model',
help='Model to reindex')
@m.option('-n', '--name', default=None, help='Optional index name')
@m.option('-f', '--force', default=False, action='store_true',
help='Do not prompt on deletion')
@m.option('-k', '--keep', default=False, action='store_true',
help='Do not delete indexes')
def index(models=None, name=None, force=False, keep=False):
'''Initialize or rebuild the search index'''
index_name = name or default_index_name()
doc_types_names = [m.__name__.lower() for m in adapter_catalog.keys()]
models = [model.lower().rstrip('s') for model in (models or [])]
for model in models:
if model not in doc_types_names:
log.error('Unknown model %s', model)
sys.exit(-1)
    log.info('Initializing index "{0}"'.format(index_name))
if es.indices.exists(index_name):
if IS_INTERACTIVE and not force:
msg = 'Index {0} will be deleted, are you sure?'
delete = prompt_bool(msg.format(index_name))
else:
delete = True
if delete:
es.indices.delete(index_name)
else:
sys.exit(-1)
es.initialize(index_name)
with handle_error(index_name, keep):
disable_refresh(index_name)
for adapter in iter_adapters():
if not models or adapter.doc_type().lower() in models:
index_model(index_name, adapter)
else:
log.info('Copying {0} objects to the new index'.format(
adapter.model.__name__))
# Need upgrade to Elasticsearch-py 5.0.0 to write:
# es.reindex({
# 'source': {'index': es.index_name, 'type': adapter.doc_type()},
# 'dest': {'index': index_name}
# })
#
# http://elasticsearch-py.readthedocs.io/en/master/api.html#elasticsearch.Elasticsearch.reindex
# This method (introduced in Elasticsearch 2.3 but only in Elasticsearch-py 5.0.0)
# triggers a server-side documents copy.
# Instead we use this helper for meant for backward compatibility
# but with poor performance as copy is client-side (scan+bulk)
es_reindex(es.client, es.index_name, index_name, scan_kwargs={
'doc_type': adapter.doc_type()
})
enable_refresh(index_name)
# At this step, we don't want error handler to delete the index
# in case of error
set_alias(index_name, delete=not keep)
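# Typical invocations (assumptions about the CLI entry point):
#
#   udata search index              # rebuild everything in a timestamped index
#   udata search index dataset -f   # reindex only datasets, skip the prompt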
| agpl-3.0 | -8,890,075,423,520,776,000 | 34.168776 | 111 | 0.616557 | false | 3.778332 | false | false | false |
DISBi/django-disbi | disbi/disbimodels.py | 1 | 7146 | """
Normal Django models with a few custom options for configuration.
If you have custom model classes that need these options, add them here and
create a child class of the appropriate options class and your custom model class.
"""
# Django
from django.db import models
class Options():
def __init__(self, di_show=False, di_display_name=None, di_hr_primary_key=False,
di_choose=False, di_combinable=False,
*args, **kwargs):
"""
Custom options for DISBi fields.
Args:
di_show (bool): Determines whether the column should be
included in the result table.
di_display_name (str): Will be used as column header in the result table.
di_hr_primary_key (bool): Determines whether the column should
be used for identifying rows. If true column must be unique
and may not be `null` or `blank`. Only one di_hr_primary_key
is allowed per model.
TODO: enforce this
"""
self.di_show = di_show
self.di_display_name = di_display_name
self.di_hr_primary_key = di_hr_primary_key
self.di_choose = di_choose
self.di_combinable = di_combinable
super().__init__(*args, **kwargs)
class RelationshipOptions():
def __init__(self, to, di_show=False, di_display_name=None, di_hr_primary_key=False,
di_choose=False, di_combinable=False,
*args, **kwargs):
"""
Custom options for DISBi relationship fields, which have a different
signature than normal fields.
Args:
di_show (bool): Determines whether the column should be
included in the result table.
di_display_name (str): Will be used as column header in the result table.
di_hr_primary_key (bool): Determines whether the column should
be used for identifying rows. If true column must be unique
and may not be `null` or `blank`. Only one di_hr_primary_key
is allowed per model.
TODO: enforce this
"""
self.di_show = di_show
        self.di_display_name = di_display_name  # attribute name aligned with Options.di_display_name
self.di_hr_primary_key = di_hr_primary_key
self.di_choose = di_choose
self.di_combinable = di_combinable
super().__init__(to, *args, **kwargs)
class ExcludeOptions(Options):
"""
Adds the `exclude` option, to exclude rows where this field
evaluates to `False`. Should be only used on Bool fields.
"""
def __init__(self, di_exclude=False, di_show=False, di_display_name=None,
di_hr_primary_key=False, di_choose=False, di_combinable=False,
*args, **kwargs):
self.di_exclude = di_exclude
        super().__init__(di_show, di_display_name, di_hr_primary_key, di_choose,
                         di_combinable,
                         *args, **kwargs)
class FloatField(Options, models.FloatField):
"""
FloatField with custom DISBi options.
"""
pass
class BigIntegerField(Options, models.BigIntegerField):
"""
BigIntegerField with custom DISBi options.
"""
pass
class BinaryField(Options, models.BinaryField):
"""
BinaryField with custom DISBi options.
"""
pass
class CommaSeparatedIntegerField(Options, models.CommaSeparatedIntegerField):
"""
CommaSeparatedIntegerField with custom DISBi options.
"""
pass
class CharField(Options, models.CharField):
"""
CharField with custom DISBi options.
"""
pass
class DateField(Options, models.DateField):
"""
DateField with custom DISBi options.
"""
pass
class DateTimeField(Options, models.DateTimeField):
"""
DateTimeField with custom DISBi options.
"""
pass
class DecimalField(Options, models.DecimalField):
"""
DecimalField with custom DISBi options.
"""
pass
class DurationField(Options, models.DurationField):
"""
DurationField with custom DISBi options.
"""
pass
class EmailField(Options, models.EmailField):
"""
EmailField with custom DISBi options.
"""
pass
class FileField(Options, models.FileField):
"""
FileField with custom DISBi options.
"""
pass
class FilePathField(Options, models.FilePathField):
"""
FilePathField with custom DISBi options.
"""
pass
class ImageField(Options, models.ImageField):
"""
ImageField with custom DISBi options.
"""
pass
class IntegerField(Options, models.IntegerField):
"""
IntegerField with custom DISBi options.
"""
pass
class GenericIPAddressField(Options, models.GenericIPAddressField):
"""
GenericIPAddressField with custom DISBi options.
"""
pass
class PositiveIntegerField(Options, models.PositiveIntegerField):
"""
PositiveIntegerField with custom DISBi options.
"""
pass
class PositiveSmallIntegerField(Options, models.PositiveSmallIntegerField):
"""
PositiveSmallIntegerField with custom DISBi options.
"""
pass
class SlugField(Options, models.SlugField):
"""
SlugField with custom DISBi options.
"""
pass
class SmallIntegerField(Options, models.SmallIntegerField):
"""
SmallIntegerField with custom DISBi options.
"""
pass
class TextField(Options, models.TextField):
"""
TextField with custom DISBi options.
"""
pass
class TimeField(Options, models.TimeField):
"""
TimeField with custom DISBi options.
"""
pass
class URLField(Options, models.URLField):
"""
URLField with custom DISBi options.
"""
pass
class UUIDField(Options, models.UUIDField):
"""
UUIDField with custom DISBi options.
"""
pass
class ForeignKey(RelationshipOptions, models.ForeignKey):
"""
ForeignKey with custom DISBi options.
"""
pass
class ManyToManyField(RelationshipOptions, models.ManyToManyField):
"""
ManyToManyField with custom DISBi options.
"""
pass
class OneToOneField(RelationshipOptions, models.OneToOneField):
"""
OneToOneField with custom DISBi options.
"""
pass
class NullBooleanField(ExcludeOptions, models.NullBooleanField):
"""
NullBooleanField with custom DISBi and exclude options.
"""
pass
class BooleanField(ExcludeOptions, models.BooleanField):
"""
BooleanField with custom DISBi and exclude options.
"""
pass
class EmptyCharField(Options, models.CharField):
"""
FloatField with custom DISBi options and the option to add an
empty value displayer.
"""
def __init__(self, di_empty=None, di_show=True, di_display_name=None, di_hr_primary_key=False,
di_choose=False, di_combinable=False,
*args, **kwargs):
self.di_empty = di_empty
super().__init__(di_show, di_display_name, di_hr_primary_key, di_choose, di_combinable,
*args, **kwargs)
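# Declaration sketch (illustrative; the model and option values are
# assumptions, not taken from a real DISBi app):
#
#   class Metabolite(models.Model):
#       name = CharField(max_length=255, di_show=True, di_hr_primary_key=True)
#       mass = FloatField(di_show=True, di_display_name='Monoisotopic mass')
#       note = EmptyCharField(max_length=255, blank=True, di_empty='-')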
| mit | 7,004,359,213,430,258,000 | 23.898955 | 99 | 0.626784 | false | 4.346715 | false | false | false |
openhumanoids/oh-distro | software/ihmc/ihmc_step/translator_ihmc.py | 1 | 14123 | import lcm
import drc
import atlas
import bot_core
import time
import numpy as np
import py_drake_utils as ut
from bdi_step.footsteps import decode_footstep_plan, decode_deprecated_footstep_plan, encode_footstep_plan, FootGoal
from bdi_step.plotting import draw_swing
from bdi_step.utils import Behavior, gl, now_utime
NUM_REQUIRED_WALK_STEPS = 4
PLAN_UPDATE_TIMEOUT = 20 # maximum time allowed between a footstep plan and an 'update' which appends more steps to that plan
# Experimentally determined vector relating BDI's frame for foot position to ours. This is the xyz vector from the position of the foot origin (from drake forwardKin) to the BDI Atlas foot pos estimate, expressed in the frame of the foot.
ATLAS_FRAME_OFFSET = np.array([0.0400, 0.000, -0.0850])
def blank_step_spec():
msg = atlas.behavior_step_spec_t()
msg.foot = atlas.behavior_foot_data_t()
msg.action = atlas.behavior_step_action_t()
return msg
def blank_walk_spec():
msg = atlas.behavior_walk_spec_t()
msg.foot = atlas.behavior_foot_data_t()
msg.action = atlas.behavior_walk_action_t()
return msg
class Mode:
translating = 0
plotting = 1
class IHMCStepTranslator(object):
def __init__(self, mode=Mode.translating, safe=True):
self.mode = mode
self.safe = safe # Don't send atlas behavior commands (to ensure that the robot never starts walking accidentally when running tests)
self.lc = lcm.LCM()
if self.mode == Mode.plotting:
self.gl = gl
else:
self.gl = None
self.bdi_step_queue_in = []
self.delivered_index = None
self.use_spec = True
self.drift_from_plan = np.zeros((3,1))
self.behavior = Behavior.BDI_STEPPING
self.T_local_to_localbdi = bot_core.rigid_transform_t()
self.T_local_to_localbdi.trans = np.zeros(3)
self.T_local_to_localbdi.quat = ut.rpy2quat([0,0,0])
self.last_params = None
self.executing = False
self.last_footstep_plan_time = -np.inf
def handle_bdi_transform(self, channel, msg):
if isinstance(msg, str):
msg = bot_core.rigid_transform_t.decode(msg)
self.T_local_to_localbdi = msg
def handle_footstep_plan(self, channel, msg):
if isinstance(msg, str):
msg = drc.footstep_plan_t.decode(msg)
footsteps, opts = decode_footstep_plan(msg)
self.last_params = msg.params
if len(footsteps) <= 2:
# the first two footsteps are always just the positions of the robot's feet, so a plan of two or fewer footsteps is a no-op
print 'BDI step translator: Empty plan recieved. Not executing.'
return
behavior = opts['behavior']
#if behavior == Behavior.BDI_WALKING:
# # duration = 0.6
# if len(footsteps) < NUM_REQUIRED_WALK_STEPS+2:
# print 'ERROR: Footstep plan must be at least 4 steps for BDI walking translation'
# return
#elif behavior != Behavior.BDI_STEPPING:
# print "BDI step translator: Ignoring footstep plan without BDI_WALKING or BDI_STEPPING behavior"
# return
self.behavior = behavior
now = time.time()
if now - self.last_footstep_plan_time > PLAN_UPDATE_TIMEOUT:
self.executing = False
self.last_footstep_plan_time = now
if self.mode == Mode.plotting:
self.draw(footsteps)
else:
#if not self.executing:
print "Starting new footstep plan"
self.bdi_step_queue_in = footsteps
self.send_params(1)
if not self.safe:
m = "BDI step translator: Steps received; transitioning to {:s}".format("BDI_STEP" if self.behavior == Behavior.BDI_STEPPING else "BDI_WALK")
print m
ut.send_status(6,0,0,m)
time.sleep(1)
self.executing = True
self.send_behavior()
else:
m = "BDI step translator: Steps received; in SAFE mode; not transitioning to {:s}".format("BDI_STEP" if self.behavior == Behavior.BDI_STEPPING else "BDI_WALK")
print m
ut.send_status(6,0,0,m)
#else:
# print "Got updated footstep plan"
# if self.bdi_step_queue_in[self.delivered_index-1].is_right_foot == footsteps[0].is_right_foot:
# print "Re-aligning new footsteps to current plan"
# self.bdi_step_queue_in = self.bdi_step_queue_in[:self.delivered_index-1] + footsteps
# else:
# print "Can't align the updated plan to the current plan"
# return
@property
def bdi_step_queue_out(self):
bdi_step_queue_out = [s.copy() for s in self.bdi_step_queue_in]
for step in bdi_step_queue_out:
# Transform to BDI coordinate frame
T1 = ut.mk_transform(step.pos[:3], step.pos[3:])
T2 = ut.mk_transform(self.T_local_to_localbdi.trans, ut.quat2rpy(self.T_local_to_localbdi.quat))
T = T2.dot(T1)
step.pos[:3] = T[:3,3]
step.pos[3:] = ut.rotmat2rpy(T[:3,:3])
self.lc.publish('BDI_ADJUSTED_FOOTSTEP_PLAN', encode_footstep_plan(bdi_step_queue_out, self.last_params).encode())
for step in bdi_step_queue_out:
# Express pos of the center of the foot, as expected by BDI
R = ut.rpy2rotmat(step.pos[3:])
offs = R.dot(ATLAS_FRAME_OFFSET)
# import pdb; pdb.set_trace()
step.pos[:3] += offs
for i in reversed(range(2, len(bdi_step_queue_out))):
bdi_step_queue_out[i].pos[2] -= bdi_step_queue_out[i-1].pos[2]
return [s.to_bdi_spec(self.behavior, j+1) for j, s in enumerate(bdi_step_queue_out[2:])]
def handle_atlas_status(self, channel, msg):
if (not self.executing) or self.mode != Mode.translating:
return
if isinstance(msg, str):
msg = atlas.status_t.decode(msg)
if self.behavior == Behavior.BDI_WALKING:
index_needed = msg.walk_feedback.next_step_index_needed
# if (self.delivered_index + 1) < index_needed <= len(self.bdi_step_queue_in) - 4:
if index_needed <= len(self.bdi_step_queue_in) - 4:
#print "Handling request for next step: {:d}".format(index_needed)
self.send_params(index_needed-1)
else:
self.executing = False
else:
index_needed = msg.step_feedback.next_step_index_needed
if index_needed > 1 and index_needed > self.delivered_index:
# we're starting a new step, so publish the expected double support configuration
self.send_expected_double_support()
# if self.delivered_index < index_needed <= len(self.bdi_step_queue_in) - 2:
if index_needed <= len(self.bdi_step_queue_in) - 2:
# print "Handling request for next step: {:d}".format(index_needed)
self.send_params(index_needed)
else:
print "done executing"
self.executing = False
# Report progress through the footstep plan execution (only when stepping)
progress_msg = drc.footstep_plan_progress_t()
progress_msg.utime = msg.utime
progress_msg.num_steps = len(self.bdi_step_queue_in) - 2
progress_msg.current_step = index_needed - 1
self.lc.publish('FOOTSTEP_PLAN_PROGRESS', progress_msg.encode())
def send_params(self,step_index,force_stop_walking=False):
"""
Publish the next steppping footstep or up to the next 4 walking footsteps as needed.
"""
assert self.mode == Mode.translating, "Translator in Mode.plotting mode is not allowed to send step/walk params"
if self.behavior == Behavior.BDI_WALKING:
walk_param_msg = atlas.behavior_walk_params_t()
walk_param_msg.num_required_walk_steps = NUM_REQUIRED_WALK_STEPS
walk_param_msg.walk_spec_queue = self.bdi_step_queue_out[step_index-1:step_index+3]
walk_param_msg.step_queue = [atlas.step_data_t() for j in range(NUM_REQUIRED_WALK_STEPS)] # Unused
walk_param_msg.use_spec = True
walk_param_msg.use_relative_step_height = 1 # as of Atlas 2.5.0 this flag is disabled and always acts as if it's set to 1
walk_param_msg.use_demo_walk = 0
if force_stop_walking:
for step in walk_param_msg.walk_spec_queue:
step.step_index = -1
self.lc.publish('ATLAS_WALK_PARAMS', walk_param_msg.encode())
self.delivered_index = walk_param_msg.walk_spec_queue[0].step_index
#print "Sent walk params for step indices {:d} through {:d}".format(walk_param_msg.walk_spec_queue[0].step_index, walk_param_msg.walk_spec_queue[-1].step_index)
elif self.behavior == Behavior.BDI_STEPPING:
step_param_msg = atlas.behavior_step_params_t()
step_param_msg.desired_step = atlas.step_data_t() # Unused
step_param_msg.desired_step_spec = self.bdi_step_queue_out[step_index-1]
step_param_msg.use_relative_step_height = 1 # as of Atlas 2.5.0 this flag is disabled and always acts as if it's set to 1
step_param_msg.use_demo_walk = 0
step_param_msg.use_spec = True
step_param_msg.desired_step = atlas.step_data_t() # Unused
step_param_msg.desired_step_spec = self.bdi_step_queue_out[step_index-1]
if force_stop_walking:
step_param_msg.desired_step_spec.step_index = -1
self.lc.publish('ATLAS_STEP_PARAMS', step_param_msg.encode())
self.delivered_index = step_param_msg.desired_step_spec.step_index
#print "Sent step params for step index {:d}".format(step_param_msg.desired_step_spec.step_index)
else:
raise ValueError("Bad behavior value: {:s}".format(self.behavior))
def send_expected_double_support(self):
"""
Publish the next expected double support configuration as a two-element footstep plan to support continuous replanning mode.
"""
self.lc.publish('NEXT_EXPECTED_DOUBLE_SUPPORT', encode_footstep_plan(self.bdi_step_queue_in[self.delivered_index:self.delivered_index+2], self.last_params).encode())
def send_behavior(self):
command_msg = atlas.behavior_command_t()
command_msg.utime = now_utime()
if self.behavior == Behavior.BDI_STEPPING:
command_msg.command = "step"
elif self.behavior == Behavior.BDI_WALKING:
command_msg.command = "walk"
else:
raise ValueError("Tried to send invalid behavior to Atlas: {:s}".format(self.behavior))
self.lc.publish("ATLAS_BEHAVIOR_COMMAND", command_msg.encode())
def handle_stop_walking(self, channel, msg_data):
"""
Generate a set of footsteps with -1 step indices, which will cause the BDI controller to switch to standing instead of continuing to walk
"""
if self.behavior == Behavior.BDI_WALKING:
n_steps = 6
else:
n_steps = 3
footsteps = [FootGoal(pos=np.zeros((6)),
step_speed=0,
step_height=0,
step_id=0,
pos_fixed=np.zeros((6,1)),
is_right_foot=0,
is_in_contact=0,
bdi_step_duration=0,
bdi_sway_duration=0,
bdi_lift_height=0,
bdi_toe_off=0,
bdi_knee_nominal=0,
bdi_max_body_accel=0,
bdi_max_foot_vel=0,
bdi_sway_end_dist=-1,
bdi_step_end_dist=-1,
terrain_pts=np.matrix([]))] * n_steps
self.bdi_step_queue_in = footsteps
self.send_params(1, force_stop_walking=True)
self.bdi_step_queue_in = [] # to prevent infinite spewing of -1 step indices
self.delivered_index = None
self.executing = False
def run(self):
if self.mode == Mode.translating:
print "IHMCStepTranslator running in robot-side translator mode"
self.lc.subscribe('COMMITTED_FOOTSTEP_PLAN', self.handle_footstep_plan)
#self.lc.subscribe('STOP_WALKING', self.handle_stop_walking)
else:
print "IHMCStepTranslator running in base-side plotter mode"
self.lc.subscribe('FOOTSTEP_PLAN_RESPONSE', self.handle_footstep_plan)
#self.lc.subscribe('CANDIDATE_BDI_FOOTSTEP_PLAN', self.handle_footstep_plan)
#self.lc.subscribe('BDI_ADJUSTED_FOOTSTEP_PLAN', self.handle_footstep_plan)
#self.lc.subscribe('ATLAS_STATUS', self.handle_atlas_status)
self.lc.subscribe('LOCAL_TO_LOCAL_BDI', self.handle_bdi_transform)
while True:
self.lc.handle()
def draw(self, footsteps):
"""
Plot a rough guess of each swing foot trajectory, based on the BDI software manual's description of how swing_height and lift_height behave.
"""
for j in range(len(footsteps)-2):
st0 = footsteps[j].to_bdi_spec(self.behavior, 0)
st1 = footsteps[j+2].to_bdi_spec(self.behavior, 0)
is_stepping = self.behavior==Behavior.BDI_STEPPING
if is_stepping:
lift_height = st1.action.lift_height
else:
lift_height = None
draw_swing(self.gl,
st0.foot.position,
st1.foot.position,
st1.action.swing_height,
is_stepping=is_stepping,
lift_height=lift_height)
self.gl.switch_buffer()
| bsd-3-clause | -3,496,072,450,974,365,000 | 45.920266 | 238 | 0.591517 | false | 3.521067 | false | false | false |
albert12132/templar | templar/api/rules/core.py | 1 | 4373 | """
The public API for Templar pre/post-processor rules.
Users can use this module with the following import statement:
from templar.api.rules import core
"""
from templar.exceptions import TemplarError
import re
class Rule:
"""Represents a preprocessor or postprocessor rule. Rules are applied in the order that they
are listed in the Config.
When constructing a rule, the arguments `src` and `dst` are regular expressions; Templar will
only apply a rule if the source and destination of the publishing pipeline match the regexes.
"""
def __init__(self, src=None, dst=None):
if src is not None and not isinstance(src, str):
raise InvalidRule(
"Rule's source pattern must be a string or None, "
"but was type '{}'".format(type(src).__name__))
if dst is not None and not isinstance(dst, str):
raise InvalidRule(
"Rule's destination pattern must be a string or None, "
"but was type '{}'".format(type(src).__name__))
self._src_pattern = src
self._dst_pattern = dst
def applies(self, src, dst):
"""Checks if this rule applies to the given src and dst paths, based on the src pattern and
dst pattern given in the constructor.
If src pattern was None, this rule will apply to any given src path (same for dst).
"""
if self._src_pattern and (src is None or re.search(self._src_pattern, src) is None):
return False
elif self._dst_pattern and (dst is None or re.search(self._dst_pattern, dst) is None):
return False
return True
def apply(self, content):
"""Applies this rule to the given content. A rule can do one or more of the following:
- Return a string; this is taken to be the transformed version of content, and will be used
as the new content after applying this rule.
- Modify variables (a dict). Usually, Rules that modify this dictionary will add new
variables. However, a Rule can also delete or update key/value pairs in the dictionary.
"""
raise NotImplementedError
class SubstitutionRule(Rule):
"""An abstract class that represents a rule that transforms the content that is being processed,
based on a regex pattern and a substitution function. The substitution behaves exactly like
re.sub.
"""
pattern = None # Subclasses should override this variable with a string or compiled regex.
def substitute(self, match):
"""A substitution function that returns the text with which to replace the given match.
Subclasses should implement this method.
"""
raise InvalidRule(
'{} must implement the substitute method to be '
'a valid SubstitutionRule'.format(type(self).__name__))
def apply(self, content):
if isinstance(self.pattern, str):
return re.sub(self.pattern, self.substitute, content)
elif hasattr(self.pattern, 'sub') and callable(self.pattern.sub):
return self.pattern.sub(self.substitute, content)
raise InvalidRule(
"{}'s pattern has type '{}', but expected a string or "
"compiled regex.".format(type(self).__name__, type(self.pattern).__name__))
class VariableRule(Rule):
"""An abstract class that represents a rule that constructs variables given the content. For
VariableRules, the apply method returns a dictionary mapping str -> str instead of returning
transformed content (a string).
"""
def extract(self, content):
"""A substitution function that returns the text with which to replace the given match.
Subclasses should implement this method.
"""
raise InvalidRule(
'{} must implement the extract method to be '
'a valid VariableRule'.format(type(self).__name__))
def apply(self, content):
variables = self.extract(content)
if not isinstance(variables, dict):
raise InvalidRule(
"{} is a VariableRule, so its extract method should return a dict. Instead, it "
"returned type '{}'".format(type(self).__name__, type(variables).__name__))
return variables
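# Hedged usage sketch: the two subclasses below are illustrative examples only,
# not part of templar's shipped rule set. A SubstitutionRule supplies a regex
# `pattern` plus a `substitute` callback; a VariableRule returns a dict of
# extracted variables from its `extract` method.
class UpperCaseTodoRule(SubstitutionRule):
    pattern = r'\btodo\b'
    def substitute(self, match):
        # Replace each standalone "todo" with its upper-case form.
        return match.group(0).upper()
class TitleVariableRule(VariableRule):
    def extract(self, content):
        # Assumes the first line of the content acts as its title.
        return {'title': content.splitlines()[0] if content else ''}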
class InvalidRule(TemplarError):
pass
| mit | 298,352,553,187,532,500 | 40.647619 | 100 | 0.645095 | false | 4.707212 | false | false | false |
katakumpo/niceredis | niceredis/client/server.py | 1 | 6771 | # -*- coding: utf-8 -*-
import warnings
from redis.connection import Token
from redis.exceptions import ConnectionError, RedisError
from .base import RedisBase
class ServerCommands(RedisBase):
# SERVER INFORMATION
def bgrewriteaof(self):
"Tell the Redis server to rewrite the AOF file from data in memory."
return self.execute_command('BGREWRITEAOF')
def bgsave(self):
"""
Tell the Redis server to save its data to disk. Unlike save(),
this method is asynchronous and returns immediately.
"""
return self.execute_command('BGSAVE')
def client_kill(self, address):
"Disconnects the client at ``address`` (ip:port)"
return self.execute_command('CLIENT KILL', address)
def client_list(self):
"Returns a list of currently connected clients"
return self.execute_command('CLIENT LIST')
def client_getname(self):
"Returns the current connection name"
return self.execute_command('CLIENT GETNAME')
def client_setname(self, name):
"Sets the current connection name"
return self.execute_command('CLIENT SETNAME', name)
def config_get(self, pattern="*"):
"Return a dictionary of configuration based on the ``pattern``"
return self.execute_command('CONFIG GET', pattern)
def config_set(self, name, value):
"Set config item ``name`` with ``value``"
return self.execute_command('CONFIG SET', name, value)
def config_resetstat(self):
"Reset runtime statistics"
return self.execute_command('CONFIG RESETSTAT')
def config_rewrite(self):
"Rewrite config file with the minimal change to reflect running config"
return self.execute_command('CONFIG REWRITE')
def dbsize(self):
"Returns the number of keys in the current database"
return self.execute_command('DBSIZE')
def debug_object(self, key):
"Returns version specific meta information about a given key"
return self.execute_command('DEBUG OBJECT', key)
def echo(self, value):
"Echo the string back from the server"
return self.execute_command('ECHO', value)
def flushall(self):
"Delete all keys in all databases on the current host"
return self.execute_command('FLUSHALL')
def flushdb(self):
"Delete all keys in the current database"
return self.execute_command('FLUSHDB')
def info(self, section=None):
"""
Returns a dictionary containing information about the Redis server
The ``section`` option can be used to select a specific section
of information
The section option is not supported by older versions of Redis Server,
and will generate ResponseError
"""
if section is None:
return self.execute_command('INFO')
else:
return self.execute_command('INFO', section)
def lastsave(self):
"""
Return a Python datetime object representing the last time the
Redis database was saved to disk
"""
return self.execute_command('LASTSAVE')
def object(self, infotype, key):
"Return the encoding, idletime, or refcount about the key"
return self.execute_command('OBJECT', infotype, key, infotype=infotype)
def ping(self):
"Ping the Redis server"
return self.execute_command('PING')
def save(self):
"""
Tell the Redis server to save its data to disk,
blocking until the save is complete
"""
return self.execute_command('SAVE')
def sentinel(self, *args):
"Redis Sentinel's SENTINEL command."
warnings.warn(
DeprecationWarning('Use the individual sentinel_* methods'))
def sentinel_get_master_addr_by_name(self, service_name):
"Returns a (host, port) pair for the given ``service_name``"
return self.execute_command('SENTINEL GET-MASTER-ADDR-BY-NAME',
service_name)
def sentinel_master(self, service_name):
"Returns a dictionary containing the specified masters state."
return self.execute_command('SENTINEL MASTER', service_name)
def sentinel_masters(self):
"Returns a list of dictionaries containing each master's state."
return self.execute_command('SENTINEL MASTERS')
def sentinel_monitor(self, name, ip, port, quorum):
"Add a new master to Sentinel to be monitored"
return self.execute_command('SENTINEL MONITOR', name, ip, port, quorum)
def sentinel_remove(self, name):
"Remove a master from Sentinel's monitoring"
return self.execute_command('SENTINEL REMOVE', name)
def sentinel_sentinels(self, service_name):
"Returns a list of sentinels for ``service_name``"
return self.execute_command('SENTINEL SENTINELS', service_name)
def sentinel_set(self, name, option, value):
"Set Sentinel monitoring parameters for a given master"
return self.execute_command('SENTINEL SET', name, option, value)
def sentinel_slaves(self, service_name):
"Returns a list of slaves for ``service_name``"
return self.execute_command('SENTINEL SLAVES', service_name)
def shutdown(self):
"Shutdown the server"
try:
self.execute_command('SHUTDOWN')
except ConnectionError:
# a ConnectionError here is expected
return
raise RedisError("SHUTDOWN seems to have failed.")
def slaveof(self, host=None, port=None):
"""
Set the server to be a replicated slave of the instance identified
by the ``host`` and ``port``. If called without arguments, the
instance is promoted to a master instead.
"""
if host is None and port is None:
return self.execute_command('SLAVEOF', Token('NO'), Token('ONE'))
return self.execute_command('SLAVEOF', host, port)
def slowlog_get(self, num=None):
"""
Get the entries from the slowlog. If ``num`` is specified, get the
most recent ``num`` items.
"""
args = ['SLOWLOG GET']
if num is not None:
args.append(num)
return self.execute_command(*args)
def slowlog_len(self):
"Get the number of items in the slowlog"
return self.execute_command('SLOWLOG LEN')
def slowlog_reset(self):
"Remove all items in the slowlog"
return self.execute_command('SLOWLOG RESET')
def time(self):
"""
Returns the server time as a 2-item tuple of ints:
(seconds since epoch, microseconds into this second).
"""
return self.execute_command('TIME')
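# Hedged usage sketch: ServerCommands is a mixin over RedisBase, so these
# methods surface on whatever concrete client composes them. The client class
# below is an assumption for illustration, not necessarily this package's API:
#     client = Redis(host='localhost', port=6379)
#     client.ping()                   # -> True
#     client.config_get('maxmemory')  # -> {'maxmemory': '0'}
#     client.slowlog_get(10)          # last 10 slowlog entries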
| mit | 1,597,255,999,482,688,300 | 34.082902 | 79 | 0.640526 | false | 4.304514 | true | false | false |
asdil12/pywikibase | things.py | 1 | 4025 | #!/usr/bin/python2
import re
class BaseValue(object):
def __init__(self, value):
self.value = value
def __str__(self):
        return self.value
def __repr__(self):
return "<%s object: %s>" % (self.__class__.__name__, self.__str__())
def to_value(self):
return self.__str__()
# Note:
# to_value: generates value as expected by set_claim (py obj)
# from_value: expects datavalue.value as provided by get_claims (py obj)
class Property(BaseValue):
def __init__(self, id):
if isinstance(id, str):
self.id = int(id.upper().replace("P", ""))
else:
self.id = id
def __str__(self):
return "P%i" % self.id
def to_value(self):
return {"entity-type": "property", "numeric-id": self.id}
@classmethod
def from_value(cls, value):
assert value["entity-type"] == "property"
return cls(value["numeric-id"])
class Item(BaseValue):
def __init__(self, id):
if isinstance(id, str):
self.id = int(id.upper().replace("Q", ""))
else:
self.id = id
def __str__(self):
return "Q%i" % self.id
def to_value(self):
return {"entity-type": "item", "numeric-id": self.id}
@classmethod
def from_value(cls, value):
# ok this is ugly...
if value["entity-type"] == "property":
return Property.from_value(value)
assert value["entity-type"] == "item"
return cls(value["numeric-id"])
class String(BaseValue):
def __str__(self):
return self.value
def to_value(self):
return self.value
@classmethod
def from_value(cls, value):
return cls(value)
class Time(BaseValue):
# wikibase uses a datetime format based on ISO8601
# eg: +00000002013-01-01T00:00:00Z
iso8601_re = re.compile(r"(?P<ysign>[\+\-])(?P<year>\d+)-(?P<month>\d+)-(?P<day>\d+)T(?P<hour>\d+):(?P<minute>\d+):(?P<second>\d+)Z")
def __init__(self, time, timezone=0, before=0, after=0, precision=11, calendarmodel="http://www.wikidata.org/entity/Q1985727"):
self.time = time
self.timezone = timezone
self.before = before
self.after = after
self.precision = precision # FIXME: allow string input
self.calendarmodel = calendarmodel
def __str__(self):
return self.to_value()["time"]
def to_value(self):
ysign = '+' if self.time["year"] >= 0 else '-'
value_out = {
"time": ysign + "%(year)011i-%(month)02i-%(day)02iT%(hour)02i:%(minute)02i:%(second)02iZ" % self.time,
"timezone": self.timezone,
"before": self.before,
"after": self.after,
"precision": self.precision,
"calendarmodel": self.calendarmodel,
}
return value_out
@classmethod
def from_value(cls, value):
        # FIXME: raise a proper error when the regex does not match (match() returns None)
time_raw = Time.iso8601_re.match(value["time"]).groupdict()
value_in = {
"time": {
"year": int("%(ysign)s%(year)s" % time_raw),
"month": int(time_raw["month"]),
"day": int(time_raw["day"]),
"hour": int(time_raw["hour"]),
"minute": int(time_raw["minute"]),
"second": int(time_raw["second"]),
},
"timezone": value["timezone"],
"before": value["before"],
"after": value["after"],
"precision": value["precision"],
"calendarmodel": value["calendarmodel"],
}
return cls(**value_in)
class GlobeCoordinate(BaseValue):
def __init__(self, latitude, longitude, precision=0.000001, globe="http://www.wikidata.org/entity/Q2"):
self.latitude = latitude
self.longitude = longitude
self.precision = precision # in degrees (or fractions of)
self.globe = globe
def __str__(self):
return "%f, %f" % (self.latitude, self.longitude)
def to_value(self):
value_out = {
"latitude": self.latitude,
"longitude": self.longitude,
"precision": self.precision,
"globe": self.globe,
}
return value_out
@classmethod
def from_value(cls, value):
try:
del value['altitude']
except KeyError:
pass
return cls(**value)
# datavalue.type -> type class
types = {
"wikibase-entityid": Item, # or Property
"string": String,
"time": Time,
"globecoordinate": GlobeCoordinate,
}
def thing_from_datavalue(datavalue):
return types[datavalue["type"]].from_value(datavalue["value"])
| gpl-3.0 | -1,003,805,324,486,820,000 | 24.636943 | 134 | 0.643478 | false | 2.877055 | false | false | false |
adrgerez/ardublockly | package/build_pyinstaller.py | 1 | 9203 | #!/usr/bin/env python2
# -*- coding: utf-8 -*- #
#
# Builds the Ardublockly Python portion of the app for Linux or OS X.
#
# Copyright (c) 2015 carlosperate https://github.com/carlosperate/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# IMPORTANT: This script is designed to be located one directory level under the
# project root folder.
#
# This script file uses PyInstaller to create a self contained executable
# build of the Ardublockly application.
# It will remove the build folders left from PyInstaller and move the folder
# with the executable application into the project root folder.
#
# Due to all the debugging steps required to get a CI server running properly
# this script ended up being quite verbose. It might be updated in the future
# to include a -v flag to select a verbose mode.
from __future__ import unicode_literals, print_function
import os
import sys
import shutil
import platform
import subprocess
from glob import glob
spec_coll_name = "server"
if platform.system() == "Darwin":
exec_folder = "arduexec.app"
else:
exec_folder = "arduexec"
py_exec_folder = os.path.join(exec_folder, "server")
script_tag = "[Ardublockly build] "
script_tab = " "
# The project_root_dir depends on the location of this file, so it cannot be
# moved without updating this line
project_root_dir = \
os.path.dirname( # going up 1 level
os.path.dirname(os.path.realpath(__file__))) # folder dir of this
# verbose_print = print if verbose else lambda *a, **k: None
def remove_directory(dir_to_remove):
""" Removes the a given directory. """
if os.path.exists(dir_to_remove):
print(script_tab + "Removing directory %s" % dir_to_remove)
shutil.rmtree(dir_to_remove)
else:
print(script_tab + "Directory %s was not found." % dir_to_remove)
def get_os():
"""
    Gets the OS based on the command line argument or, failing that, the platform info.
Only possibilities are: "windows", "mac", "linux"
"""
valid_os = ["windows", "linux", "mac"]
print(script_tab + "Checking for command line argument indicated OS:")
if len(sys.argv) > 1:
if sys.argv[1] in valid_os:
# Take the first argument and use it as the os
print(script_tab + "Valid command line argument found: %s" %
sys.argv[1])
return "%s" % sys.argv[1]
else:
print(script_tab + "Invalid command line argument found: %s\n" %
sys.argv[1] + script_tab + "Options available: %s" % valid_os)
print(script_tab + "Valid command line arg not found, checking system.")
os_found = platform.system()
if os_found == "Windows":
raise SystemExit(script_tab + "OS found is: %s\n" % valid_os[0] +
"Exit: This script is not design to run on Windows.")
elif os_found == "Linux":
print(script_tab + "OS found is: %s" % valid_os[1])
return valid_os[1]
elif os_found == "Darwin":
print(script_tab + "OS found is: %s" % valid_os[2])
return valid_os[2]
else:
raise SystemExit("Exit: OS data found is invalid '%s'" % os_found)
def remove_pyinstaller_temps():
"""
Removes the temporary folders created by PyInstaller (dist and build).
"""
remove_directory(os.path.join(os.getcwd(), "dist"))
remove_directory(os.path.join(os.getcwd(), "build"))
def pyinstaller_build():
"""
Launches a subprocess running Python PyInstaller with the spec file from the
package folder. Captures the output streams and checks for errors.
:return: Boolean indicating the success state of the operation.
"""
process_args = [
"python",
"%s" % os.path.join("package", "pyinstaller", "pyinstaller.py"),
"%s" % os.path.join("package", "pyinstaller.spec")]
print(script_tab + "Command: %s" % process_args)
pyinstaller_process = subprocess.Popen(process_args)
std_op, std_err_op = pyinstaller_process.communicate()
if pyinstaller_process.returncode != 0:
print(script_tab + "ERROR: PyInstaller returned with exit code: %s" %
pyinstaller_process.returncode)
return False
return True
def move_executable_folder():
"""
Moves the PyInstaller executable folder from dist to project root.
:return: Boolean indicating the success state of the operation.
"""
original_exec_dir = os.path.join(project_root_dir, "dist", spec_coll_name)
final_exec_dir = os.path.join(project_root_dir, py_exec_folder)
if os.path.exists(original_exec_dir):
print(script_tab + "Moving exec files from %s \n" % original_exec_dir +
script_tab + "to %s" % final_exec_dir)
shutil.move(original_exec_dir, final_exec_dir)
else:
print(script_tab + "ERROR: PyInstaller executable output folder '%s' " %
original_exec_dir + "not found!")
return False
return True
def copy_data_files(os_type):
""" At the moment there are no additional data files required to copy """
pass
def create_shell_file(os_type):
"""
    Creates a shell script file in the project root to be able to easily launch
the Ardublockly application.
The Mac OS X build runs directly from clicking the .app folder, so it no
longer needs a shell script.
"""
shell_text = ""
shell_location = ""
# The script depends on platform
if os_type == "mac":
# There is no need for a shell file in Mac OS X
print(script_tab + "There is no need to create shell file in Mac OS X.")
return
elif os_type == "linux":
shell_text = '#!/bin/bash\n' \
'DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )\n' \
'echo "[Shell Launch Script] Executing from: $DIR"\n' \
'./%s' % os.path.join(exec_folder, "ardublockly")
shell_location = os.path.join(
project_root_dir, "ardublockly_run.sh")
else:
# No other OS expected, so just return. This should never happen
return
try:
print(script_tab + "Creating shell file into %s" % shell_location)
bash_file = open(shell_location, "w")
bash_file.write(shell_text)
bash_file.close()
except Exception as e:
print(script_tab + "%s" % e)
print(script_tab + "ERROR: Shell file to launch the Ardublockly "
"application could not be created.")
# Make shell script executable by launching a subprocess
process_args = ["chmod", "+x", "%s" % shell_location]
print(script_tab + "Command to make executable: %s" % process_args)
try:
pyinstaller_process = subprocess.Popen(process_args)
std_op, std_err_op = pyinstaller_process.communicate()
except Exception as e:
print(script_tab + "%s" % e)
print(script_tab + "ERROR: Could not make Shell file executable.")
def build_ardublockly():
print(script_tag + "Build procedure started.")
print(script_tag + "Checking for OS.")
os_type = get_os()
print(script_tag + "Building Ardublockly for %s." % os_type)
print(script_tag + "Project directory is: %s" % project_root_dir)
print(script_tag + "Script working directory: %s" % os.getcwd())
print(script_tag + "Removing PyInstaller old temp directories.")
remove_pyinstaller_temps()
print(script_tag + "Running PyInstaller process.")
success = pyinstaller_build()
if not success:
print(script_tab + "Removing PyInstaller recent temp directories.")
remove_pyinstaller_temps()
raise SystemExit(script_tab + "Exiting as there was an error in the "
"PyInstaller execution.")
print(script_tag + "Removing old ardublockly executable directory.")
remove_directory(os.path.join(project_root_dir, py_exec_folder))
print(script_tag + "Moving executable folder to project root.")
success = move_executable_folder()
if not success:
print(script_tab + "Removing PyInstaller recent temp directories.")
remove_pyinstaller_temps()
raise SystemExit(script_tab + "Exiting now as there was an error in "
"the PyInstaller execution.")
print(script_tag + "Coping data files into executable directory.")
copy_data_files(os_type)
print(script_tag + "Removing PyInstaller recent temp directories.")
remove_pyinstaller_temps()
print(script_tag + "Creating shell file to easily execute Ardublockly.")
create_shell_file(os_type)
if __name__ == "__main__":
build_ardublockly()
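# Hedged usage sketch: run from the project root, optionally forcing the OS
# with one of the arguments accepted by get_os() above:
#     $ python package/build_pyinstaller.py linux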
| apache-2.0 | 2,858,567,297,311,880,700 | 36.563265 | 80 | 0.641856 | false | 3.795052 | false | false | false |
iulian787/spack | var/spack/repos/builtin/packages/cctools/package.py | 2 | 4409 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Cctools(AutotoolsPackage):
"""The Cooperative Computing Tools (cctools) enable large scale
distributed computations to harness hundreds to thousands of
machines from clusters, clouds, and grids.
"""
homepage = "https://cctools.readthedocs.io"
url = "https://ccl.cse.nd.edu/software/files/cctools-7.1.5-source.tar.gz"
version('7.1.7', sha256='63cbfabe52591d41a1b27040bf27700d2a11b2f30cb2e25132e0016fb1aade03')
version('7.1.5', sha256='c01415fd47a1d9626b6c556e0dc0a6b0d3cd67224fa060cabd44ff78eede1d8a')
version('7.1.3', sha256='b937878ab429dda31bc692e5d9ffb402b9eb44bb674c07a934bb769cee4165ba')
version('7.1.2', sha256='ca871e9fe245d047d4c701271cf2b868e6e3a170e8834c1887157ed855985131')
version('7.1.0', sha256='84748245db10ff26c0c0a7b9fd3ec20fbbb849dd4aadc5e8531fd1671abe7a81')
version('7.0.18', sha256='5b6f3c87ae68dd247534a5c073eb68cb1a60176a7f04d82699fbc05e649a91c2')
version('6.1.1', sha256='97f073350c970d6157f80891b3bf6d4f3eedb5f031fea386dc33e22f22b8af9d')
depends_on('openssl')
depends_on('perl+shared', type=('build', 'run'))
depends_on('python', type=('build', 'run'))
depends_on('readline')
depends_on('gettext') # Corrects python linking of -lintl flag.
depends_on('swig')
# depends_on('xrootd')
depends_on('zlib')
patch('arm.patch', when='target=aarch64:')
patch('cctools_7.0.18.python.patch', when='@7.0.18')
patch('cctools_6.1.1.python.patch', when='@6.1.1')
# Generally SYS_foo is defined to __NR_foo (sys/syscall.h) which
# is then defined to a syscall number (asm/unistd_64.h). Certain
# CentOS systems have SYS_memfd_create defined to
# __NR_memfd_create but are missing the second definition.
# This is a belt and suspenders solution to the problem.
def patch(self):
before = '#if defined(__linux__) && defined(SYS_memfd_create)'
after = '#if defined(__linux__) && defined(SYS_memfd_create) && defined(__NR_memfd_create)' # noqa: E501
f = 'dttools/src/memfdexe.c'
kwargs = {'ignore_absent': False, 'backup': True, 'string': True}
filter_file(before, after, f, **kwargs)
if self.spec.satisfies('%fj'):
makefiles = ['chirp/src/Makefile', 'grow/src/Makefile']
for m in makefiles:
filter_file('-fstack-protector-all', '', m)
def configure_args(self):
args = []
# make sure we do not pick a python outside spack:
if self.spec.satisfies('@6.1.1'):
if self.spec.satisfies('^python@3:'):
args.extend([
'--with-python3-path', self.spec['python'].prefix,
'--with-python-path', 'no'
])
elif self.spec.satisfies('^python@:2.9'):
args.extend([
'--with-python-path', self.spec['python'].prefix,
'--with-python3-path', 'no'
])
else:
args.extend([
'--with-python-path', 'no',
'--with-python3-path', 'no'
])
else:
# versions 7 and above, where --with-python-path recognized the
# python version:
if self.spec.satisfies('^python@3:'):
args.extend([
'--with-python-path', self.spec['python'].prefix,
'--with-python2-path', 'no'
])
elif self.spec.satisfies('^python@:2.9'):
args.extend([
'--with-python-path', self.spec['python'].prefix,
'--with-python3-path', 'no'
])
else:
args.extend([
'--with-python2-path', 'no',
'--with-python3-path', 'no'
])
# disable these bits
for p in ['mysql', 'xrootd']:
args.append('--with-{0}-path=no'.format(p))
# point these bits at the Spack installations
for p in ['openssl', 'perl', 'readline', 'swig', 'zlib']:
args.append('--with-{0}-path={1}'.format(p, self.spec[p].prefix))
return args
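# Hedged usage sketch (standard Spack CLI; the version below is illustrative):
#     $ spack install cctools@7.1.7
#     $ spack find --paths cctools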
| lgpl-2.1 | -6,209,909,144,982,183,000 | 42.653465 | 113 | 0.585847 | false | 3.258684 | false | false | false |
fauskanger/Pretreat | app/classes/graph/path.py | 1 | 1741 | from app.config import config
from app.pythomas import shapes as shapelib
from app.pythomas import pythomas as lib
class Path:
def __init__(self, path_nodes):
path_nodes = None if path_nodes == [None] else path_nodes
self.nodes = [] if not path_nodes else path_nodes
self.complete = False
if path_nodes:
self.complete = True
def __add__(self, other):
if self.last() is other.first():
if len(other.nodes) > 1:
return Path(self.nodes + other.nodes[1:])
return self.copy()
else:
return Path(self.nodes + other.nodes)
def add_node(self, node, index=None):
if node in self.nodes:
return False
if index is None:
self.nodes.append(node)
else:
self.nodes.insert(index, node)
return True
def remove_node(self, node):
return lib.try_remove(self.nodes, node)
def update(self, dt):
pass
def draw(self, batch=None):
pass
def delete(self):
self.nodes.clear()
def get_edge_list(self):
nodes = self.get_node_list()
edges = []
for i in range(1, self.get_count()):
edges.append((nodes[i-1], nodes[i]))
return edges
def first(self):
if not self.nodes:
return None
return self.nodes[0]
def last(self):
if not self.nodes:
return None
return self.nodes[-1]
def has_node(self, node):
return node in self.get_node_list()
def get_node_list(self):
return self.nodes
def get_count(self):
return len(self.nodes)
def copy(self):
return Path(self.nodes)
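# Hedged usage sketch (a, b, c, d stand for arbitrary graph-node objects):
#     p = Path([a, b, c])
#     q = Path([c, d])
#     (p + q).get_node_list()  # -> [a, b, c, d]
#     (p + q).get_edge_list()  # -> [(a, b), (b, c), (c, d)]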
| gpl-2.0 | 7,555,444,032,393,306,000 | 23.180556 | 65 | 0.556577 | false | 3.817982 | false | false | false |
lmazuel/azure-sdk-for-python | azure-mgmt-authorization/azure/mgmt/authorization/models/role_assignment_create_parameters.py | 1 | 1625 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class RoleAssignmentCreateParameters(Model):
"""Role assignment create parameters.
:param role_definition_id: The role definition ID used in the role
assignment.
:type role_definition_id: str
:param principal_id: The principal ID assigned to the role. This maps to
the ID inside the Active Directory. It can point to a user, service
principal, or security group.
:type principal_id: str
    :param can_delegate: The delegation flag used for creating a role
assignment
:type can_delegate: bool
"""
_attribute_map = {
'role_definition_id': {'key': 'properties.roleDefinitionId', 'type': 'str'},
'principal_id': {'key': 'properties.principalId', 'type': 'str'},
'can_delegate': {'key': 'properties.canDelegate', 'type': 'bool'},
}
def __init__(self, **kwargs):
super(RoleAssignmentCreateParameters, self).__init__(**kwargs)
self.role_definition_id = kwargs.get('role_definition_id', None)
self.principal_id = kwargs.get('principal_id', None)
self.can_delegate = kwargs.get('can_delegate', None)
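# Hedged usage sketch (the IDs below are placeholders, not real resource IDs):
#     parameters = RoleAssignmentCreateParameters(
#         role_definition_id='/subscriptions/<sub>/providers/'
#                            'Microsoft.Authorization/roleDefinitions/<id>',
#         principal_id='<aad-object-id>',
#         can_delegate=False)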
| mit | -6,886,032,258,312,350,000 | 39.625 | 84 | 0.622154 | false | 4.391892 | false | false | false |
lucidmotifs/newtopia | newtopia/ntgame/models/effect.py | 1 | 3504 | # python modules
from enum import Enum
# django modules
from django.db import models
# nt modules
from .province import Province
from .infrastructure import Building
# meta
from ntmeta.models import Entity
class Effect(models.Model):
""" The core component of province change """
""" e.g. Peasant Growth - would signify that applying this effect,
with a given magnitude would impact how fast peasants grow per turn."""
name = models.CharField(max_length=40, unique=False)
""" The entity that generated the effect """
entity = models.ForeignKey(Entity,
on_delete=models.CASCADE,
null=False,
blank=False)
""" Code used to identify the effect, like a key. HASH? """
tag = models.CharField(max_length=40, unique=True)
def __str__(self):
return self.name
class Instance(models.Model):
""" An instance of an effect that can be applied to a building or spell. """
class EffectType(Enum):
DELAYED = 1
IMMEDIATE = 2
OVER_TIME = 3
NEXT_TURN = 4
""" The related effect """
effect = models.ForeignKey(Effect,
on_delete=models.CASCADE,
null=False,
blank=False)
""" Determines the type of application produced """
effect_type = models.IntegerField(
choices=EffectType.__members__.items(),
default=EffectType.IMMEDIATE)
""" How long effect persists. Ignore when `effect_type` is immediate and
determines when the delayed effect pops when `effect_type` is
DELAYED. Measured in ntdays """
duration = models.IntegerField(default=1)
""" Size of the effect. Set to 100 if using raw value. """
magnitude = models.FloatField(default=0.0)
""" Raw value increase/decrease will be converted to a percentage
if used with a subentity, such as a growth rate.
When Provided, magnitude will only be applied to the raw_value.
Exception: can be used as minimum value if base_is_min == True """
base_value = models.IntegerField(default=None)
""" When True, magnitude works as usual, and base_value is only applied if
the resulting Application value would be less than the base_value """
base_is_min = models.BooleanField(default=False)
""" Denotes negative or positive version of effect """
is_negative = models.BooleanField(default=False)
    def apply(self, province):
        """ Builds an (unsaved) Application of this instance targeting the
        given province (assumption: the caller sets applied_by/applied_on and
        saves it; the target maps to `applied_to`, as Application has no
        `province` field). """
        app = Application()
        app.instance = self
        app.applied_to = province
        return app
def __str__(self):
return "{} with mag. {}".format(self.effect.name, self.magnitude)
EffectType = Instance.EffectType
class Application(models.Model):
""" Used to apply effects to provinces """
instance = models.ForeignKey(Instance,
on_delete=models.CASCADE,
null=True,
blank=True)
applied_to = models.ForeignKey(
Province, on_delete=models.CASCADE, null=False, blank=False,
related_name='to')
applied_by = models.ForeignKey(
Province, on_delete=models.CASCADE, null=False, blank=False,
related_name='by')
""" Type of effect; alters how the effect is applied. """
# Round the effect was applied (ntdate)
applied_on = models.IntegerField()
# Round the effect expires (ntdate) (NULL permanent, immediate)
expires_at = models.IntegerField(default=None)
# Round the effect is applied (ntdate)
# (NULL immediate, 0 every tick till expires)
applies_at = models.IntegerField(default=None)
| gpl-3.0 | 7,361,919,509,951,882,000 | 30.854545 | 80 | 0.664669 | false | 4.050867 | false | false | false |
pidydx/grr | grr/lib/log_test.py | 1 | 2267 | #!/usr/bin/env python
"""Tests for logging classes."""
import logging
import time
from werkzeug import wrappers as werkzeug_wrappers
from grr.gui import wsgiapp
from grr.lib import flags
from grr.lib import log
from grr.lib import stats
from grr.lib import test_lib
from grr.lib import utils
from grr.proto import jobs_pb2
class ApplicationLoggerTests(test_lib.GRRBaseTest):
"""Store tests."""
def Log(self, msg, *args):
if args:
self.log += msg % (args)
else:
self.log += msg
def setUp(self):
super(ApplicationLoggerTests, self).setUp()
self.l = log.GrrApplicationLogger()
self.log = ""
self.log_stubber = utils.Stubber(logging, "info", self.Log)
self.log_stubber.Start()
def tearDown(self):
super(ApplicationLoggerTests, self).tearDown()
self.log_stubber.Stop()
def testGetEventId(self):
self.assertGreater(
len(self.l.GetNewEventId()), 20, "Invalid event ID generated")
self.assertGreater(
len(self.l.GetNewEventId(int(time.time() * 1e6))), 20,
"Invalid event ID generated")
def testLogHttpAdminUIAccess(self):
stats.STATS.RegisterCounterMetric("grr_gin_request_count")
request = wsgiapp.HttpRequest({
"wsgi.url_scheme": "http",
"SERVER_NAME": "foo.bar",
"SERVER_PORT": "1234"
})
request.user = "testuser"
response = werkzeug_wrappers.Response(
status=202,
headers={"X-GRR-Reason": "foo/test1234",
"X-API-Method": "TestMethod"})
self.l.LogHttpAdminUIAccess(request, response)
self.assertIn("foo/test1234", self.log)
def testLogHttpFrontendAccess(self):
request = self._GenHttpRequestProto()
self.l.LogHttpFrontendAccess(request)
self.assertIn("/test?omg=11%45x%20%20", self.log)
def _GenHttpRequestProto(self):
"""Create a valid request object."""
request = jobs_pb2.HttpRequest()
request.source_ip = "127.0.0.1"
request.user_agent = "Firefox or something"
request.url = "http://test.com/test?omg=11%45x%20%20"
request.user = "anonymous"
request.timestamp = int(time.time() * 1e6)
request.size = 1000
return request
def main(argv):
test_lib.main(argv=argv)
if __name__ == "__main__":
flags.StartMain(main)
| apache-2.0 | 3,389,963,987,248,417,000 | 23.912088 | 70 | 0.664314 | false | 3.440061 | true | false | false |
suutari/shoop | shuup/simple_supplier/models.py | 1 | 3252 | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from enumfields import EnumIntegerField
from shuup.core.fields import MoneyValueField, QuantityField
from shuup.core.suppliers.enums import StockAdjustmentType
from shuup.utils.properties import PriceProperty
def _get_currency():
from shuup.core.models import Shop
if not settings.SHUUP_ENABLE_MULTIPLE_SHOPS:
return Shop.objects.first().currency
return settings.SHUUP_HOME_CURRENCY
def _get_prices_include_tax():
from shuup.core.models import Shop
if not settings.SHUUP_ENABLE_MULTIPLE_SHOPS:
return Shop.objects.first().prices_include_tax
return False
class StockAdjustment(models.Model):
product = models.ForeignKey("shuup.Product", related_name="+", on_delete=models.CASCADE, verbose_name=_("product"))
supplier = models.ForeignKey("shuup.Supplier", on_delete=models.CASCADE, verbose_name=_("supplier"))
created_on = models.DateTimeField(auto_now_add=True, editable=False, db_index=True, verbose_name=_("created on"))
created_by = models.ForeignKey(
settings.AUTH_USER_MODEL, blank=True, null=True, on_delete=models.PROTECT, verbose_name=_("created by"))
delta = QuantityField(default=0, verbose_name=_("delta"))
purchase_price_value = MoneyValueField(default=0)
purchase_price = PriceProperty("purchase_price_value", "currency", "includes_tax")
type = EnumIntegerField(
StockAdjustmentType, db_index=True, default=StockAdjustmentType.INVENTORY, verbose_name=_("type"))
@cached_property
def currency(self):
return _get_currency()
@cached_property
def includes_tax(self):
return _get_prices_include_tax()
class StockCount(models.Model):
alert_limit = QuantityField(default=0, editable=False, verbose_name=_("alert limit"))
product = models.ForeignKey(
"shuup.Product", related_name="+", editable=False, on_delete=models.CASCADE, verbose_name=_("product"))
supplier = models.ForeignKey(
"shuup.Supplier", editable=False, on_delete=models.CASCADE, verbose_name=_("supplier"))
logical_count = QuantityField(default=0, editable=False, verbose_name=_("logical count"))
physical_count = QuantityField(default=0, editable=False, verbose_name=_("physical count"))
stock_value_value = MoneyValueField(default=0)
stock_value = PriceProperty("stock_value_value", "currency", "includes_tax")
stock_unit_price = PriceProperty("stock_unit_price_value", "currency", "includes_tax")
class Meta:
unique_together = [("product", "supplier")]
@cached_property
def currency(self):
return _get_currency()
@cached_property
def includes_tax(self):
return _get_prices_include_tax()
@property
def stock_unit_price_value(self):
return (self.stock_value_value / self.logical_count if self.logical_count else 0)
| agpl-3.0 | 9,174,615,948,482,939,000 | 40.164557 | 119 | 0.718635 | false | 3.737931 | false | false | false |
pitunti/alfaPitunti | plugin.video.alfa/channels/seriesblanco.py | 1 | 13145 | # -*- coding: utf-8 -*-
import re
import urlparse
from channels import filtertools
from channelselector import get_thumb
from core import httptools
from core import scrapertoolsV2
from core import servertools
from core.item import Item
from platformcode import config, logger
from channels import autoplay
HOST = "https://seriesblanco.com/"
IDIOMAS = {'es': 'Español', 'en': 'Inglés', 'la': 'Latino', 'vo': 'VO', 'vos': 'VOS', 'vosi': 'VOSI', 'otro': 'OVOS'}
list_idiomas = IDIOMAS.values()
list_language = ['default']
CALIDADES = ['SD', 'HDiTunes', 'Micro-HD-720p', 'Micro-HD-1080p', '1080p', '720p']
list_quality = CALIDADES
list_servers = ['streamix',
'powvideo',
'streamcloud',
'openload',
'flashx',
'streamplay',
'nowvideo',
'gamovideo',
'kingvid',
'vidabc'
]
def mainlist(item):
logger.info()
thumb_series = get_thumb("channels_tvshow.png")
thumb_series_az = get_thumb("channels_tvshow_az.png")
thumb_buscar = get_thumb("search.png")
itemlist = list()
autoplay.init(item.channel, list_servers, list_quality)
itemlist.append(Item(channel=item.channel, title="Listado alfabético", action="series_listado_alfabetico",
thumbnail=thumb_series_az))
itemlist.append(Item(channel=item.channel, title="Todas las series", action="series",
url=urlparse.urljoin(HOST, "listado/"), thumbnail=thumb_series))
itemlist.append(
Item(channel=item.channel, title="Capítulos estrenados recientemente", action="home_section",
extra="Series Online : Capítulos estrenados recientemente",
url=HOST, thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, title="Series más vistas", action="series", extra="Series Más vistas",
url=urlparse.urljoin(HOST, "listado-visto/"), thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, title="Últimas fichas creadas", action="series",
url=urlparse.urljoin(HOST, "fichas_creadas/"), thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, title="Series por género", action="generos",
url=HOST, thumbnail=thumb_series))
itemlist.append(
Item(channel=item.channel, title="Buscar...", action="search", url=urlparse.urljoin(HOST, "finder.php"),
thumbnail=thumb_buscar))
itemlist = filtertools.show_option(itemlist, item.channel, list_idiomas, CALIDADES)
autoplay.show_option(item.channel, itemlist)
return itemlist
def home_section(item):
logger.info("section = %s" % item.extra)
pattern = "['\"]panel-title['\"]>[^/]*%s(.*?)(?:panel-title|\Z)" % item.extra
# logger.debug("pattern = %s" % pattern)
data = httptools.downloadpage(item.url).data
result = re.search(pattern, data, re.MULTILINE | re.DOTALL)
if result:
# logger.debug("found section: {0}".format(result.group(1)))
item.extra = 1
return extract_series_from_data(item, result.group(1))
logger.debug("No match")
return []
def extract_series_from_data(item, data):
itemlist = []
episode_pattern = re.compile('/capitulo-([0-9]+)/')
shows = re.findall("<a.+?href=['\"](?P<url>/serie[^'\"]+)[^<]*<img[^>]*src=['\"](?P<img>http[^'\"]+).*?"
"(?:alt|title)=['\"](?P<name>[^'\"]+)", data)
for url, img, name in shows:
try:
name.decode('utf-8')
except UnicodeError:
name = unicode(name, "iso-8859-1", errors="replace").encode("utf-8")
# logger.debug("Show found: %s -> %s (%s)" % (name, url, img))
if not episode_pattern.search(url):
action = "episodios"
else:
action = "findvideos"
context1=[filtertools.context(item, list_idiomas, CALIDADES), autoplay.context]
itemlist.append(item.clone(title=name, url=urlparse.urljoin(HOST, url),
action=action, show=name,
thumbnail=img,
context=context1))
more_pages = re.search('pagina=([0-9]+)">>>', data)
if more_pages:
# logger.debug("Adding next page item")
itemlist.append(item.clone(title="Siguiente >>", extra=item.extra + 1))
if item.extra > 1:
# logger.debug("Adding previous page item")
itemlist.append(item.clone(title="<< Anterior", extra=item.extra - 1))
return itemlist
def series(item):
logger.info()
if not hasattr(item, 'extra') or not isinstance(item.extra, int):
item.extra = 1
if '?' in item.url:
merger = '&'
else:
merger = '?'
page_url = "%s%spagina=%s" % (item.url, merger, item.extra)
logger.info("url = %s" % page_url)
data = scrapertoolsV2.decodeHtmlentities(httptools.downloadpage(page_url).data)
return extract_series_from_data(item, data)
def series_listado_alfabetico(item):
logger.info()
return [item.clone(action="series", title=letra, url=urlparse.urljoin(HOST, "listado-%s/" % letra))
for letra in "ABCDEFGHIJKLMNOPQRSTUVWXYZ"]
def generos(item):
logger.info()
data = httptools.downloadpage(item.url).data
result = re.findall("href=['\"](?P<url>/listado/[^'\"]+)['\"][^/]+/i>\s*(?P<genero>[^<]+)", data)
return [item.clone(action="series", title=genero, url=urlparse.urljoin(item.url, url)) for url, genero in result]
def newest(categoria):
logger.info("categoria: %s" % categoria)
itemlist = []
try:
if categoria == 'series':
itemlist = home_section(Item(extra=CAPITULOS_DE_ESTRENO_STR, url=HOST))
    # The exception is caught so a failing channel does not interrupt the "newest" channel
except:
import sys
for line in sys.exc_info():
logger.error("%s" % line)
return []
return itemlist
def search(item, texto):
logger.info("%s" % texto)
texto = texto.replace(" ", "+")
itemlist = []
try:
post = "query=%s" % texto
data = httptools.downloadpage(item.url, post=post).data
data = re.sub(r"\n|\r|\t|\s{2}", "", data)
shows = re.findall("<a href=['\"](?P<url>/serie[^'\"]+)['\"].*?<img src=['\"](?P<img>[^'\"]+)['\"].*?"
"id=['\"]q2[1\"] name=['\"]q2['\"] value=['\"](?P<title>.*?)['\"]", data)
for url, img, title in shows:
itemlist.append(item.clone(title=title, url=urlparse.urljoin(HOST, url), action="episodios", show=title,
thumbnail=img, context=filtertools.context(item, list_idiomas, CALIDADES)))
    # The exception is caught so a failing channel does not interrupt the global search
except:
import sys
for line in sys.exc_info():
logger.error("%s" % line)
return itemlist
def episodios(item):
logger.info("%s - %s" % (item.title, item.url))
itemlist = []
    # Download the page
data = httptools.downloadpage(item.url).data
fanart = scrapertoolsV2.find_single_match(data, "background-image[^'\"]+['\"]([^'\"]+)")
plot = scrapertoolsV2.find_single_match(data, "id=['\"]profile2['\"]>\s*(.*?)\s*</div>")
# logger.debug("fanart: %s" % fanart)
# logger.debug("plot: %s" % plot)
episodes = re.findall("<tr.*?href=['\"](?P<url>[^'\"]+).+?>(?P<title>.+?)</a>.*?<td>(?P<flags>.*?)</td>", data,
re.MULTILINE | re.DOTALL)
for url, title, flags in episodes:
title = re.sub("<span[^>]+>", "", title).replace("</span>", "")
idiomas = " ".join(["[%s]" % IDIOMAS.get(language, "OVOS") for language in
re.findall("banderas/([^\.]+)", flags, re.MULTILINE)])
filter_lang = idiomas.replace("[", "").replace("]", "").split(" ")
display_title = "%s - %s %s" % (item.show, title, idiomas)
# logger.debug("Episode found %s: %s" % (display_title, urlparse.urljoin(HOST, url)))
itemlist.append(item.clone(title=display_title, url=urlparse.urljoin(HOST, url),
action="findvideos", plot=plot, fanart=fanart, language=filter_lang))
itemlist = filtertools.get_links(itemlist, item, list_idiomas, CALIDADES)
if config.get_videolibrary_support() and len(itemlist) > 0:
itemlist.append(
item.clone(title="Añadir esta serie a la videoteca", action="add_serie_to_library", extra="episodios"))
return itemlist
def parse_videos(item, type_str, data):
video_patterns_str = [
'<tr.+?<span>(?P<date>.+?)</span>.*?banderas/(?P<language>[^\.]+).+?href="(?P<link>[^"]+).+?servidores/'
'(?P<server>[^\.]+).*?</td>.*?<td>.*?<span>(?P<uploader>.+?)</span>.*?<span>(?P<quality>.*?)</span>',
'<tr.+?banderas/(?P<language>[^\.]+).+?<td[^>]*>(?P<date>.+?)</td>.+?href=[\'"](?P<link>[^\'"]+)'
'.+?servidores/(?P<server>[^\.]+).*?</td>.*?<td[^>]*>.*?<a[^>]+>(?P<uploader>.+?)</a>.*?</td>.*?<td[^>]*>'
'(?P<quality>.*?)</td>.*?</tr>'
]
for v_pat_str in video_patterns_str:
v_patt_iter = re.compile(v_pat_str, re.MULTILINE | re.DOTALL).finditer(data)
itemlist = []
for vMatch in v_patt_iter:
v_fields = vMatch.groupdict()
quality = v_fields.get("quality")
            # FIX for cases where the language is added in the comments
regex = re.compile('sub-inglés-?', re.I)
quality = regex.sub("", quality)
# quality = re.sub(r"sub-inglés-?", "", quality, flags=re.IGNORECASE)
if not quality:
quality = "SD"
            # FIX for the hyphens in the quality, avoiding another entry in the quality list
if quality.startswith("MicroHD"):
regex = re.compile('microhd', re.I)
quality = regex.sub("Micro-HD-", quality)
# quality = re.sub(r"microhd", "Micro-HD-", quality, flags=re.IGNORECASE)
server = v_fields.get("server")
title = "%s en %s [%s] [%s] (%s: %s)" % (type_str, v_fields.get("server"),
IDIOMAS.get(v_fields.get("language"), "OVOS"), quality,
v_fields.get("uploader"), v_fields.get("date"))
itemlist.append(
item.clone(title=title, fulltitle=item.title, url=urlparse.urljoin(HOST, v_fields.get("link")),
action="play", language=IDIOMAS.get(v_fields.get("language"), "OVOS"),
quality=quality, server= server))
if len(itemlist) > 0:
return itemlist
return []
def extract_videos_section(data):
return re.findall("panel-title[^>]*>\s*([VvDd].+?)</div>[^<]*</div>[^<]*</div>", data, re.MULTILINE | re.DOTALL)
def findvideos(item):
logger.info("%s = %s" % (item.show, item.url))
    # Download the page
data = httptools.downloadpage(item.url).data
# logger.info(data)
online = extract_videos_section(data)
try:
filtro_enlaces = config.get_setting("filterlinks", item.channel)
except:
filtro_enlaces = 2
list_links = []
if filtro_enlaces != 0:
list_links.extend(parse_videos(item, "Ver", online[-2]))
if filtro_enlaces != 1:
list_links.extend(parse_videos(item, "Descargar", online[-1]))
list_links = filtertools.get_links(list_links, item, list_idiomas, CALIDADES)
    # Derive the server name from titles like "Ver en <server> [...]".
    # str.lstrip('Ver en') strips a *set of characters*, mangling servers such
    # as "nowvideo", so take the text after the first " en " instead.
    for link in list_links:
        link.server = link.title.split('[')[0].partition(' en ')[2].strip()
autoplay.start(list_links, item)
return list_links
def play(item):
logger.info("%s - %s = %s" % (item.show, item.title, item.url))
if item.url.startswith(HOST):
data = httptools.downloadpage(item.url).data
ajax_link = re.findall("loadEnlace\((\d+),(\d+),(\d+),(\d+)\)", data)
ajax_data = ""
for serie, temp, cap, linkID in ajax_link:
# logger.debug(
# "Ajax link request: Serie = %s - Temp = %s - Cap = %s - Link = %s" % (serie, temp, cap, linkID))
ajax_data += httptools.downloadpage(
HOST + '/ajax/load_enlace.php?serie=' + serie + '&temp=' + temp + '&cap=' + cap + '&id=' + linkID).data
if ajax_data:
data = ajax_data
patron = "window.location.href\s*=\s*[\"']([^\"']+)'"
url = scrapertoolsV2.find_single_match(data, patron)
else:
url = item.url
itemlist = servertools.find_video_items(data=url)
titulo = scrapertoolsV2.find_single_match(item.fulltitle, "^(.*?)\s\[.+?$")
if titulo:
titulo += " [%s]" % item.language
for videoitem in itemlist:
if titulo:
videoitem.title = titulo
else:
videoitem.title = item.title
videoitem.channel = item.channel
return itemlist
| gpl-3.0 | -4,026,489,724,223,350,000 | 35.870787 | 119 | 0.564909 | false | 3.360471 | false | false | false |
TechAtNYU/feedback-service | feedback.py | 1 | 3796 | import requests
import secrets
import smtplib
headers = {
'content-type': 'application/vnd.api+json',
'accept': 'application/*, text/*',
'authorization': 'Bearer ' + secrets.tnyu_api_key
}
def get_emails(event_id, event_data, eboard_members, attendees):
res = requests.get('https://api.tnyu.org/v3/events/' + event_id +
'?include=attendees', headers=headers, verify=False)
    if res.status_code != 200:
return
r = res.json()
event_data.append(r['data'])
for post in r['included']:
if post['attributes'].get('contact'):
if post['attributes']['roles']:
eboard_members.append(post)
else:
attendees.append(post)
def send_emails(event_data, survey_link, eboard_members, attendees):
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login(secrets.tnyu_email, secrets.tnyu_email_password)
for i, member in enumerate(eboard_members):
msg = "\r\n".join([
'Hi ' + eboard_members[i]['attributes']['name'] + '!\n\n' +
'Thanks for coming out! We are constantly looking to improve ' +
'on our events, and we would really appreciate it if you ' +
'could take two minutes out of your day to fill out our' +
'feedback form. We\'d love to know how we could do better: ' +
survey_link + '?rsvpId=' + eboard_members[i]['id'],
'',
'Filling the form out will give us an idea of how everything ' +
'went and if there was something you really liked about the ' +
'event or something you did not like.\n',
'Feel free to email [email protected] if you have ' +
'other questions or concerns.',
'',
'Thank you,',
'Tech@NYU team'
])
try:
server.sendmail(secrets.tnyu_email, eboard_members[i][
'attributes']['contact']['email'], msg)
except UnicodeEncodeError:
continue
    for j, attendee in enumerate(attendees):
msg = "\r\n".join([
"From: " + secrets.tnyu_email,
"To: " + attendees[j]['attributes']['contact']['email'],
"Subject: Thank you for coming to Tech@NYU's " +
event_data[0]['attributes']['title'],
'',
'Hi ' + attendees[j]['attributes']['name'] + '!\n\n' +
'Thanks for coming out! We are constantly looking to improve ' +
'on our events, and we would really appreciate it if you could ' +
' take two minutes out of your day to fill out our feedback ' +
'form. We\'d love to know how we could do better: ' +
survey_link + '?rsvpId=' + attendees[j]['id'],
'',
'Filling the form out will give us an idea of how everything ' +
'went and if there was something you really liked about the ' +
'event or something you did not like.\n',
'Feel free to email [email protected] if you have other ' +
'questions or concerns.',
'',
'Thank you,',
'Tech@NYU team'
])
try:
server.sendmail(secrets.tnyu_email, attendees[j][
'attributes']['contact']['email'], msg)
except UnicodeEncodeError:
continue
server.quit()
def main():
event_id = '5644e5e37af46de029dfb9f9'
eboard_members = []
attendees = []
event_data = []
survey_link = 'https://techatnyu.typeform.com/to/ElE6F5'
    get_emails(event_id, event_data, eboard_members, attendees)
    print attendees[0]
send_emails(event_data, survey_link, eboard_members, attendees)
main()
| mit | 9,174,346,333,781,503,000 | 35.5 | 78 | 0.560327 | false | 3.758416 | false | false | false |
bobbyrward/fr0st | fr0stlib/gui/utils.py | 1 | 8708 | import wx, os
from functools import partial
from fr0stlib.decorators import *
def LoadIcon(*path):
# Check for an icons dir in app base path first for development
filename = os.path.join(wx.GetApp().AppBaseDir, 'icons', *path) + '.png'
if not os.path.exists(filename):
# Not there, check install path
filename = os.path.join(wx.GetApp().IconsDir, *path) + '.png'
img = wx.Image(filename, type=wx.BITMAP_TYPE_PNG)
img.Rescale(16,16)
return wx.BitmapFromImage(img)
def Box(self, name, *a, **k):
box = wx.StaticBoxSizer(wx.StaticBox(self, -1, name),
k.get('orient', wx.VERTICAL))
box.AddMany(a)
return box
def MakeTCs(self, *a, **k):
fgs = wx.FlexGridSizer(99, 2, 1, 1)
tcs = {}
for i, default in a:
tc = NumberTextCtrl(self, **k)
tc.SetFloat(default)
tcs[i] = tc
fgs.Add(wx.StaticText(self, -1, i.replace("_", " ").title()),
0, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5)
fgs.Add(tc, 0, wx.ALIGN_RIGHT, 5)
return fgs, tcs
class MyChoice(wx.Choice):
def __init__(self, parent, name, d, initial):
self.d = d
choices = sorted(d.iteritems())
wx.Choice.__init__(self, parent, -1, choices=[k for k,_ in choices])
self.SetSelection([v for _,v in choices].index(initial))
def GetFloat(self):
return self.d[self.GetStringSelection()]
class SizePanel(wx.Panel):
def __init__(self, parent, callback=lambda: None):
self.parent = parent
self.keepratio = True
self.callback = callback
wx.Panel.__init__(self, parent, -1)
fgs, tcs = MakeTCs(self, ("width", 512.), ("height", 384.), low=0,
callback=self.SizeCallback)
self.__dict__.update(tcs)
for i in (self.width, self.height):
i.MakeIntOnly()
i.low = 1
ratio = wx.CheckBox(self, -1, "Keep Ratio")
ratio.SetValue(True)
ratio.Bind(wx.EVT_CHECKBOX, self.OnRatio)
box = Box(self, "Size", fgs, ratio)
self.SetSizer(box)
box.Fit(self)
def GetInts(self):
return [int(tc.GetFloat()) for tc in (self.width, self.height)]
def UpdateSize(self, size):
width, height = (float(i) for i in size)
self.width.SetFloat(width)
self.height.SetFloat(height)
self.ratio = width / height
def OnRatio(self, e):
self.keepratio = e.GetInt()
def SizeCallback(self, tc, tempsave=None):
if self.keepratio:
v = tc.GetFloat()
tc.SetInt(v)
if tc == self.width:
w, h = v, v / self.ratio
self.height.SetInt(h)
else:
w, h = v * self.ratio, v
self.width.SetInt(w)
else:
self.ratio = float(self.width.GetFloat()) / self.height.GetFloat()
self.callback()
class NumberTextCtrl(wx.TextCtrl):
low = None
high = None
@BindEvents
def __init__(self, parent, low=None, high=None, callback=None):
self.parent = parent
# Size is set to ubuntu default (75,27), maybe make it 75x21 in win
wx.TextCtrl.__init__(self,parent,-1, size=(75,27))
if (low,high) != (None,None):
self.SetAllowedRange(low, high)
if callback:
self.callback = partial(callback, self)
else:
self.callback = lambda tempsave=None: None
self.HasChanged = False
self.SetFloat(0.0)
def GetFloat(self):
return float(self.GetValue() or "0")
def SetFloat(self, v):
v = self.Checkrange(float(v))
self._value = v
string = ("%.6f" %v).rstrip("0")
if string.endswith("."):
string += "0" # Avoid values like '0.' or '1.'
self.SetValue(string)
def GetInt(self):
return int(self.GetValue() or "0")
def SetInt(self, v):
v = self.Checkrange(int(v))
self._value = v
self.SetValue(str(v))
def MakeIntOnly(self):
self.SetInt(self.GetFloat())
self.SetFloat, self.GetFloat = self.SetInt, self.GetInt
def SetAllowedRange(self, low=None, high=None):
self.low = low
self.high = high
def Checkrange(self, v):
if self.low is not None and v < self.low:
return self.low
elif self.high is not None and v > self.high:
return self.high
return v
@Bind(wx.EVT_MOUSEWHEEL)
def OnMouseWheel(self, evt):
if self.SetFloat == self.SetInt:
return
if evt.CmdDown():
if evt.AltDown():
delta = 0.01
else:
delta = 0.1
elif evt.AltDown():
delta = 0.001
else:
evt.Skip()
return
self.SetFocus() # Makes sure OnKeyUp gets called.
v = self._value + delta * evt.GetWheelRotation() / evt.GetWheelDelta()
self.SetFloat(v)
self.callback(tempsave=False)
self.HasChanged = True
@Bind(wx.EVT_KEY_UP)
def OnKeyUp(self, e):
# TODO: This code is duplicated with the one found in xformeditor.
key = e.GetKeyCode()
if (key == wx.WXK_CONTROL and not e.AltDown()) or (
key == wx.WXK_ALT and not e.ControlDown()):
if self.HasChanged:
if hasattr(self.parent, 'parent') and hasattr(self.parent.parent, 'TreePanel'):
self.parent.parent.TreePanel.TempSave()
self.HasChanged = False
@Bind(wx.EVT_CHAR)
def OnChar(self, event):
key = event.GetKeyCode()
if key in [wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER]:
self.OnKillFocus(None)
elif key < wx.WXK_SPACE or key == wx.WXK_DELETE or key > 255 or key == wx.WXK_TAB:
event.Skip()
elif chr(key) in "0123456789.-":
event.Skip()
else:
# not calling Skip() eats the event
pass #wx.Bell()
@Bind(wx.EVT_KILL_FOCUS)
def OnKillFocus(self,event):
# cmp done with strings because equal floats can compare differently.
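        # For example, float("1.0") == float("1.00") even though the displayed
        # text changed; comparing str(self._value) to the raw field text
        # catches edits that a float comparison would miss.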
if str(self._value) != self.GetValue():
try:
v = self.GetFloat() # Can raise ValueError
except ValueError:
self.SetFloat(self._value)
return
self.SetFloat(v)
self.callback()
class MultiSliderMixin(object):
"""Class to dynamically create and control sliders."""
_new = None
_changed = False
def __init__(self, *a, **k):
super(MultiSliderMixin, self).__init__(*a, **k)
self.sliders = {}
self.Bind(wx.EVT_IDLE, self.OnIdle)
def MakeSlider(self, name, init, low, high, strictrange=True):
"""Programatically builds stuff."""
tc = NumberTextCtrl(self, callback=self.__callback)
if strictrange:
tc.SetAllowedRange(low, high)
slider = wx.Slider(self, -1, init*100, low*100, high*100,
style=wx.SL_HORIZONTAL
| wx.SL_SELRANGE
)
self.sliders[name] = slider, tc
slider.Bind(wx.EVT_SLIDER, partial(self.OnSlider, tc=tc))
## slider.Bind(wx.EVT_LEFT_DOWN, self.OnSliderDown)
slider.Bind(wx.EVT_LEFT_UP, self.OnSliderUp)
name = name.replace("_", " ").title()
return Box(self, name, tc, (slider, wx.EXPAND), orient=wx.HORIZONTAL)
def UpdateSlider(self, name, val):
slider, tc = self.sliders[name]
slider.SetValue(int(val*100))
tc.SetFloat(val)
def IterSliders(self):
for name, (_, tc) in self.sliders.iteritems():
yield name, tc.GetFloat()
def OnSlider(self, e, tc):
val = e.GetInt()/100.
# Make sure _new is only set when there are actual changes.
if val != tc._value:
self._new = True
            tc.SetFloat(val)
e.Skip()
## def OnSliderDown(self, e):
## e.Skip()
def OnSliderUp(self, e):
if self._changed:
self.parent.TreePanel.TempSave()
self._changed = False
e.Skip()
def OnIdle(self, e):
if self._new is not None:
self.UpdateFlame()
self._new = None
self._changed = True
def __callback(self, tc, tempsave=True):
self.UpdateFlame()
if tempsave:
self.parent.TreePanel.TempSave()
def UpdateFlame(self):
        raise NotImplementedError  # abstract: subclasses must override
def UpdateView(self):
        raise NotImplementedError  # abstract: subclasses must override
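# Illustrative sketch (not part of the original module): a minimal panel built
# on MultiSliderMixin. GammaPanel and "gamma" are hypothetical names, and
# OnSliderUp expects parent.TreePanel.TempSave to exist.
#
#     class GammaPanel(MultiSliderMixin, wx.Panel):
#         def __init__(self, parent):
#             super(GammaPanel, self).__init__(parent, -1)
#             self.parent = parent
#             box = self.MakeSlider("gamma", init=4.0, low=1.0, high=10.0)
#             self.SetSizer(box)
#         def UpdateFlame(self):
#             pass  # push slider values into the model here
#         def UpdateView(self):
#             pass  # refresh the controls from the model here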
| gpl-3.0 | 4,896,840,407,771,886,000 | 27.090323 | 95 | 0.544442 | false | 3.619285 | false | false | false |
eLRuLL/scrapy | scrapy/http/response/text.py | 1 | 9259 | """
This module implements the TextResponse class which adds encoding handling and
discovering (through HTTP headers) to base Response class.
See documentation in docs/topics/request-response.rst
"""
from contextlib import suppress
from typing import Generator
from urllib.parse import urljoin
import parsel
from w3lib.encoding import (html_body_declared_encoding, html_to_unicode,
http_content_type_encoding, resolve_encoding)
from w3lib.html import strip_html5_whitespace
from scrapy.http import Request
from scrapy.http.response import Response
from scrapy.utils.python import memoizemethod_noargs, to_unicode
from scrapy.utils.response import get_base_url
class TextResponse(Response):
_DEFAULT_ENCODING = 'ascii'
def __init__(self, *args, **kwargs):
self._encoding = kwargs.pop('encoding', None)
self._cached_benc = None
self._cached_ubody = None
self._cached_selector = None
super(TextResponse, self).__init__(*args, **kwargs)
def _set_url(self, url):
if isinstance(url, str):
self._url = to_unicode(url, self.encoding)
else:
super(TextResponse, self)._set_url(url)
def _set_body(self, body):
self._body = b'' # used by encoding detection
if isinstance(body, str):
if self._encoding is None:
raise TypeError('Cannot convert unicode body - %s has no encoding' %
type(self).__name__)
self._body = body.encode(self._encoding)
else:
super(TextResponse, self)._set_body(body)
def replace(self, *args, **kwargs):
kwargs.setdefault('encoding', self.encoding)
return Response.replace(self, *args, **kwargs)
@property
def encoding(self):
return self._declared_encoding() or self._body_inferred_encoding()
def _declared_encoding(self):
return self._encoding or self._headers_encoding() \
or self._body_declared_encoding()
def body_as_unicode(self):
"""Return body as unicode"""
return self.text
@property
def text(self):
""" Body as unicode """
# access self.encoding before _cached_ubody to make sure
# _body_inferred_encoding is called
benc = self.encoding
if self._cached_ubody is None:
charset = 'charset=%s' % benc
self._cached_ubody = html_to_unicode(charset, self.body)[1]
return self._cached_ubody
def urljoin(self, url):
"""Join this Response's url with a possible relative url to form an
absolute interpretation of the latter."""
return urljoin(get_base_url(self), url)
@memoizemethod_noargs
def _headers_encoding(self):
content_type = self.headers.get(b'Content-Type', b'')
return http_content_type_encoding(to_unicode(content_type))
def _body_inferred_encoding(self):
if self._cached_benc is None:
content_type = to_unicode(self.headers.get(b'Content-Type', b''))
benc, ubody = html_to_unicode(content_type, self.body,
auto_detect_fun=self._auto_detect_fun,
default_encoding=self._DEFAULT_ENCODING)
self._cached_benc = benc
self._cached_ubody = ubody
return self._cached_benc
def _auto_detect_fun(self, text):
for enc in (self._DEFAULT_ENCODING, 'utf-8', 'cp1252'):
try:
text.decode(enc)
except UnicodeError:
continue
return resolve_encoding(enc)
@memoizemethod_noargs
def _body_declared_encoding(self):
return html_body_declared_encoding(self.body)
@property
def selector(self):
from scrapy.selector import Selector
if self._cached_selector is None:
self._cached_selector = Selector(self)
return self._cached_selector
def xpath(self, query, **kwargs):
return self.selector.xpath(query, **kwargs)
def css(self, query):
return self.selector.css(query)
def follow(self, url, callback=None, method='GET', headers=None, body=None,
cookies=None, meta=None, encoding=None, priority=0,
dont_filter=False, errback=None, cb_kwargs=None, flags=None):
# type: (...) -> Request
"""
Return a :class:`~.Request` instance to follow a link ``url``.
        It accepts the same arguments as the ``Request.__init__`` method,
        but ``url`` can be not only an absolute URL, but also
* a relative URL
* a :class:`~scrapy.link.Link` object, e.g. the result of
:ref:`topics-link-extractors`
* a :class:`~scrapy.selector.Selector` object for a ``<link>`` or ``<a>`` element, e.g.
``response.css('a.my_link')[0]``
* an attribute :class:`~scrapy.selector.Selector` (not SelectorList), e.g.
``response.css('a::attr(href)')[0]`` or
``response.xpath('//img/@src')[0]``
See :ref:`response-follow-example` for usage examples.
"""
if isinstance(url, parsel.Selector):
url = _url_from_selector(url)
elif isinstance(url, parsel.SelectorList):
raise ValueError("SelectorList is not supported")
encoding = self.encoding if encoding is None else encoding
return super(TextResponse, self).follow(
url=url,
callback=callback,
method=method,
headers=headers,
body=body,
cookies=cookies,
meta=meta,
encoding=encoding,
priority=priority,
dont_filter=dont_filter,
errback=errback,
cb_kwargs=cb_kwargs,
flags=flags,
)
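    # Illustrative usage sketch (not part of upstream Scrapy); ``a.next`` is a
    # hypothetical selector:
    #
    #     def parse(self, response):
    #         for a in response.css('a.next'):
    #             yield response.follow(a, callback=self.parse)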
def follow_all(self, urls=None, callback=None, method='GET', headers=None, body=None,
cookies=None, meta=None, encoding=None, priority=0,
dont_filter=False, errback=None, cb_kwargs=None, flags=None,
css=None, xpath=None):
# type: (...) -> Generator[Request, None, None]
"""
A generator that produces :class:`~.Request` instances to follow all
        links in ``urls``. It accepts the same arguments as the :class:`~.Request`'s
        ``__init__`` method, except that each ``urls`` element does not need to be
        an absolute URL; it can be any of the following:
* a relative URL
* a :class:`~scrapy.link.Link` object, e.g. the result of
:ref:`topics-link-extractors`
* a :class:`~scrapy.selector.Selector` object for a ``<link>`` or ``<a>`` element, e.g.
``response.css('a.my_link')[0]``
* an attribute :class:`~scrapy.selector.Selector` (not SelectorList), e.g.
``response.css('a::attr(href)')[0]`` or
``response.xpath('//img/@src')[0]``
In addition, ``css`` and ``xpath`` arguments are accepted to perform the link extraction
within the ``follow_all`` method (only one of ``urls``, ``css`` and ``xpath`` is accepted).
        Note that when passing a ``SelectorList`` as an argument for the ``urls`` parameter or
        using the ``css`` or ``xpath`` parameters, this method will not produce requests for
        selectors from which links cannot be obtained (for instance, anchor tags without an
        ``href`` attribute).
"""
arg_count = len(list(filter(None, (urls, css, xpath))))
if arg_count != 1:
raise ValueError('Please supply exactly one of the following arguments: urls, css, xpath')
if not urls:
if css:
urls = self.css(css)
if xpath:
urls = self.xpath(xpath)
if isinstance(urls, parsel.SelectorList):
selectors = urls
urls = []
for sel in selectors:
with suppress(_InvalidSelector):
urls.append(_url_from_selector(sel))
return super(TextResponse, self).follow_all(
urls=urls,
callback=callback,
method=method,
headers=headers,
body=body,
cookies=cookies,
meta=meta,
encoding=encoding,
priority=priority,
dont_filter=dont_filter,
errback=errback,
cb_kwargs=cb_kwargs,
flags=flags,
)
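    # Illustrative usage sketch (not part of upstream Scrapy); ``ul.pager a``
    # is a hypothetical selector:
    #
    #     def parse(self, response):
    #         for request in response.follow_all(css='ul.pager a', callback=self.parse):
    #             yield request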
class _InvalidSelector(ValueError):
"""
Raised when a URL cannot be obtained from a Selector
"""
def _url_from_selector(sel):
# type: (parsel.Selector) -> str
if isinstance(sel.root, str):
# e.g. ::attr(href) result
return strip_html5_whitespace(sel.root)
if not hasattr(sel.root, 'tag'):
raise _InvalidSelector("Unsupported selector: %s" % sel)
if sel.root.tag not in ('a', 'link'):
raise _InvalidSelector("Only <a> and <link> elements are supported; got <%s>" %
sel.root.tag)
href = sel.root.get('href')
if href is None:
raise _InvalidSelector("<%s> element has no href attribute: %s" %
(sel.root.tag, sel))
return strip_html5_whitespace(href)
| bsd-3-clause | -5,294,121,437,932,270,000 | 37.260331 | 102 | 0.587212 | false | 4.131638 | false | false | false |
dianchen96/gym | gym/envs/mujoco/mujoco_env.py | 1 | 9674 | import os
from gym import error, spaces
from gym.utils import seeding
import numpy as np
from os import path
import gym
import six
try:
import mujoco_py
from mujoco_py.mjlib import mjlib
except ImportError as e:
raise error.DependencyNotInstalled("{}. (HINT: you need to install mujoco_py, and also perform the setup instructions here: https://github.com/openai/mujoco-py/.)".format(e))
class MujocoEnv(gym.Env):
"""Superclass for all MuJoCo environments.
"""
def __init__(self, model_path, frame_skip):
if model_path.startswith("/"):
fullpath = model_path
else:
fullpath = os.path.join(os.path.dirname(__file__), "assets", model_path)
if not path.exists(fullpath):
raise IOError("File %s does not exist" % fullpath)
self.frame_skip = frame_skip
self.model = mujoco_py.MjModel(fullpath)
self.data = self.model.data
self.viewer = None
# self.camera2 = None
# self.camera2 = mujoco_py.MjViewer(init_width=500, init_height=500)
# self.camera2.start()
# self.camera2.set_model(self.model)
# self.camera2_setup()
self.metadata = {
'render.modes': ['human', 'rgb_array'],
'video.frames_per_second': int(np.round(1.0 / self.dt))
}
self.init_qpos = self.model.data.qpos.ravel().copy()
self.init_qvel = self.model.data.qvel.ravel().copy()
observation, _reward, done, _info = self._step(np.zeros(self.model.nu))
assert not done
self.obs_dim = observation.size
bounds = self.model.actuator_ctrlrange.copy()
low = bounds[:, 0]
high = bounds[:, 1]
self.action_space = spaces.Box(low, high)
high = np.inf*np.ones(self.obs_dim)
low = -high
self.observation_space = spaces.Box(low, high)
self._seed()
def _seed(self, seed=None):
self.np_random, seed = seeding.np_random(seed)
return [seed]
# methods to override:
# ----------------------------
def reset_model(self):
"""
Reset the robot degrees of freedom (qpos and qvel).
Implement this in each subclass.
"""
raise NotImplementedError
def viewer_setup(self):
"""
This method is called when the viewer is initialized and after every reset
        Optionally implement this method if you need to tinker with camera
        position and so forth.
"""
pass
# -----------------------------
def _reset(self):
mjlib.mj_resetData(self.model.ptr, self.data.ptr)
ob = self.reset_model()
if self.viewer is not None:
self.viewer.autoscale()
self.viewer_setup()
return ob
def set_state(self, qpos, qvel):
assert qpos.shape == (self.model.nq,) and qvel.shape == (self.model.nv,)
self.model.data.qpos = qpos
self.model.data.qvel = qvel
self.model._compute_subtree() # pylint: disable=W0212
self.model.forward()
@property
def dt(self):
return self.model.opt.timestep * self.frame_skip
def do_simulation(self, ctrl, n_frames):
self.model.data.ctrl = ctrl
for _ in range(n_frames):
self.model.step()
def _render(self, mode='human', close=False):
if close:
if self.viewer is not None:
self._get_viewer().finish()
self.viewer = None
return
if mode == 'rgb_array':
self._get_viewer().render()
data, width, height = self._get_viewer().get_image()
return np.fromstring(data, dtype='uint8').reshape(height, width, 3)[::-1, :, :]
elif mode == 'human':
self._get_viewer().loop_once()
def _get_viewer(self):
if self.viewer is None:
self.viewer = mujoco_py.MjViewer()
self.viewer.start()
self.viewer.set_model(self.model)
self.viewer_setup()
return self.viewer
def get_body_com(self, body_name):
idx = self.model.body_names.index(six.b(body_name))
return self.model.data.com_subtree[idx]
def get_body_comvel(self, body_name):
idx = self.model.body_names.index(six.b(body_name))
return self.model.body_comvels[idx]
def get_body_xmat(self, body_name):
idx = self.model.body_names.index(six.b(body_name))
return self.model.data.xmat[idx].reshape((3, 3))
def state_vector(self):
return np.concatenate([
self.model.data.qpos.flat,
self.model.data.qvel.flat
])
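# Illustrative sketch (not part of the original module) of the minimal surface
# a concrete environment provides; PointEnv and 'point.xml' are hypothetical.
#
#     class PointEnv(MujocoEnv):
#         def __init__(self):
#             MujocoEnv.__init__(self, 'point.xml', frame_skip=2)
#         def _step(self, action):
#             self.do_simulation(action, self.frame_skip)
#             return self.state_vector(), 0.0, False, {}
#         def reset_model(self):
#             self.set_state(self.init_qpos, self.init_qvel)
#             return self.state_vector()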
class MujocoPixelEnv(MujocoEnv):
def __init__(
self,
model_path,
frame_skip,
width=42,
height=42,
mode="rgb"
):
if model_path.startswith("/"):
fullpath = model_path
else:
fullpath = os.path.join(os.path.dirname(__file__), "assets", model_path)
if not path.exists(fullpath):
raise IOError("File %s does not exist" % fullpath)
self.frame_skip = frame_skip
self.model = mujoco_py.MjModel(fullpath)
self.data = self.model.data
self.width = width
self.height = height
self.mode = mode
self.viewer = None
self.camera2 = None
self.camera2 = mujoco_py.MjViewer(init_width=self.width, init_height=self.height)
self.camera2.start()
self.camera2.set_model(self.model)
self.camera2_setup()
self.metadata = {
'render.modes': ['human', 'rgb_array'],
'video.frames_per_second': int(np.round(1.0 / self.dt))
}
self.init_qpos = self.model.data.qpos.ravel().copy()
self.init_qvel = self.model.data.qvel.ravel().copy()
observation, _reward, done, _info = self._step(np.zeros(self.model.nu))
assert not done
self.obs_dim = observation.size
bounds = self.model.actuator_ctrlrange.copy()
low = bounds[:, 0]
high = bounds[:, 1]
self.action_space = spaces.Box(low, high)
high = np.inf*np.ones(self.obs_dim)
low = -high
self.observation_space = spaces.Box(low, high)
self._seed()
def camera2_setup(self):
raise NotImplementedError
def _get_obs(self):
camera2_output = None
self.camera2.render()
data, width, height = self.camera2.get_image()
camera2_output = np.fromstring(data, dtype='uint8').reshape(height, width, 3)[::-1, :, :]
if self.mode == "grey":
camera2_output = np.mean(camera2_output, axis=2)[:, :, np.newaxis]
return camera2_output
class MujocoPixel2CamEnv(MujocoEnv):
def __init__(
self,
model_path,
frame_skip,
width=42,
height=42,
mode="rgb"
):
if model_path.startswith("/"):
fullpath = model_path
else:
fullpath = os.path.join(os.path.dirname(__file__), "assets", model_path)
if not path.exists(fullpath):
raise IOError("File %s does not exist" % fullpath)
self.frame_skip = frame_skip
self.model = mujoco_py.MjModel(fullpath)
self.data = self.model.data
self.width = width
self.height = height
self.mode = mode
self.viewer = None
self.camera2 = None
self.camera2 = mujoco_py.MjViewer(init_width=self.width, init_height=self.height)
self.camera2.start()
self.camera2.set_model(self.model)
self.camera2_setup()
self.camera3 = None
self.camera3 = mujoco_py.MjViewer(init_width=self.width, init_height=self.height)
self.camera3.start()
self.camera3.set_model(self.model)
self.camera3_setup()
azimuth = self.camera2.cam.azimuth
self.camera3.cam.azimuth = azimuth + 180
self.metadata = {
'render.modes': ['human', 'rgb_array'],
'video.frames_per_second': int(np.round(1.0 / self.dt))
}
self.init_qpos = self.model.data.qpos.ravel().copy()
self.init_qvel = self.model.data.qvel.ravel().copy()
observation, _reward, done, _info = self._step(np.zeros(self.model.nu))
assert not done
self.obs_dim = observation.size
bounds = self.model.actuator_ctrlrange.copy()
low = bounds[:, 0]
high = bounds[:, 1]
self.action_space = spaces.Box(low, high)
high = np.inf*np.ones(self.obs_dim)
low = -high
self.observation_space = spaces.Box(low, high)
self._seed()
def camera2_setup(self):
raise NotImplementedError
def camera3_setup(self):
raise NotImplementedError
def _get_obs(self):
camera2_output = None
self.camera2.render()
data, width, height = self.camera2.get_image()
camera2_output = np.fromstring(data, dtype='uint8').reshape(height, width, 3)[::-1, :, :]
if self.mode == "grey":
camera2_output = np.mean(camera2_output, axis=2)[:, :, np.newaxis]
camera3_output = None
self.camera3.render()
data, width, height = self.camera3.get_image()
camera3_output = np.fromstring(data, dtype='uint8').reshape(height, width, 3)[::-1, :, :]
if self.mode == "grey":
camera3_output = np.mean(camera3_output, axis=2)[:, :, np.newaxis]
return np.concatenate([camera2_output, camera3_output], axis=2)
| mit | -2,237,362,221,986,834,000 | 31.354515 | 178 | 0.572256 | false | 3.609701 | false | false | false |
chenlian2015/skia_from_google | tools/skp/page_sets/skia_jsfiddlebigcar_desktop.py | 2 | 1282 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
page_set=page_set,
credentials_path='data/credentials.json')
self.user_agent_type = 'desktop'
self.archive_data_file = 'data/skia_jsfiddlebigcar_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.NavigateToPage(self)
action_runner.Wait(5)
class SkiaJsfiddlebigcarDesktopPageSet(page_set_module.PageSet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaJsfiddlebigcarDesktopPageSet, self).__init__(
user_agent_type='desktop',
archive_data_file='data/skia_jsfiddlebigcar_desktop.json')
urls_list = [
# Why: Page from Chromium's silk test cases
'http://jsfiddle.net/vBQHH/3/embedded/result/',
]
for url in urls_list:
self.AddPage(SkiaBuildbotDesktopPage(url, self))
| bsd-3-clause | 5,301,158,227,787,102,000 | 30.268293 | 74 | 0.710608 | false | 3.382586 | false | false | false |
soma0sd/pyNuc | ensdf/dbgen.py | 1 | 2763 | # -*- coding: utf-8 -*-
"""Inner Module Import"""
from ensdf.genlib import files
from ensdf.genlib import regexp
"""Python Packages"""
import pickle
def get_card(ident=''):
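    # Splits every ENSDF file into blank-line-delimited "cards" and keeps the
    # ones whose first line contains ``ident``.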
data = []
file_list = files.get_all_files()
prog = lambda i: (i+1)*100/len(file_list)
for ix, f in enumerate(file_list):
card = []
for l in f.readlines():
l = l.replace('\n', '')
if l.strip() == '':
if ident in card[0]:
data.append(card)
card = []
else:
card.append(l)
print("\rGet Card... [{:6.2f}%]".format(prog(ix)), end='')
print()
return data
def get_ground_level():
card = get_card("ADOPTED LEVELS")
prog = lambda i: (i+1)*100/len(card)
data = {}
for ic, c in enumerate(card):
for ixl, l1 in enumerate(c):
lv = regexp.re_level_rec(l1)
if lv:
key = regexp.nucid2nucpy(lv['NUCID'])
if key in data.keys():
break
data[key] = {}
data[key]['E'] = lv['E']
data[key]['J'] = lv['J']
data[key]['T'] = lv['T']
data[key]['MODE'] = []
mods = ''
for l2 in c[ixl+1:]:
de = regexp.re_level_decay(l2)
if regexp.re_level_rec(l2):
break
elif de:
mods += de
mode = regexp.mode_parsing(mods, key)
data[key]['MODE'] = mode
print("\rGet Ground level...[{:6.2f}%]".format(prog(ic)), end='')
print()
return data
def get_nist():
import re
data = {}
iso = []
card = []
re_C = re.compile('^[_]+$')
re_i = re.compile('^(.{3}) (.{3}) (.{3}) (.{1,18})[ ]*(.{0,13})')
re_f = re.compile('[\d\.]+')
f = files.get_nist_file()
for l in f.readlines()[3:]:
l = l.replace('\n', '')
if re_C.match(l):
iso.append(card)
card = []
else:
card.append(l)
for c in iso:
m1 = re_i.match(c[0])
main = m1.groups()
Z = int(main[0])
symbol = main[1].strip()
mass = float(re_f.match(main[3]).group(0))
if re_f.match(main[4]):
na = float(re_f.match(main[4]).group(0))
else:
na = 0.0
code = "{:03d}{:03d}".format(Z, int(main[2]))
data[code] = {'SYM': symbol, 'M': mass, 'IS': na}
for cs in c[1:]:
m2 = re_i.match(cs)
sub = m2.groups()
mass = float(re_f.match(sub[3]).group(0))
if re_f.match(sub[4]):
na = float(re_f.match(sub[4]).group(0))
else:
na = 0.0
code = "{:03d}{:03d}".format(Z, int(sub[2]))
data[code] = {'SYM': symbol, 'M': mass, 'IS': na}
data['000001'] = {'SYM': 'n', 'M': 1.008664916, 'IS': 0.0}
return data
data = 0
data = get_ground_level()
nist = get_nist()
f = open('nucinfo.pkl', 'wb')
pickle.dump(data, f)
f = open('nist.pkl', 'wb')
pickle.dump(nist, f)
| mit | -3,026,741,318,854,007,300 | 25.066038 | 69 | 0.500181 | false | 2.771314 | false | false | false |
jbloom/epitopefinder | scripts/epitopefinder_plotdistributioncomparison.py | 1 | 3447 | #!python
"""Script for plotting distributions of epitopes per site for two sets of sites.
Uses matplotlib. Designed to analyze output of epitopefinder_getepitopes.py.
Written by Jesse Bloom."""
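# Illustrative note (not part of the original script): the single infile
# argument is parsed by epitopefinder.io.ParseInfile into key / value pairs.
# The keys consumed below are plotfile, epitopesfile1, epitopesfile2, set1name,
# set2name, title, pvalue, pvaluewithreplacement, and (optionally) ymax; the
# exact file syntax is whatever ParseInfile expects.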
import os
import sys
import random
import epitopefinder.io
import epitopefinder.plot
def main():
"""Main body of script."""
random.seed(1) # seed random number generator in case P values are being computed
if not epitopefinder.plot.PylabAvailable():
raise ImportError("Cannot import matplotlib / pylab, which are required by this script.")
# output is written to out, currently set to standard out
out = sys.stdout
out.write("Beginning execution of epitopefinder_plotdistributioncomparison.py\n")
# read input file and parse arguments
args = sys.argv[1 : ]
if len(args) != 1:
raise IOError("Script must be called with exactly one argument specifying the input file")
infilename = sys.argv[1]
if not os.path.isfile(infilename):
raise IOError("Failed to find infile %s" % infilename)
d = epitopefinder.io.ParseInfile(open(infilename))
out.write("\nRead input arguments from %s\n" % infilename)
out.write('Read the following key / value pairs:\n')
for (key, value) in d.iteritems():
out.write("%s %s\n" % (key, value))
plotfile = epitopefinder.io.ParseStringValue(d, 'plotfile').strip()
epitopesbysite1_list = []
epitopesbysite2_list = []
for (xlist, xf) in [(epitopesbysite1_list, 'epitopesfile1'), (epitopesbysite2_list, 'epitopesfile2')]:
epitopesfile = epitopefinder.io.ParseFileList(d, xf)
if len(epitopesfile) != 1:
raise ValueError("%s specifies more than one file" % xf)
epitopesfile = epitopesfile[0]
for line in open(epitopesfile).readlines()[1 : ]:
if not (line.isspace() or line[0] == '#'):
(site, n) = line.split(',')
(site, n) = (int(site), int(n))
xlist.append(n)
if not xlist:
raise ValueError("%s failed to specify information for any sites" % xf)
set1name = epitopefinder.io.ParseStringValue(d, 'set1name')
set2name = epitopefinder.io.ParseStringValue(d, 'set2name')
title = epitopefinder.io.ParseStringValue(d, 'title').strip()
if title.upper() in ['NONE', 'FALSE']:
title = None
pvalue = epitopefinder.io.ParseStringValue(d, 'pvalue')
if pvalue.upper() in ['NONE', 'FALSE']:
pvalue = None
pvaluewithreplacement = None
else:
pvalue = int(pvalue)
pvaluewithreplacement = epitopefinder.io.ParseBoolValue(d, 'pvaluewithreplacement')
if pvalue < 1:
raise ValueError("pvalue must be >= 1")
if len(epitopesbysite2_list) >= len(epitopesbysite1_list):
raise ValueError("You cannot use pvalue since epitopesbysite2_list is not a subset of epitopesbysite1_list -- it does not contain fewer sites with specified epitope counts.")
ymax = None
if 'ymax' in d:
ymax = epitopefinder.io.ParseFloatValue(d, 'ymax')
out.write('\nNow creating the plot file %s\n' % plotfile)
epitopefinder.plot.PlotDistributionComparison(epitopesbysite1_list, epitopesbysite2_list, set1name, set2name, plotfile, 'number of epitopes', 'fraction of sites', title, pvalue, pvaluewithreplacement, ymax=ymax)
out.write("\nScript is complete.\n")
if __name__ == '__main__':
main() # run the script
| gpl-3.0 | -4,142,234,527,212,145,000 | 43.766234 | 215 | 0.668407 | false | 3.416254 | false | false | false |
dazult/EPA-2012-Residential-Exposure-SOPs | sop_calcs/forms.py | 1 | 76603 | from __future__ import absolute_import
import copy
import datetime
from itertools import chain
from urlparse import urljoin
from django.conf import settings
from django.forms.util import flatatt, to_current_timezone
from django.utils.datastructures import MultiValueDict, MergeDict
from django.utils.html import escape, conditional_escape
from django.utils.translation import ugettext, ugettext_lazy
from django.utils.encoding import StrAndUnicode, force_unicode
from django.utils.safestring import mark_safe
from django.utils import datetime_safe, formats
from django import forms
import json
from collections import defaultdict
import operator
class CheckboxSelectMultipleBootstrap(forms.SelectMultiple):
def __init__(self,attrs=None, choices=()):
super(CheckboxSelectMultipleBootstrap, self).__init__(attrs, choices)
self.choices_attrs = {}
def render(self, name, value, attrs=None, choices=()):
if value is None: value = []
has_id = attrs and 'id' in attrs
final_attrs = self.build_attrs(attrs, name=name)
output = [u'<div>']
# Normalize to strings
str_values = set([force_unicode(v) for v in value])
for i, (option_value, option_label) in enumerate(chain(self.choices, choices)):
# If an ID attribute was given, add a numeric index as a suffix,
# so that the checkboxes don't all have the same ID attribute.
if has_id:
final_attrs = dict(final_attrs, id='%s_%s' % (attrs['id'], i))
label_for = u' for="%s"' % final_attrs['id']
else:
label_for = ''
choice_attrs = copy.copy(final_attrs)
if option_value in self.choices_attrs:
choice_attrs.update(self.choices_attrs[option_value])
cb = forms.CheckboxInput(choice_attrs, check_test=lambda value: value in str_values)
option_value = force_unicode(option_value)
rendered_cb = cb.render(name, option_value)
option_label = conditional_escape(force_unicode(option_label))
output.append(u'<div><label%s class="checkbox inline">%s %s</label></div>' % (label_for, rendered_cb, option_label))
output.append(u'</div>')
return mark_safe(u'\n'.join(output))
def id_for_label(self, id_):
# See the comment for RadioSelect.id_for_label()
if id_:
id_ += '_0'
return id_
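# Illustrative usage sketch (not part of the original module): the widget is a
# drop-in replacement for Django's default multiple-select; the field name and
# choices below are hypothetical.
#
#     class ExampleForm(forms.Form):
#         scenarios = forms.MultipleChoiceField(
#             choices=[('a', 'Option A'), ('b', 'Option B')],
#             widget=CheckboxSelectMultipleBootstrap())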
class RadioFieldBootstrapRenderer(forms.widgets.RadioSelect.renderer):
def render(self):
"""
Outputs a <ul> for this set of choice fields.
If an id was given to the field, it is applied to the <ul> (each
item in the list will get an id of `$id_$i`).
"""
id_ = self.attrs.get('id', None)
        start_tag = '<div id="%s" class="radio inline">' % id_ if id_ else '<div>'
output = [start_tag]
for widget in self:
output.append(force_unicode(widget))
output.append('</div>')
return mark_safe('\n'.join(output))
class RadioSelectBootstrap(forms.widgets.RadioSelect):
renderer = RadioFieldBootstrapRenderer
from sop_calcs.gardensandtrees import gardensandtrees
from sop_calcs.treated_pets import treated_pets
from sop_calcs.insect_repellent import insect_repellent
from sop_calcs.lawnturfsop import lawnturfsop
from sop_calcs.general_handler_sop import general_handler_sop
from sop_calcs.paintsop import paintsop
from sop_calcs.impregnated_materials import impregnated_materials
from sop_calcs.outdoor_misting import outdoor_misting, outdoor_misting_handler
from sop_calcs.indoor_envirnoments import indoor
from sop_calcs.exposure_profile import RiskProfile
class ResultsForm(forms.Form):
title = "Assessment Background Information"
def __init__(self,*args,**kwargs):
self.input_data = kwargs.pop('_input_data',None)
super(ResultsForm,self).__init__(*args,**kwargs)
def inputs(self):
return self.input_data
def lifestage_displays(self):
lifestages = {}
lifestages['adult'] = "Adult (All)"
lifestages['adult_general'] = "Adult (All)"
lifestages['adult_female'] = "Adult Female"
lifestages['adult_male'] = "Adult Male"
lifestages['1_to_2'] = "1 < 2 year old"
lifestages['3_to_6'] = "3 < 6 year old"
lifestages['6_to_11'] = "6 < 11 year old"
lifestages['11_to_16'] = "11 < 16 year old"
return lifestages
def results(self):
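        # Walks the wizard's serialized steps (s['0'] = background, s['1'] = tox,
        # s['3']..s['12'] = per-scenario inputs), runs the matching SOP
        # calculator for each selected scenario, and folds the results into a
        # RiskProfile keyed by exposure duration and lifestage.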
try:
s = json.loads(self.input_data)
ss = ""
RIs = defaultdict(lambda : defaultdict(list))
exposure_routes = set(s['0']['exposure_routes'])
exposure_scenarios = set(s['0']['exposure_scenarios'])
body_weights_adults_options = [80., 69., 86.] # kg
bodyweight = {}
bodyweight['adult'] = 80.#body_weights_adults_options[0]
bodyweight['adult_general'] = 80.
bodyweight['adult_female'] = 69.
bodyweight['adult_male'] = 86.
pop_selection = "gen"
amended_RIs = {}
for duration in s['0']['exposure_durations']:
amended_RIs[duration] = {}
for target in s['0']['target_population']:
if target == 'adult_female':
pop_selection = "adult_female"
bodyweight['adult'] = bodyweight['adult_female']
elif target == 'adult_male':
pop_selection = "adult_male"
bodyweight['adult'] = bodyweight['adult_male']
else:
pop_selection = "gen"
bodyweight['adult'] = bodyweight['adult_general']
bodyweight['1_to_2'] = 11.
bodyweight['3_to_6'] = 19.
bodyweight['6_to_11'] = 32.
bodyweight['11_to_16'] = 57.
inhalation_rate = {}
inhalation_rate['adult'] = 0.64
inhalation_rate['1_to_2'] = 0.33
inhalation_rate['3_to_6'] = 0.42
SA_BW_ratio = {'1_to_2':640., 'adult':280.}
risk_profile = RiskProfile(exposure_routes)
for duration in s['0']['exposure_durations']:
ss += "<br>%s<br>" % duration
POD = {}
LOC = {}
absorption = {}
                    try:
                        POD['dermal'] = s['1']['dermal_%s_%s_POD'%(duration,target)]
                        LOC['dermal'] = s['1']['dermal_%s_%s_LOC'%(duration,target)]
                        absorption['dermal'] = s['1']['dermal_absorption']
                    except:
                        absorption['dermal'] = 1
try:
POD['inhalation'] = s['1']['inhalation_%s_%s_POD'%(duration,target)]
LOC['inhalation'] = s['1']['inhalation_%s_%s_LOC'%(duration,target)]
absorption['inhalation'] = s['1']['inhalation_absorption']
except:
absorption['inhalation'] = 1
try:
POD['oral'] = s['1']['oral_%s_%s_POD'%(duration,target)]
LOC['oral'] = s['1']['oral_%s_%s_LOC'%(duration,target)]
except:
pass
try:
POD['dietary'] = s['1']['dietary_POD']
LOC['dietary'] = s['1']['dietary_LOC']
except:
pass
                    if s['3'] is not None and 'generalhandler' in exposure_scenarios: #generalhandler
SOP = "General Handler"
combining_dermal_inhalation = []
#application_rate[formulation][scenario][application_method][application_type]
application_rate = defaultdict(lambda : defaultdict(lambda : defaultdict(dict)))
for formulation in GeneralHandlerForm.application_rate_form_map:
for scenario in GeneralHandlerForm.application_rate_form_map[formulation]:
for application_method in GeneralHandlerForm.application_rate_form_map[formulation][scenario]:
for application_type in GeneralHandlerForm.application_rate_form_map[formulation][scenario][application_method]:
if GeneralHandlerForm.application_rate_form_map[formulation][scenario][application_method][application_type] in s['3']:
application_rate[formulation][scenario][application_method][application_type] = s['3'][GeneralHandlerForm.application_rate_form_map[formulation][scenario][application_method][application_type]]
else:
application_rate[formulation][scenario][application_method][application_type] = 0
results = general_handler_sop(POD, LOC, bodyweight, absorption, application_rate)
risk_profile.update(results, SOP, duration)
                    if s['4'] is not None and 'generalhandler' in exposure_scenarios: #misting - handler
SOP = "General Handler"
OASS_fraction_ai = s['4']['OASS_fraction_ai']
OASS_amount_of_product_in_can = s['4']['OASS_amount_of_product_in_can']
ORMS_drum_size = s['4']['ORMS_drum_size']
ORMS_dilution_rate = s['4']['ORMS_dilution_rate']
ORMS_fraction_ai = s['4']['ORMS_fraction_ai']
AB_drum_size = s['4']['AB_drum_size']
AB_dilution_rate = s['4']['AB_dilution_rate']
AB_fraction_ai = s['4']['AB_fraction_ai']
results = outdoor_misting_handler(POD, LOC, bodyweight, absorption['dermal'], absorption['inhalation'], OASS_fraction_ai, OASS_amount_of_product_in_can, ORMS_drum_size, ORMS_dilution_rate, ORMS_fraction_ai, AB_drum_size, AB_dilution_rate, AB_fraction_ai)
risk_profile.update(results, SOP, duration)
                    if s['5'] is not None and 'treatedpet' in exposure_scenarios: #treatedpet
SOP = "Treated Pets"
ai_amounts = {}
amount_applied_form_map = TreatedPetForm.amount_applied_form_map
for animal in ['cat','dog']:
ai_amounts[animal] = {}
for size in ['small','medium','large']:
ai_amounts[animal][size] = s['5'][TreatedPetForm.amount_applied_form_map[animal][size]]*s['5']['fraction_ai']*1000.
results = treated_pets(POD, LOC, bodyweight, absorption['dermal'], ai_amounts)
risk_profile.update(results, SOP, duration)
                    if s['6'] is not None and 'lawn' in exposure_scenarios: #lawn
SOP = "Lawns and Turf"
fraction_active_ingredient = s['6']['fraction_ai_in_pellets']
ttr = {'liquid':s['6']['liquid_ttr_conc'], 'solid':s['6']['solid_ttr_conc']}
application_rate = {'liquid':s['6']['liquid_application_rate'],'solid':s['6']['solid_application_rate']} # lb ai / acre
results = lawnturfsop(POD, LOC, bodyweight, absorption['dermal'], application_rate, ttr, fraction_active_ingredient)
risk_profile.update(results, SOP, duration)
                    if s['7'] is not None and 'garden' in exposure_scenarios: #gardensandtrees
SOP = "Gardens and Trees"
dfr = {'liquid':s['7']['liquid_dfr_conc'], 'solid':s['7']['solid_dfr_conc']}
application_rate = {'liquid':s['7']['liquid_application_rate'],'solid':s['7']['solid_application_rate']} # lb ai / acre
results = gardensandtrees(POD, LOC, bodyweight, absorption['dermal'], application_rate, dfr)
risk_profile.update(results, SOP, duration)
                    if s['8'] is not None and 'insect' in exposure_scenarios: #insect
SOP = "Insect Repellents"
amount_ai = defaultdict(lambda : defaultdict(dict))
for sunscreen_status in ['without','with']:
for formulation in InsectRepellentsForm.formulations:
amount_ai[sunscreen_status][formulation] = s['8'][InsectRepellentsForm.amount_ai_formulations_form_map[sunscreen_status][formulation]]
results = insect_repellent(POD, LOC, bodyweight, absorption['dermal'], SA_BW_ratio, amount_ai )
risk_profile.update(results, SOP, duration)
                    if s['9'] is not None and 'paint' in exposure_scenarios: #paint
SOP = "Paint and Preservatives"
surface_residue_concentration = s['9']['surface_residue_concentration']
fraction_of_body_exposed = PaintsAndPreservativesForm.DEFAULT_FRACTION_OF_BODY_EXPOSED#s['9']['fraction_of_body_exposed']
daily_material_to_skin_transfer_efficency = PaintsAndPreservativesForm.DEFAULT_DAILY_MATERIAL_TO_SKIN_TRANSFER_EFFICENCY#s['9']['daily_material_to_skin_transfer_efficency']
exposure_time = PaintsAndPreservativesForm.EXPOSURE_TIME[s['9']['indoor_or_outdoor']]#s['9']['exposure_time']
hand_to_mouth_event_freqency = PaintsAndPreservativesForm.HAND_TO_MOUTH_EVENTS_PER_HOUR[s['9']['indoor_or_outdoor']]#s['9']['hand_to_mouth_event_frequency']
results = paintsop(POD, LOC, bodyweight, absorption['dermal'], SA_BW_ratio, surface_residue_concentration, fraction_of_body_exposed, daily_material_to_skin_transfer_efficency, exposure_time, hand_to_mouth_event_freqency )
risk_profile.update(results, SOP, duration)
                    if s['10'] is not None and 'impregnated_materials' in exposure_scenarios: #impregnated_materials
SOP = "Impregnated Materials"
surface_residue_concentration = s['10']['surface_residue_concentration']
weight_fraction = s['10']['weight_fraction_of_active_ingredient']
material_type = s['10']['material_type']
if surface_residue_concentration is None or surface_residue_concentration == 0:
surface_residue_concentration = weight_fraction*ImpregnatedMaterialsForm.MATERIAL_WEIGHT_TO_SURFACE_AREA_DENSITY[material_type]
body_fraction_exposed_type = s['10']['body_fraction_exposed_type']
fraction_of_body_exposed = ImpregnatedMaterialsForm.BODY_FRACTION_EXPOSED[body_fraction_exposed_type]#s['10']['fraction_of_body_exposed']
protective_barrier_present = s['10']['protective_barrier_present']
protection_factor = ImpregnatedMaterialsForm.PROTECTION_FACTOR[protective_barrier_present]
#HtM
type_of_flooring = s['10']['type_of_flooring']
fraction_of_ai_transferred_to_hands = ImpregnatedMaterialsForm.FRACTION_AI_HAND_TRANSFER[type_of_flooring]
hand_exposure_time = ImpregnatedMaterialsForm.FLOOR_EXPOSURE_TIME[type_of_flooring]
daily_material_to_skin_transfer_efficency = ImpregnatedMaterialsForm.FRACTION_AI_HAND_TRANSFER[type_of_flooring]
#daily_material_to_skin_transfer_efficency = ImpregnatedMaterialsForm.DEFAULT_DAILY_MATERIAL_TO_SKIN_TRANSFER_EFFICENCY
indoor_or_outdoor = s['10']['indoor_or_outdoor']
object_exposure_time = ImpregnatedMaterialsForm.EXPOSURE_TIME[indoor_or_outdoor]
hand_to_mouth_event_freqency = ImpregnatedMaterialsForm.HAND_TO_MOUTH_EVENTS_PER_HOUR[indoor_or_outdoor]
#daily_material_to_skin_transfer_efficency = forms.FloatField(required=False,initial=0.14)
#OtM
FRACTION_AI_HAND_TRANSFER = {'':0., 'carpet':0.06,'hard':0.08}
fraction_of_residue_on_object = ImpregnatedMaterialsForm.FRACTION_AI_HAND_TRANSFER[type_of_flooring]
object_to_mouth_event_frequency = ImpregnatedMaterialsForm.OBJECT_TO_MOUTH_EVENTS_PER_HOUR[indoor_or_outdoor]
results = impregnated_materials(POD, LOC, bodyweight, absorption['dermal'], SA_BW_ratio, surface_residue_concentration, fraction_of_body_exposed, daily_material_to_skin_transfer_efficency, protection_factor, fraction_of_ai_transferred_to_hands, hand_exposure_time, hand_to_mouth_event_freqency, fraction_of_residue_on_object, object_exposure_time, object_to_mouth_event_frequency)
risk_profile.update(results, SOP, duration)
                    if s['11'] is not None and 'indoor' in exposure_scenarios: #indoor
SOP = "Indoor"
space_spray_fraction_ai = s['11']['space_spray_fraction_ai']
space_spray_amount_of_product = s['11']['space_spray_amount_of_product']
space_spray_restriction = s['11']['space_spray_restriction']
molecular_weight = s['11']['molecular_weight']
vapor_pressure = s['11']['vapor_pressure']
residues = {}
residues['broadcast'] = s['11']['broadcast_residue']
residues['perimeter/spot/bedbug (coarse)'] = s['11']['coarse_residue']
residues['perimeter/spot/bedbug (pin stream)'] = s['11']['pin_stream_residue']
residues['cracks and crevices'] = s['11']['crack_and_crevice_residue']
residues['foggers'] = s['11']['foggers_residue']
residues['space sprays'] = s['11']['space_sprays_residue']
matress_residue = s['11']['matress_residue']
results = indoor(POD, LOC, bodyweight, absorption['dermal'], absorption['inhalation'], space_spray_fraction_ai, space_spray_amount_of_product, space_spray_restriction, molecular_weight, vapor_pressure,residues,matress_residue)
risk_profile.update(results, SOP, duration)
                    if s['12'] is not None and 'misting' in exposure_scenarios: #misting
SOP = "Misting"
OASS_fraction_ai = s['12']['OASS_fraction_ai']
OASS_amount_of_product_in_can = s['12']['OASS_amount_of_product_in_can']
CCTM_amount_ai_in_product= s['12']['CCTM_amount_ai_in_product']
ORMS_application_rate= s['12']['ORMS_application_rate']
ORMS_dilution_rate= s['12']['ORMS_dilution_rate']
ORMS_fraction_ai= s['12']['ORMS_fraction_ai']
AB_application_rate= s['12']['AB_application_rate']
AB_dilution_rate = s['12']['AB_dilution_rate']
AB_fraction_ai = s['12']['AB_fraction_ai']
results = outdoor_misting(POD, LOC, bodyweight, absorption['dermal'], absorption['inhalation'], OASS_fraction_ai, OASS_amount_of_product_in_can, CCTM_amount_ai_in_product, ORMS_application_rate, ORMS_dilution_rate, ORMS_fraction_ai, AB_application_rate, AB_dilution_rate, AB_fraction_ai)
risk_profile.update(results, SOP, duration)
sorted_RIs = {}
ri_id=0
for duration in risk_profile.results:
sorted_RIs[duration] = {}
for lifestage in risk_profile.results[duration]:
lifestage_final = lifestage
if pop_selection != "gen" and lifestage != 'adult':
continue
elif pop_selection != "gen":
lifestage_final = pop_selection
sorted_RIs[duration][lifestage_final] = risk_profile.results[duration][lifestage]
sorted_RIs[duration][lifestage_final].sort()
amended_RIs[duration][lifestage_final] = []
for l in sorted_RIs[duration][lifestage_final]:
n = list(l)
n.append(ri_id)
ri_id+=1
amended_RIs[duration][lifestage_final].append(n)
return amended_RIs
except Exception as e:
return e, str(e)
class IngredientOverviewForm(forms.Form):
calls = 0
title = "Assessment Background Information"
active_ingredient = forms.CharField(required=False)
    #GardenAndTreesForm, InsectRepellentsForm, PaintsAndPreservativesForm
SCENARIOS = [('generalhandler','Handler/Applicator (all scenarios)'),('insect','Insect Repellents'),('treatedpet','Treated Pets'),('lawn','Lawns/Turf'),('garden','Gardens And Trees'),('paint','Paints And Preservatives'),('impregnated_materials','Impregnated Materials'), ('indoor','Indoor'),('misting','Outdoor Misting ')]
exposure_scenarios = forms.MultipleChoiceField(choices=SCENARIOS, widget=CheckboxSelectMultipleBootstrap())
ROUTES = [('oral', 'Incidental Oral'), ('dermal', 'Dermal'), ('inhalation', 'Inhalation') , ('dietary', 'Granule/Pellet Ingestion')]
exposure_routes = forms.MultipleChoiceField(choices=ROUTES, widget=CheckboxSelectMultipleBootstrap(), initial = ['oral','dermal','inhalation','dietary'])
DURATIONS = [('short','Short-Term'),('intermediate','Intermediate-Term'),('long','Long-Term')]
exposure_durations = forms.MultipleChoiceField(choices=DURATIONS , widget=CheckboxSelectMultipleBootstrap())
TARGET_POP_CHOICES = [('gen','General Population (Adults + Children)'),('adult_female','Adult (Female Only)'),('adult_male','Adult (Male Only)')]
TARGET_POP_CHOICES_DICT = {}
for choice in TARGET_POP_CHOICES:
TARGET_POP_CHOICES_DICT[choice[0]] = choice[1]
target_population = forms.MultipleChoiceField(choices=TARGET_POP_CHOICES , widget=CheckboxSelectMultipleBootstrap(),initial=['gen'])
def __init__(self,*args,**kwargs):
super(IngredientOverviewForm,self).__init__(*args,**kwargs)
IngredientOverviewForm.calls += 1
def clean(self):
cleaned_data = super(IngredientOverviewForm, self).clean()
exposure_scenarios = cleaned_data.get("exposure_scenarios")
exposure_routes = cleaned_data.get("exposure_routes")
if exposure_routes and exposure_scenarios:
if 'dermal' in exposure_routes:
return cleaned_data
if 'oral' in exposure_routes:
if True in [scenario in exposure_scenarios for scenario in ['lawn','insect','paint','treatedpet','indoor','impregnated_materials', 'misting']]:
return cleaned_data
if 'inhalation' in exposure_routes:
if True in [scenario in exposure_scenarios for scenario in ['indoor','misting','generalhandler']]:
return cleaned_data
if 'dietary' in exposure_routes:
if True in [scenario in exposure_scenarios for scenario in ['lawn']]:
return cleaned_data
raise forms.ValidationError("No combinations of these routes and scenarios exist.")
return cleaned_data
class ToxForm(forms.Form):
calls = 0
title = "Toxicological Information"
POD_STUDY_CHOICES = [('',''),('route-specific','Route-specific'),('oral','Oral')]
ABS_STUDY_CHOICES = [('',''), ('human-study', 'Human Study'), ('animal-study', 'Animal Study'), ('POD or LOAEL/NOAEL comparison','Estimated by POD or LOAEL/NOAEL comparison'),('in vitro study','In vitro study'),('other','Other')]
TARGET_POP_CHOICES = [('gen','General Population (Adults + Children)'),('adult_female','Adult (Female Only)'),('adult_male','Adult (Male Only)')]
TARGET_POP_CHOICES_DICT = {}
for choice in TARGET_POP_CHOICES:
TARGET_POP_CHOICES_DICT[choice[0]] = choice[1]
def __init__(self,*args,**kwargs):
data = kwargs.pop('data_from_step_1',None)
self.data_from_step_1 = data
super(ToxForm,self).__init__(*args, **kwargs)
self.data_from_step_1 = self.initial['data_from_step_1']
ToxForm.calls += 1
if self.data_from_step_1:
if 'dermal' in self.data_from_step_1['exposure_routes']:
self.fields['dermal_absorption'] = forms.FloatField(required=False, initial=1, label="Dermal Absorption (0-1)",min_value=0., max_value=1.)
self.fields['dermal_absorption_study'] = forms.ChoiceField(choices=ToxForm.ABS_STUDY_CHOICES,required=False,label="Dermal Absorption Study")
self.fields['dermal_POD_study'] = forms.ChoiceField(choices=ToxForm.POD_STUDY_CHOICES,required=False,label="Dermal POD Study" )
for duration in self.data_from_step_1['exposure_durations']:
if 'dermal' in self.data_from_step_1['exposure_routes']:
for target in self.data_from_step_1['target_population']:
self.fields['dermal_%s_%s_POD'%(duration,target)] = forms.FloatField(required=False, min_value=0.,label="%s Term Dermal POD (mg/kg/day) (%s)"%(duration.capitalize(), ToxForm.TARGET_POP_CHOICES_DICT[target]) )
self.fields['dermal_%s_%s_LOC'%(duration,target)] = forms.FloatField(required=False, initial=100, min_value=0.,label="%s Term Dermal LOC (%s)"%(duration.capitalize(), ToxForm.TARGET_POP_CHOICES_DICT[target]) )
if True in [scenario in self.data_from_step_1['exposure_scenarios'] for scenario in ['lawn','insect','paint','treatedpet','indoor','impregnated_materials','misting']] and 'oral' in self.data_from_step_1['exposure_routes']:
for target in self.data_from_step_1['target_population']:
self.fields['oral_%s_%s_POD'%(duration,target)] = forms.FloatField(required=False, min_value=0.,label="%s Term Oral POD (mg/kg/day) (%s)"%(duration.capitalize(), ToxForm.TARGET_POP_CHOICES_DICT[target]))
self.fields['oral_%s_%s_LOC'%(duration,target)] = forms.FloatField(required=False, initial=100, min_value=0., label="%s Term Oral LOC (%s)"%(duration.capitalize(), ToxForm.TARGET_POP_CHOICES_DICT[target]))
if True in [scenario in self.data_from_step_1['exposure_scenarios'] for scenario in ['indoor','misting','generalhandler']] and 'inhalation' in self.data_from_step_1['exposure_routes']:
self.fields['inhalation_absorption'] = forms.FloatField(required=False, initial=1, label="Inhalation Absorption (0-1)",min_value=0., max_value=1.)
self.fields['inhalation_absorption_study'] = forms.ChoiceField(choices=ToxForm.ABS_STUDY_CHOICES,required=False,label="Inhalation Absorption Study")
self.fields['inhalation_POD_study'] = forms.ChoiceField(choices=ToxForm.POD_STUDY_CHOICES,required=False, label="Inhalation POD Study")
for duration in self.data_from_step_1['exposure_durations']:
if True in [scenario in self.data_from_step_1['exposure_scenarios'] for scenario in ['indoor','misting','generalhandler']] and 'inhalation' in self.data_from_step_1['exposure_routes']:
for target in self.data_from_step_1['target_population']:
self.fields['inhalation_%s_%s_POD'%(duration,target)] = forms.FloatField(required=False, min_value=0.,label="%s Term Inhalation POD (mg/kg/day) (%s)"%(duration.capitalize(), ToxForm.TARGET_POP_CHOICES_DICT[target]))
self.fields['inhalation_%s_%s_LOC'%(duration,target)] = forms.FloatField(required=False, initial=100, min_value=0.,label="%s Term Inhalation LOC (%s)"%(duration.capitalize(), ToxForm.TARGET_POP_CHOICES_DICT[target]))
if 'lawn' in self.data_from_step_1['exposure_scenarios'] and 'dietary' in self.data_from_step_1['exposure_routes']:
if 'gen' in self.data_from_step_1['target_population']:
self.fields['dietary_POD'] = forms.FloatField(required=False, min_value=0.,label="Dietary POD (mg/kg/day) (Children)")
self.fields['dietary_LOC'] = forms.FloatField(required=False, initial=100,min_value=0., label="Dietary LOC (Children)")
def clean(self, *args, **kwargs):
cleaned_data = super(ToxForm, self).clean()
for route in self.data_from_step_1['exposure_routes']:
if '%s_absorption'%(route) in self.fields:
absorption = cleaned_data.get('%s_absorption'%(route))
pod_study = cleaned_data.get('%s_POD_study'%(route))
if pod_study == 'route-specific' and absorption != 1:
msg = u"Absorption must be 1 for route specific POD studies."
self._errors['%s_absorption'%(route)] = self.error_class([msg])
self._errors['%s_POD_study'%(route)] = self.error_class([msg])
del cleaned_data['%s_POD_study'%(route)]
if '%s_absorption'%(route) in cleaned_data:
del cleaned_data['%s_absorption'%(route)]
# Always return the full collection of cleaned data.
return cleaned_data
class GeneralHandlerForm(forms.Form):
title = "General Handler Data Entry Form"
application_rate = defaultdict(lambda : defaultdict(lambda : defaultdict(dict)))
application_rate_units = defaultdict(lambda : defaultdict(lambda : defaultdict(dict)))
application_rate_form_map = defaultdict(lambda : defaultdict(lambda : defaultdict(dict)))
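    # Nesting is formulation -> scenario -> application method -> application
    # type. The three defaultdict levels let any partial path be indexed
    # without a KeyError; the innermost plain dict holds the rates assigned
    # below.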
application_rate['Dust/Powder']['Indoor Environment']['Plunger Duster']['Broadcast; Perimeter/Spot/ Bedbug (course application)'] = 0
application_rate['Dust/Powder']['Gardens / Trees']['Plunger Duster'][''] = 0
application_rate['Dust/Powder']['Indoor Environment']['Bulb duster']['Perimeter/Spot/Bedbug; Crack and Crevice'] = 0
application_rate['Dust/Powder']['Gardens / Trees']['Bulb duster'][''] = 0
application_rate['Dust/Powder']['Indoor Environment']['Electric/power duster']['Broadcast; Perimeter/Spot/ Bedbug (course application)'] = 0
application_rate['Dust/Powder']['Gardens / Trees']['Electric/power duster'][''] = 0
application_rate['Dust/Powder']['Indoor Environment']['Hand crank duster']['Broadcast; Perimeter/Spot/ Bedbug (course application)'] = 0
application_rate['Dust/Powder']['Gardens / Trees']['Hand crank duster'][''] = 0
application_rate['Dust/Powder']['Indoor Environment']['Shaker can']['Broadcast'] = 0
application_rate['Dust/Powder']['Indoor Environment']['Shaker can']['Broadcast; Perimeter/Spot/ Bedbug (course application)'] = 0
application_rate['Dust/Powder']['Gardens / Trees']['Shaker can']['can'] = 0
application_rate['Dust/Powder']['Gardens / Trees']['Shaker can']['ft2'] = 0
application_rate['Liquid concentrate']['Indoor Environment']['Manually-pressurized handwand (w/ or w/o pin stream nozzle)']['Broadcast, Perimeter/Spot/ Bedbug (course application); Perimeter /Spot/ Bedbug (pinstream application); Crack and Crevice'] = 0
application_rate['Liquid concentrate']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 0
application_rate['Liquid concentrate']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 0
application_rate['Liquid concentrate']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 0
application_rate['Liquid concentrate']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 0
application_rate['Liquid concentrate']['Lawns / Turf']['Hose-end Sprayer'][''] = 0
application_rate['Liquid concentrate']['Lawns / Turf']['Manually-pressurized handwand'][''] = 0
application_rate['Liquid concentrate']['Gardens / Trees']['Backpack']['ft2'] = 0
application_rate['Liquid concentrate']['Gardens / Trees']['Backpack']['gallons'] = 0
application_rate['Liquid concentrate']['Gardens / Trees']['Sprinkler can']['ft2'] = 0
application_rate['Liquid concentrate']['Gardens / Trees']['Sprinkler can']['gallons'] = 0
application_rate['Liquid concentrate']['Lawns / Turf']['Sprinkler can'][''] = 0
application_rate['Ready-to-use']['Indoor Environment']['Aerosol can']['Broadcast Surface Spray'] = 0
application_rate['Ready-to-use']['Indoor Environment']['Aerosol can']['Perimeter/ Spot/ Bedbug (course application)'] = 0
application_rate['Ready-to-use']['Indoor Environment']['Aerosol can with pin stream nozzle']['Perimeter/ Spot/ Bedbug (pin stream application); Crack and Crevice'] = 0
application_rate['Ready-to-use']['Indoor Environment']['Aerosol can']['Space spray'] = 0
application_rate['Ready-to-use']['Gardens / Trees']['Aerosol can'][''] = 0
application_rate['Ready-to-use']['Lawns / Turf']['Aerosol can'][''] = 0
application_rate['Ready-to-use']['Indoor Environment']['Trigger-spray bottle']['Broadcast'] = 0
application_rate['Ready-to-use']['Indoor Environment']['Trigger-spray bottle']['Perimeter/ Spot/ Bedbug (course application)'] = 0
application_rate['Ready-to-use']['Insect Repellent']['Aerosol can'][''] = 0
application_rate['Ready-to-use']['Insect Repellent']['Trigger-spray bottle'][''] = 0
application_rate['Ready-to-use']['Gardens / Trees']['Trigger-spray bottle'][''] = 0
application_rate['Ready-to-use']['Lawns / Turf']['Trigger-spray bottle'][''] = 0
application_rate['Ready-to-use']['Indoor Environment']['Bait (granular, hand dispersal)'][''] = 0
application_rate['Ready-to-use']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 0
application_rate['Ready-to-use']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 0
application_rate['Ready-to-use']['Lawns / Turf']['Hose-end Sprayer'][''] = 0
application_rate['Wettable powders']['Indoor Environment']['Manually-pressurized handwand (w/ or w/o pin stream nozzle)']['Broadcast, Perimeter/Spot/ Bedbug (course application); Perimeter /Spot/ Bedbug (pinstream application); Crack and Crevice'] = 0
application_rate['Liquid concentrate']['Lawns / Turf']['Backpack'][''] = 0
application_rate['Wettable powders']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 0
application_rate['Wettable powders']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 0
application_rate['Wettable powders']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 0
application_rate['Wettable powders']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 0
application_rate['Wettable powders']['Lawns / Turf']['Hose-end Sprayer'][''] = 0
application_rate['Wettable powders']['Lawns / Turf']['Manually-pressurized handwand'][''] = 0
application_rate['Wettable powders']['Gardens / Trees']['Backpack']['ft2'] = 0
application_rate['Wettable powders']['Gardens / Trees']['Backpack']['gallons'] = 0
application_rate['Wettable powders']['Gardens / Trees']['Sprinkler can']['ft2'] = 0
application_rate['Wettable powders']['Gardens / Trees']['Sprinkler can']['gallons'] = 0
application_rate['Wettable powders']['Lawns / Turf']['Sprinkler can'][''] = 0
application_rate['Wettable powders in water-soluble packaging']['Indoor Environment']['Manually-pressurized handwand (w/ or w/o pin stream nozzle)']['Broadcast, Perimeter/Spot/ Bedbug (course application); Perimeter /Spot/ Bedbug (pinstream application); Crack and Crevice'] = 0
application_rate['Wettable powders']['Lawns / Turf']['Backpack'][''] = 0
application_rate['Wettable powders in water-soluble packaging']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 0
application_rate['Wettable powders in water-soluble packaging']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 0
application_rate['Wettable powders in water-soluble packaging']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 0
application_rate['Wettable powders in water-soluble packaging']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 0
application_rate['Wettable powders in water-soluble packaging']['Lawns / Turf']['Hose-end Sprayer'][''] = 0
application_rate['Wettable powders in water-soluble packaging']['Lawns / Turf']['Manually-pressurized handwand'][''] = 0
application_rate['Wettable powders in water-soluble packaging']['Lawns / Turf']['Backpack'][''] = 0
application_rate['Wettable powders in water-soluble packaging']['Gardens / Trees']['Sprinkler can']['ft2'] = 0
application_rate['Wettable powders in water-soluble packaging']['Gardens / Trees']['Sprinkler can']['gallons'] = 0
application_rate['Wettable powders in water-soluble packaging']['Lawns / Turf']['Sprinkler can'][''] = 0
application_rate['Wettable powders in water-soluble packaging']['Gardens / Trees']['Backpack'][''] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Lawns / Turf']['Manually-pressurized handwand'][''] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Lawns / Turf']['Hose-end Sprayer'][''] = 0
application_rate['Wettable powders in water-soluble packaging']['Gardens / Trees']['Backpack'][''] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Lawns / Turf']['Backpack'][''] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Sprinkler can']['ft2'] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Sprinkler can']['gallons'] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Lawns / Turf']['Sprinkler can'][''] = 0
application_rate['Granule']['Gardens / Trees']['Push-type rotary spreader'][''] = 0
application_rate['Granule']['Lawns / Turf']['Push-type rotary spreader'][''] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Backpack']['ft2'] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Backpack']['gallons'] = 0
application_rate['Granule']['Lawns / Turf']['Belly grinder'][''] = 0
application_rate['Granule']['Gardens / Trees']['Spoon'][''] = 0
application_rate['Granule']['Lawns / Turf']['Spoon'][''] = 0
application_rate['Granule']['Gardens / Trees']['Cup'][''] = 0
application_rate['Granule']['Lawns / Turf']['Cup'][''] = 0
application_rate['Granule']['Gardens / Trees']['Hand dispersal'][''] = 0
application_rate['Granule']['Lawns / Turf']['Hand dispersal'][''] = 0
application_rate['Granule']['Gardens / Trees']['Shaker can']['can'] = 0
application_rate['Granule']['Gardens / Trees']['Shaker can']['ft2'] = 0
application_rate['Granule']['Lawns / Turf']['Shaker can'][''] = 0
application_rate['Microencapsulated']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 0
application_rate['Microencapsulated']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 0
application_rate['Microencapsulated']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 0
application_rate['Microencapsulated']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 0
application_rate['Microencapsulated']['Lawns / Turf']['Hose-end Sprayer'][''] = 0
application_rate['Microencapsulated']['Lawns / Turf']['Manually-pressurized handwand'][''] = 0
application_rate['Microencapsulated']['Gardens / Trees']['Backpack']['ft2'] = 0
application_rate['Microencapsulated']['Gardens / Trees']['Backpack']['gallons'] = 0
application_rate['Microencapsulated']['Lawns / Turf']['Backpack'][''] = 0
application_rate['Microencapsulated']['Gardens / Trees']['Sprinkler can']['ft2'] = 0
application_rate['Microencapsulated']['Gardens / Trees']['Sprinkler can']['gallons'] = 0
application_rate['Microencapsulated']['Lawns / Turf']['Sprinkler can'][''] = 0
application_rate['Ready-to-use']['Paints / Preservatives']['Aerosol can'][''] = 0
application_rate['Paints / Preservatives/ Stains']['Paints / Preservatives']['Airless Sprayer'][''] = 0
application_rate['Paints / Preservatives/ Stains']['Paints / Preservatives']['Brush'][''] = 0
application_rate['Paints / Preservatives/ Stains']['Paints / Preservatives']['Manually-pressurized handwand'][''] = 0
application_rate['Paints / Preservatives/ Stains']['Paints / Preservatives']['Roller'][''] = 0
application_rate['Liquid concentrate']['Treated Pets']['Dip'][''] = 0
application_rate['Liquid concentrate']['Treated Pets']['Sponge'][''] = 0
application_rate['Ready-to-use']['Treated Pets']['Trigger-spray bottle'][''] = 0
application_rate['Ready-to-use']['Treated Pets']['Aerosol can'][''] = 0
application_rate['Ready-to-use']['Treated Pets']['Shampoo'][''] = 0
application_rate['Ready-to-use']['Treated Pets']['Spot-on'][''] = 0
application_rate['Ready-to-use']['Treated Pets']['Collar'][''] = 0
application_rate['Dust/Powder']['Treated Pets']['Shaker Can'][''] = 0
application_rate_units['Dust/Powder']['Indoor Environment']['Plunger Duster']['Broadcast; Perimeter/Spot/ Bedbug (course application)'] = 'lb ai/lb dust'
application_rate_units['Dust/Powder']['Gardens / Trees']['Plunger Duster'][''] = 'lb ai/ft2'
application_rate_units['Dust/Powder']['Indoor Environment']['Bulb duster']['Perimeter/Spot/Bedbug; Crack and Crevice'] = 'lb ai/lb dust'
application_rate_units['Dust/Powder']['Gardens / Trees']['Bulb duster'][''] = 'lb ai/ft2'
application_rate_units['Dust/Powder']['Indoor Environment']['Electric/power duster']['Broadcast; Perimeter/Spot/ Bedbug (course application)'] = 'lb ai/lb dust'
application_rate_units['Dust/Powder']['Gardens / Trees']['Electric/power duster'][''] = 'lb ai/ft2'
application_rate_units['Dust/Powder']['Indoor Environment']['Hand crank duster']['Broadcast; Perimeter/Spot/ Bedbug (course application)'] = 'lb ai/lb dust'
application_rate_units['Dust/Powder']['Gardens / Trees']['Hand crank duster'][''] = 'lb ai/ft2'
application_rate_units['Dust/Powder']['Indoor Environment']['Shaker can']['Broadcast'] = 'lb ai/can'
application_rate_units['Dust/Powder']['Indoor Environment']['Shaker can']['Broadcast; Perimeter/Spot/ Bedbug (course application)'] = 'lb ai/can'
application_rate_units['Dust/Powder']['Gardens / Trees']['Shaker can']['can'] = 'lb ai/can'
application_rate_units['Dust/Powder']['Gardens / Trees']['Shaker can']['ft2'] = 'lb ai/ft2'
application_rate_units['Liquid concentrate']['Indoor Environment']['Manually-pressurized handwand (w/ or w/o pin stream nozzle)']['Broadcast, Perimeter/Spot/ Bedbug (course application); Perimeter /Spot/ Bedbug (pinstream application); Crack and Crevice'] = 'lb ai/gallon'
application_rate_units['Liquid concentrate']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 'lb ai/gallon'
application_rate_units['Liquid concentrate']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 'lb ai/ft2'
application_rate_units['Liquid concentrate']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 'lb ai/ft2'
application_rate_units['Liquid concentrate']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 'lb ai/gallon'
application_rate_units['Liquid concentrate']['Lawns / Turf']['Hose-end Sprayer'][''] = 'lb ai/acre'
application_rate_units['Liquid concentrate']['Lawns / Turf']['Manually-pressurized handwand'][''] = 'lb ai/gallon'
application_rate_units['Liquid concentrate']['Gardens / Trees']['Backpack']['ft2'] = 'lb ai/ft2'
application_rate_units['Liquid concentrate']['Gardens / Trees']['Backpack']['gallons'] = 'lb ai/gallon'
application_rate_units['Liquid concentrate']['Gardens / Trees']['Sprinkler can']['ft2'] = 'lb ai/ft2'
application_rate_units['Liquid concentrate']['Gardens / Trees']['Sprinkler can']['gallons'] = 'lb ai/gallon'
application_rate_units['Liquid concentrate']['Lawns / Turf']['Sprinkler can'][''] = 'lb ai/ft2'
application_rate_units['Ready-to-use']['Indoor Environment']['Aerosol can']['Broadcast Surface Spray'] = 'lb ai/16-oz can'
application_rate_units['Ready-to-use']['Indoor Environment']['Aerosol can']['Perimeter/ Spot/ Bedbug (course application)'] = 'lb ai/16-oz can'
application_rate_units['Ready-to-use']['Indoor Environment']['Aerosol can with pin stream nozzle']['Perimeter/ Spot/ Bedbug (pin stream application); Crack and Crevice'] = 'lb ai/16-oz can'
application_rate_units['Ready-to-use']['Indoor Environment']['Aerosol can']['Space spray'] = 'lb ai/16-oz can'
application_rate_units['Ready-to-use']['Insect Repellent']['Aerosol can'][''] = 'lb ai/can'
application_rate_units['Ready-to-use']['Insect Repellent']['Trigger-spray bottle'][''] = 'lb ai/bottle'
application_rate_units['Ready-to-use']['Gardens / Trees']['Aerosol can'][''] = 'lb ai/can'
application_rate_units['Ready-to-use']['Lawns / Turf']['Aerosol can'][''] = 'lb ai/can'
application_rate_units['Ready-to-use']['Indoor Environment']['Trigger-spray bottle']['Broadcast'] = 'lb ai/bottle'
application_rate_units['Ready-to-use']['Indoor Environment']['Trigger-spray bottle']['Perimeter/ Spot/ Bedbug (course application)'] = 'lb ai/bottle'
application_rate_units['Ready-to-use']['Gardens / Trees']['Trigger-spray bottle'][''] = 'lb ai/bottle'
application_rate_units['Ready-to-use']['Lawns / Turf']['Trigger-spray bottle'][''] = 'lb ai/bottle'
application_rate_units['Ready-to-use']['Indoor Environment']['Bait (granular, hand dispersal)'][''] = 'lb ai/ft2'
application_rate_units['Ready-to-use']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 'lb ai/ft2'
application_rate_units['Ready-to-use']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 'lb ai/gallon'
application_rate_units['Ready-to-use']['Lawns / Turf']['Hose-end Sprayer'][''] = 'lb ai/acre'
application_rate_units['Wettable powders']['Indoor Environment']['Manually-pressurized handwand (w/ or w/o pin stream nozzle)']['Broadcast, Perimeter/Spot/ Bedbug (course application); Perimeter /Spot/ Bedbug (pinstream application); Crack and Crevice'] = 'lb ai/gallon'
application_rate_units['Liquid concentrate']['Lawns / Turf']['Backpack'][''] = 'lb ai/gallon'
application_rate_units['Wettable powders']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 'lb ai/ft2'
application_rate_units['Wettable powders']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 'lb ai/gallon'
application_rate_units['Wettable powders']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 'lb ai/ft2'
application_rate_units['Wettable powders']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 'lb ai/gallon'
application_rate_units['Wettable powders']['Lawns / Turf']['Hose-end Sprayer'][''] = 'lb ai/acre'
application_rate_units['Wettable powders']['Lawns / Turf']['Manually-pressurized handwand'][''] = 'lb ai/gallon'
application_rate_units['Wettable powders']['Gardens / Trees']['Backpack']['ft2'] = 'lb ai/ft2'
application_rate_units['Wettable powders']['Gardens / Trees']['Backpack']['gallons'] = 'lb ai/gallon'
application_rate_units['Wettable powders']['Gardens / Trees']['Sprinkler can']['ft2'] = 'lb ai/ft2'
application_rate_units['Wettable powders']['Gardens / Trees']['Sprinkler can']['gallons'] = 'lb ai/gallon'
application_rate_units['Wettable powders']['Lawns / Turf']['Sprinkler can'][''] = 'lb ai/ft2'
application_rate_units['Wettable powders in water-soluble packaging']['Indoor Environment']['Manually-pressurized handwand (w/ or w/o pin stream nozzle)']['Broadcast, Perimeter/Spot/ Bedbug (course application); Perimeter /Spot/ Bedbug (pinstream application); Crack and Crevice'] = 'lb ai/gallon'
application_rate_units['Wettable powders']['Lawns / Turf']['Backpack'][''] = 'lb ai/gallon'
application_rate_units['Wettable powders in water-soluble packaging']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 'lb ai/ft2'
application_rate_units['Wettable powders in water-soluble packaging']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 'lb ai/gallon'
application_rate_units['Wettable powders in water-soluble packaging']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 'lb ai/ft2'
application_rate_units['Wettable powders in water-soluble packaging']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 'lb ai/gallon'
application_rate_units['Wettable powders in water-soluble packaging']['Lawns / Turf']['Hose-end Sprayer'][''] = 'lb ai/acre'
application_rate_units['Wettable powders in water-soluble packaging']['Lawns / Turf']['Manually-pressurized handwand'][''] = 'lb ai/gallon'
application_rate_units['Wettable powders in water-soluble packaging']['Lawns / Turf']['Backpack'][''] = 'lb ai/gallon'
application_rate_units['Wettable powders in water-soluble packaging']['Gardens / Trees']['Sprinkler can']['ft2'] = 'lb ai/ft2'
application_rate_units['Wettable powders in water-soluble packaging']['Gardens / Trees']['Sprinkler can']['gallons'] = 'lb ai/gallon'
application_rate_units['Wettable powders in water-soluble packaging']['Lawns / Turf']['Sprinkler can'][''] = 'lb ai/ft2'
application_rate_units['Wettable powders in water-soluble packaging']['Gardens / Trees']['Backpack'][''] = 'lb ai/ft2'
application_rate_units['Water-disersible Granule / Dry Flowable']['Lawns / Turf']['Manually-pressurized handwand'][''] = 'lb ai/gallon'
application_rate_units['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 'lb ai/ft2'
application_rate_units['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 'lb ai/gallon'
application_rate_units['Water-disersible Granule / Dry Flowable']['Lawns / Turf']['Hose-end Sprayer'][''] = 'lb ai/acre'
application_rate_units['Wettable powders in water-soluble packaging']['Gardens / Trees']['Backpack'][''] = 'lb ai/gallon'
application_rate_units['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 'lb ai/ft2'
application_rate_units['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 'lb ai/gallon'
application_rate_units['Water-disersible Granule / Dry Flowable']['Lawns / Turf']['Backpack'][''] = 'lb ai/gallon'
application_rate_units['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Sprinkler can']['ft2'] = 'lb ai/ft2'
application_rate_units['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Sprinkler can']['gallons'] = 'lb ai/gallon'
application_rate_units['Water-disersible Granule / Dry Flowable']['Lawns / Turf']['Sprinkler can'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Gardens / Trees']['Push-type rotary spreader'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Lawns / Turf']['Push-type rotary spreader'][''] = 'lb ai/acre'
application_rate_units['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Backpack']['ft2'] = 'lb ai/ft2'
application_rate_units['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Backpack']['gallons'] = 'lb ai/gallon'
application_rate_units['Granule']['Lawns / Turf']['Belly grinder'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Gardens / Trees']['Spoon'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Lawns / Turf']['Spoon'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Gardens / Trees']['Cup'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Lawns / Turf']['Cup'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Gardens / Trees']['Hand dispersal'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Lawns / Turf']['Hand dispersal'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Gardens / Trees']['Shaker can']['can'] = 'lb ai/can'
application_rate_units['Granule']['Gardens / Trees']['Shaker can']['ft2'] = 'lb ai/ft2'
application_rate_units['Granule']['Lawns / Turf']['Shaker can'][''] = 'lb ai/ft2'
application_rate_units['Microencapsulated']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 'lb ai/ft2'
application_rate_units['Microencapsulated']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 'lb ai/gallon'
application_rate_units['Microencapsulated']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 'lb ai/ft2'
application_rate_units['Microencapsulated']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 'lb ai/gallon'
application_rate_units['Microencapsulated']['Lawns / Turf']['Hose-end Sprayer'][''] = 'lb ai/acre'
application_rate_units['Microencapsulated']['Lawns / Turf']['Manually-pressurized handwand'][''] = 'lb ai/gallon'
application_rate_units['Microencapsulated']['Gardens / Trees']['Backpack']['ft2'] = 'lb ai/ft2'
application_rate_units['Microencapsulated']['Gardens / Trees']['Backpack']['gallons'] = 'lb ai/gallon'
application_rate_units['Microencapsulated']['Lawns / Turf']['Backpack'][''] = 'lb ai/gallon'
application_rate_units['Microencapsulated']['Gardens / Trees']['Sprinkler can']['ft2'] = 'lb ai/ft2'
application_rate_units['Microencapsulated']['Gardens / Trees']['Sprinkler can']['gallons'] = 'lb ai/gallon'
application_rate_units['Microencapsulated']['Lawns / Turf']['Sprinkler can'][''] = 'lb ai/ft2'
application_rate_units['Ready-to-use']['Paints / Preservatives']['Aerosol can'][''] = 'lb ai/12-oz can'
application_rate_units['Paints / Preservatives/ Stains']['Paints / Preservatives']['Airless Sprayer'][''] = 'lb ai/1-gal can'
application_rate_units['Paints / Preservatives/ Stains']['Paints / Preservatives']['Brush'][''] = 'lb ai/1-gal can'
application_rate_units['Paints / Preservatives/ Stains']['Paints / Preservatives']['Manually-pressurized handwand'][''] = 'lb ai/1-gal can'
application_rate_units['Paints / Preservatives/ Stains']['Paints / Preservatives']['Roller'][''] = 'lb ai/1-gal can'
application_rate_units['Liquid concentrate']['Treated Pets']['Dip'][''] = 'lb ai/pet'
application_rate_units['Liquid concentrate']['Treated Pets']['Sponge'][''] = 'lb ai/pet'
application_rate_units['Ready-to-use']['Treated Pets']['Trigger-spray bottle'][''] = 'lb ai/pet'
application_rate_units['Ready-to-use']['Treated Pets']['Aerosol can'][''] = 'lb ai/pet'
application_rate_units['Ready-to-use']['Treated Pets']['Shampoo'][''] = 'lb ai/pet'
application_rate_units['Ready-to-use']['Treated Pets']['Spot-on'][''] = 'lb ai/pet'
application_rate_units['Ready-to-use']['Treated Pets']['Collar'][''] = 'lb ai/pet'
application_rate_units['Dust/Powder']['Treated Pets']['Shaker Can'][''] = 'lb ai/pet'
for formulation in application_rate:
for scenario in application_rate[formulation]:
for application_method in application_rate[formulation][scenario]:
for application_type in application_rate[formulation][scenario][application_method]:
application_rate_form_map[formulation][scenario][application_method][application_type] = "%s, %s, %s, %s" %(formulation, scenario, application_method, application_type )
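    # e.g. application_rate_form_map['Granule']['Lawns / Turf']['Spoon'][''] == 'Granule, Lawns / Turf, Spoon, '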
def __init__(self,*args,**kwargs):
self.data_from_general_handler_sub_scenario_step = kwargs.pop('data_from_general_handler_sub_scenario_step',None)
super(GeneralHandlerForm,self).__init__(*args,**kwargs)
application_rates = []
for formulation in GeneralHandlerForm.application_rate:
if self.data_from_general_handler_sub_scenario_step:
if formulation in self.data_from_general_handler_sub_scenario_step['formulations']:
for scenario in GeneralHandlerForm.application_rate[formulation]:
if scenario in self.data_from_general_handler_sub_scenario_step['sub_scenarios']:
for application_method in GeneralHandlerForm.application_rate[formulation][scenario]:
if application_method in self.data_from_general_handler_sub_scenario_step['equipment']:
application_rates.append((formulation, scenario, application_method, GeneralHandlerForm.application_rate[formulation][scenario][application_method]))
application_rates = sorted(application_rates, key=operator.itemgetter(1))
for formulation, scenario, application_method, application_rate in application_rates:
for application_type in application_rate:
self.fields[GeneralHandlerForm.application_rate_form_map[formulation][scenario][application_method][application_type]] = forms.FloatField(required=False,initial=0, label="%s [Application Rate (%s)]"%(GeneralHandlerForm.application_rate_form_map[formulation][scenario][application_method][application_type],GeneralHandlerForm.application_rate_units[formulation][scenario][application_method][application_type]),min_value=0.)
class GeneralHandlerSubScenariosForm(forms.Form):
title = "General Handler Sub Scenario Selection"
SUB_SCENARIOS_CHOICES = [('Insect Repellent','Insect Repellent'),('Treated Pets','Treated Pets'),('Lawns / Turf','Lawns / Turf'),('Gardens / Trees','Gardens / Trees'),('Paints / Preservatives','Paints / Preservatives'), ('Indoor Environment','Indoor Environment'),('Misting','Misting')]
sub_scenarios = forms.MultipleChoiceField(choices=SUB_SCENARIOS_CHOICES , widget=CheckboxSelectMultipleBootstrap())
FORMULATION_CHOICES = [('Dust/Powder','Dust/Powder'), ('Granule', 'Granule'),('Liquid concentrate','Liquid concentrate'), ('Microencapsulated','Microencapsulated'), ('Paints / Preservatives/ Stains','Paints / Preservatives/ Stains'), ('Ready-to-use','Ready-to-use'), ('Water-disersible Granule / Dry Flowable','Water-disersible Granule / Dry Flowable'), ('Wettable powders','Wettable powders'), ('Wettable powders in water-soluble packaging','Wettable powders in water-soluble packaging')]
formulations = forms.MultipleChoiceField(choices=FORMULATION_CHOICES , widget=CheckboxSelectMultipleBootstrap(), required=False)
EQUIPMENT_CHOICES = [('Aerosol can with pin stream nozzle','Aerosol can with pin stream nozzle'),('Aerosol can','Aerosol can'),('Airless Sprayer','Airless Sprayer'),('Backpack','Backpack'),('Bait (granular, hand dispersal)','Bait (granular, hand dispersal)'),('Belly grinder','Belly grinder'),('Brush','Brush'),('Bulb duster','Bulb duster'),('Collar','Collar'),('Cup','Cup'),('Dip','Dip'),('Electric/power duster','Electric/power duster'),('Hand crank duster','Hand crank duster'),('Hand dispersal','Hand dispersal'),('Hose-end Sprayer','Hose-end Sprayer'),('Manually-pressurized handwand','Manually-pressurized handwand'),('Manually-pressurized handwand (w/ or w/o pin stream nozzle)', 'Manually-pressurized handwand (w/ or w/o pin stream nozzle)'),('Plunger Duster','Plunger Duster'), ('Push-type rotary spreader', 'Push-type rotary spreader'),('Roller','Roller'),('Shaker can','Shaker can'),('Shampoo','Shampoo'),('Sponge','Sponge'),('Spot-on','Spot-on'),('Sprinkler can','Sprinkler can'
),('Trigger-spray bottle','Trigger-spray bottle')]
equipment = forms.MultipleChoiceField(choices=EQUIPMENT_CHOICES , widget=CheckboxSelectMultipleBootstrap(), required=False)
n_inputs_equipment = defaultdict(lambda : defaultdict(lambda : defaultdict(int)))
n_inputs_formulation = defaultdict(lambda : defaultdict(int))
n_inputs_scenarios = defaultdict(int)
for i in xrange(0, len(SUB_SCENARIOS_CHOICES)):
for j in xrange(0, len(FORMULATION_CHOICES)):
for k in xrange(0, len(EQUIPMENT_CHOICES)):
formulation = FORMULATION_CHOICES[j][0]
scenario = SUB_SCENARIOS_CHOICES[i][0]
application_method = EQUIPMENT_CHOICES[k][0]
try:
size = len(GeneralHandlerForm.application_rate[formulation][scenario][application_method])
n_inputs_equipment[i][j][k] += size
n_inputs_formulation[i][j] += size
n_inputs_scenarios[i] += size
                except KeyError:
                    # not every formulation/scenario/equipment combination is defined
                    pass
def __init__(self,*args,**kwargs):
super(GeneralHandlerSubScenariosForm,self).__init__(*args,**kwargs)
def clean(self):
cleaned_data = super(GeneralHandlerSubScenariosForm, self).clean()
equipment = cleaned_data.get("equipment")
formulations = cleaned_data.get("formulations")
sub_scenarios = cleaned_data.get("sub_scenarios")
if sub_scenarios == ['Misting']:
return cleaned_data
elif sub_scenarios:
if formulations == [] or equipment == []:
raise forms.ValidationError("Both formulations and equipment need to be selected for %s."%", ".join(sub_scenarios))
count = 0
for scenario in sub_scenarios:
for formulation in formulations:
for application_method in equipment:
count += len(GeneralHandlerForm.application_rate[formulation][scenario][application_method])
if count == 0:
raise forms.ValidationError("No scenarios available for this selection of formulations and equipment. Ensure at least one of the equipment choices has greater than 1 in brackets.")
return cleaned_data
class TreatedPetForm(forms.Form):
title = "Treated Pet Data Entry Form"
amount_applied_form_map = defaultdict(dict)
for animal in ['cat','dog']:
for size in ['small','medium','large']:
amount_applied_form_map[animal][size] = "%s %s" %(size, animal)
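    # e.g. amount_applied_form_map['dog']['medium'] == 'medium dog'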
# amount_applied['Other Pet'][''] = 0
    fraction_ai = forms.FloatField(required=False, initial=0, min_value=0., max_value=1., label="Fraction ai in product (0-1)")
default_pet_weights = {'cat':{},'dog':{}} #lb
default_pet_weights['dog'] = {'small':10.36535946,'medium':38.16827225,'large':76.50578234} #lb
default_pet_weights['cat'] = {'small':3.568299485,'medium':7.8300955,'large':16.13607146}
pet_weight = default_pet_weights['dog']['medium']
    # Surface Area (cm2) = 12.3 * (BW (lb) * 454)**0.65
    @staticmethod
    def pet_surface_area(lb):
        return 12.3 * ((lb * 454) ** 0.65)
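    # e.g. the default medium dog (~38.17 lb) works out to 12.3 * (38.17 * 454)**0.65, roughly 7000 cm2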
def __init__(self,*args,**kwargs):
super(TreatedPetForm,self).__init__(*args,**kwargs)
for animal in TreatedPetForm.amount_applied_form_map:
for size in TreatedPetForm.amount_applied_form_map[animal]:
TreatedPetForm.amount_applied_form_map[animal][size] = "%s %s" %(size, animal)
self.fields[TreatedPetForm.amount_applied_form_map[animal][size]] = forms.FloatField(required=False,initial=0,min_value=0.,label = "Amount of product applied to a %s %s (g)" %(size, animal))
class LawnTurfForm(forms.Form):
title = "Lawn and Turf Data Entry Form"
liquid_application_rate = forms.FloatField(required=False,initial=0,min_value=0., label="Liquid Application Rate (lb ai/acre)")
solid_application_rate = forms.FloatField(required=False,initial=0,min_value=0., label="Solid Application Rate (lb ai/acre)")
liquid_ttr_conc = forms.FloatField(required=False,initial=0,min_value=0., label="Liquid TTR (calculated from application rate if not available) (ug/cm2)")#ORt = TTRt
solid_ttr_conc = forms.FloatField(required=False,initial=0,min_value=0., label="Solid TTR (calculated from application rate if not available) (ug/cm2)")#ORt = TTRt
fraction_ai_in_pellets = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Fraction of ai in pellets/granules (0-1)")
class GardenAndTreesForm(forms.Form):
title = "Garden and Trees Data Entry Form"
liquid_application_rate = forms.FloatField(required=False,initial=0,min_value=0., label="Liquid Application Rate (lb ai/acre)")
solid_application_rate = forms.FloatField(required=False,initial=0,min_value=0., label="Solid Application Rate (lb ai/acre)")
liquid_dfr_conc = forms.FloatField(required=False,initial=0,min_value=0., label="Liquid DFR (calculated from application rate if not available) (ug/cm2)")
solid_dfr_conc = forms.FloatField(required=False,initial=0,min_value=0., label="Solid DFR (calculated from application rate if not available) (ug/cm2)")
class InsectRepellentsForm(forms.Form):
title = "Insect Repellent Data Entry Form"
formulations = ['Aerosol', 'Pump spray', 'Lotion','Towelette']
amount_ai_formulations_form_map = defaultdict(dict)
for sunscreen_status in ['without','with']:
for formulation in formulations:
amount_ai_formulations_form_map[sunscreen_status][formulation] = "%s repellent %s sunscreen" %(formulation, sunscreen_status)
def __init__(self,*args,**kwargs):
super(InsectRepellentsForm,self).__init__(*args,**kwargs)
for sunscreen_status in ['without','with']:
for formulation in InsectRepellentsForm.formulations:
self.fields[InsectRepellentsForm.amount_ai_formulations_form_map[sunscreen_status][formulation]] = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1., label = "Fraction of ai in %s repellent %s sunscreen (mg ai / mg product)"%(formulation,sunscreen_status))
class PaintsAndPreservativesForm(forms.Form):
title = "Paints and Preservatives Data Entry Form"
surface_residue_concentration = forms.FloatField(required=False,initial=0, min_value=0., label="Surface Residue Concentration (mg ai/cm^2)")
DEFAULT_FRACTION_OF_BODY_EXPOSED = 0.31
DEFAULT_DAILY_MATERIAL_TO_SKIN_TRANSFER_EFFICENCY = 0.14
EXPOSURE_TIME = {'indoor':4., 'outdoor':1.5}
HAND_TO_MOUTH_EVENTS_PER_HOUR = {'indoor':20., 'outdoor':13.9}
indoor_or_outdoor = forms.ChoiceField(choices=[('indoor','Indoor'),('outdoor','Outdoor')], initial='indoor', label="Location of interest (indoor/outdoor)")
class ImpregnatedMaterialsForm(forms.Form):
title = "Impregnated Materials Data Entry Form"
surface_residue_concentration = forms.FloatField(required=False)
weight_fraction_of_active_ingredient = forms.FloatField(required=False)
MATERIAL_CHOICES = [('cotton', 'Cotton'), ('light_cotton_synthetic_mix', 'Light Cotton/Synthetic Mix'), ('heavy_cotton_synthetic_mix','Heavy Cotton/Synthetic Mix'),('all_synthetics','All Synthetics'),('household_carpets','Household Carpets'),('plastic_polymers','Plastic Polymers'), ('vinyl_flooring','Vinyl Flooring')]
material_type = forms.ChoiceField(choices=MATERIAL_CHOICES,required=False)
MATERIAL_CHOICES_DICT = {}
for choice in MATERIAL_CHOICES:
MATERIAL_CHOICES_DICT[choice[0]]=choice[1]
MATERIAL_WEIGHT_TO_SURFACE_AREA_DENSITY = {'cotton': 20., 'light_cotton_synthetic_mix': 10., 'heavy_cotton_synthetic_mix':24.,'all_synthetics':1.,'household_carpets':120.,'plastic_polymers':100., 'vinyl_flooring':40.}
#DERMAL
BODY_FRACTION_CHOICES = [('pants_jacket_shirt','Pants, Jacket, or Shirts'), ('total', 'Total Body Coverage'), ('floor', 'Mattresses, Carpets or Flooring'), ('handlers','Handlers')]
BODY_FRACTION_CHOICES_DICT = {}
for choice in BODY_FRACTION_CHOICES:
BODY_FRACTION_CHOICES_DICT[choice[0]]=choice[1]
body_fraction_exposed_type = forms.ChoiceField(choices=BODY_FRACTION_CHOICES,required=True)
BODY_FRACTION_EXPOSED = {'pants_jacket_shirt':0.5, 'total':1, 'floor':0.5, 'handlers':0.11}
    protective_barrier_present = forms.ChoiceField(choices=[('no','No'),('yes','Yes')],required=True,initial='no', label = "Is there a potential protective barrier present (such as bed sheets or other fabrics)?")
PROTECTION_FACTOR = {'no':1,'yes':0.5}
#HtM
TYPE_OF_FLOORING_CHOICES = [('',''), ('carpet','Carpet or Textiles'), ('hard', 'Hard Surface or Flooring')]
TYPE_OF_FLOORING_CHOICES_DICT = {}
for choice in TYPE_OF_FLOORING_CHOICES:
TYPE_OF_FLOORING_CHOICES_DICT[choice[0]]=choice[1]
type_of_flooring = forms.ChoiceField(choices=TYPE_OF_FLOORING_CHOICES ,required=False)
FRACTION_AI_HAND_TRANSFER = {'':0., 'carpet':0.06,'hard':0.08}
FLOOR_EXPOSURE_TIME = {'':0., 'carpet':4.,'hard':2.}
DEFAULT_FRACTION_OF_BODY_EXPOSED = 0.31
DEFAULT_DAILY_MATERIAL_TO_SKIN_TRANSFER_EFFICENCY = 0.14
EXPOSURE_TIME = {'indoor':4., 'outdoor':1.5}
HAND_TO_MOUTH_EVENTS_PER_HOUR = {'indoor':20., 'outdoor':13.9}
indoor_or_outdoor = forms.ChoiceField(choices=[('indoor','Indoor'),('outdoor','Outdoor')], initial='indoor', label="Location of interest (indoor/outdoor)")
#daily_material_to_skin_transfer_efficency = forms.FloatField(required=False,initial=0.14)
#OtM
    # FRACTION_AI_HAND_TRANSFER (defined above under HtM) is reused for object-to-mouth transfer
OBJECT_TO_MOUTH_EVENTS_PER_HOUR = {'':14.,'indoor':14., 'outdoor':8.8}
class IndoorEnvironmentsForm(forms.Form):
title = "Indoor Environments Data Entry Form"
space_spray_fraction_ai = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Fraction of ai in Aerosol Space Sprays (0-1)")
space_spray_amount_of_product = forms.FloatField(required=False,initial=0, min_value=0.,label="Amount of product in Aerosol Space Spray can (g/can)")
SPACE_SPRAY_RESTRICTION_CHOICES = [('NA','Not Applicable')] + [ (t/60., "%s minutes"%t) for t in [0,5,10,15,20,30,40,60,120]]
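    # choice values are the restriction times converted to hours (t/60.); the labels display minutes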
space_spray_restriction = forms.ChoiceField(choices=SPACE_SPRAY_RESTRICTION_CHOICES)
molecular_weight = forms.FloatField(required=False,initial=0, min_value=0.,label="Molecular weight (g/mol)")
vapor_pressure = forms.FloatField(required=False,initial=0, min_value=0.,label="Vapor pressure (mmHg)")
broadcast_residue = forms.FloatField(required=False,initial=0, min_value=0.,label="Residue deposited on broadcast (ug/cm^2)")
coarse_residue = forms.FloatField(required=False,initial=0, min_value=0.,label="Residue deposited on perimeter/spot/bedbug (coarse) (ug/cm^2)")
pin_stream_residue = forms.FloatField(required=False,initial=0, min_value=0.,label="Residue deposited on perimeter/spot/bedbug (pin stream) (ug/cm^2)")
crack_and_crevice_residue = forms.FloatField(required=False,initial=0, min_value=0.,label="Residue deposited on cracks and crevices (ug/cm^2)")
foggers_residue = forms.FloatField(required=False,initial=0, min_value=0.,label="Residue deposited by foggers (ug/cm^2)")
space_sprays_residue = forms.FloatField(required=False,initial=0, min_value=0.,label="Residue deposited by space sprays (ug/cm^2)")
matress_residue = forms.FloatField(required=False,initial=0, min_value=0.,label="Residue deposited on mattress (ug/cm^2)")
class OutdoorMistingForm(forms.Form):
title = "Outdoor Misting Data Entry Form"
#OASS
OASS_fraction_ai = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Fraction of ai in Outdoor Aerosol Space Sprays (0-1)")
OASS_amount_of_product_in_can = forms.FloatField(required=False,initial=0, min_value=0.,label="Amount of product in Outdoor Aerosol Space Spray can (g/can)")
# CCTM
CCTM_amount_ai_in_product = forms.FloatField(required=False,initial=0, min_value=0.,label="Amount ai in Candles, Coils, Torches, and/or Mats (mg ai/product)")
# ORMS
#product app rate on label:
ORMS_application_rate = forms.FloatField(required=False,initial=0, min_value=0.,label="Application rate in Outdoor Residential Misting System(oz/1000 cu.ft.)")
#else
ORMS_dilution_rate = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Dilution rate in Outdoor Residential Misting System (vol product/vol total solution) (0-1)")
ORMS_fraction_ai = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Fraction of ai in Outdoor Residential Misting System (0-1)")
# AB
#product app rate on label:
AB_application_rate = forms.FloatField(required=False,initial=0, min_value=0.,label="Application rate in Animal Barns(oz/1000 cu.ft.)")
#else
AB_dilution_rate = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Dilution rate in Animal Barns (vol product/vol total solution) (0-1)")
AB_fraction_ai = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Fraction of ai in Animal Barns (0-1)")
class OutdoorMistingGeneralHandlerForm(forms.Form):
title = "Outdoor Misting General Handler Data Entry Form"
OASS_fraction_ai = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Fraction of ai in Outdoor Aerosol Space Sprays (0-1)")
OASS_amount_of_product_in_can = forms.FloatField(required=False,initial=0, min_value=0.,label="Amount of product in Outdoor Aerosol Space Spray can (g/can)")
# ORMS
#product app rate on label:
ORMS_DRUM_CHOICES = [(30,'30 gallons'), (55, '55 gallons')]
ORMS_drum_size = forms.ChoiceField(choices=ORMS_DRUM_CHOICES,required=False, initial=55, label="Outdoor Residential Misting System Drum Size")
ORMS_application_rate = forms.FloatField(required=False,initial=0, min_value=0.,label="Application rate in Outdoor Residential Misting System(oz/1000 cu.ft.)")
#else
ORMS_dilution_rate = forms.FloatField(required=False,initial=0, min_value=0.,label="Dilution rate in Outdoor Residential Misting System (vol product/vol total solution)")
ORMS_fraction_ai = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Fraction of ai in Outdoor Residential Misting System (0-1)")
# AB
#product app rate on label:
AB_DRUM_CHOICES = [(30,'30 gallons'), (55, '55 gallons'), (125, '125 gallons')]
AB_drum_size = forms.ChoiceField(choices=AB_DRUM_CHOICES,required=False, initial=55, label="Animal Barn Drum Size" )
#else
AB_dilution_rate = forms.FloatField(required=False,initial=0, min_value=0.,label="Dilution rate in Animal Barns (vol product/vol total solution)")
AB_fraction_ai = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Fraction of ai in Animal Barns (0-1)")
| agpl-3.0 | -3,916,772,724,174,924,000 | 73.444121 | 994 | 0.63773 | false | 3.428808 | false | false | false |
mathieulavoie/Bitcluster | web/web.py | 1 | 5465 | #from web import app
from web.dao import getNodeFromAddress, getNodeInformation, getTransations, groupByAllDistribution, groupbyNode, \
groupbyAmount, groupbyDate
from flask import *
import re
import csv
import io
from datetime import datetime, timedelta
app = Flask(__name__)
@app.route('/',methods=['POST', 'GET'])
def web_root():
if request.method == 'POST':
address = request.form['q']
if address.isnumeric():
return redirect(url_for('get_node_request',node_id=address))
else:
pattern = re.compile("^([1-9ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz])+$")
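            # loose Base58 sanity check: digits 1-9 and letters excluding 0, O, I and l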
if pattern.match(address):
node_id = getNodeFromAddress(address)
if node_id is not None:
return redirect(url_for('get_node_request',node_id=node_id))
        return render_template('index.html',message="Invalid or nonexistent address")
return render_template('index.html')
@app.route('/nodes/<int:node_id>')
def get_node_request(node_id):
infos = getNodeInformation(node_id)
    limit = 100  # cap each list shown on the page; a 'truncated' flag is passed to the template
truncated_trx_in,trx_in = trim_collection(infos['transactions']['in'],limit)
truncated_trx_out,trx_out = trim_collection(infos['transactions']['out'],limit)
truncated_by_node_in,infos['incomes_grouped']['by_node'] = trim_collection(infos['incomes_grouped']['by_node'],limit)
truncated_by_node_out,infos['outcomes_grouped']['by_node'] = trim_collection(infos['outcomes_grouped']['by_node'],limit)
truncated_by_amount_in,infos['incomes_grouped']['by_amount']['amount_usd'] = trim_collection(infos['incomes_grouped']['by_amount']['amount_usd'],limit)
truncated_by_amount_out,infos['outcomes_grouped']['by_amount']['amount_usd'] = trim_collection(infos['outcomes_grouped']['by_amount']['amount_usd'],limit)
infos['transactions'] = {'in': trx_in, 'out':trx_out}
return render_template('node_details.html',informations=infos, truncated=(truncated_trx_in or truncated_trx_out or truncated_by_node_in or truncated_by_node_out or truncated_by_amount_in or truncated_by_amount_out))
def trim_collection(collection, limit):
if len(collection) > limit:
return True, collection[0:limit]
return False, collection
@app.route('/nodes/<int:node_id>/download/json/<direction>')
def download_transations_json(node_id,direction):
if direction not in ["in","out"]:
return Response(response="Invalid direction",status=500)
transactions = getTransations(node_id,direction)
grouped = groupByAllDistribution(transactions,direction)
response = jsonify({"transactions":transactions, "groups":grouped})
response.headers['Content-disposition'] = "attachment;filename=transactions_%d_%s.json"% (node_id, direction)
return response
@app.route('/nodes/<int:node_id>/download/csv/<direction>')
def download_transations_csv(node_id,direction):
if direction not in ["in","out"]:
return Response(response="Invalid direction",status=500)
output = io.StringIO()
fieldnames = ['trx_date','block_id','source_n_id','destination_n_id','amount', 'amount_usd','source','destination']
writer = csv.DictWriter(output, fieldnames=fieldnames)
writer.writeheader()
for trx in getTransations(node_id,direction):
writer.writerow(trx)
return Response(
output.getvalue(),
mimetype="text/csv",
headers={"Content-disposition":"attachment; filename=transactions_%d_%s.csv"% (node_id, direction)})
@app.route('/nodes/<int:node_id>/download/csv/<direction>/<grouping>')
def download_grouped_transactions(node_id,direction,grouping):
if direction not in ["in","out"]:
return Response(response="Invalid direction",status=500)
output = io.StringIO()
transactions = getTransations(node_id,direction)
writer = csv.writer(output)
if grouping == "by_node":
writer.writerow(['node_id','amount_usd','amount_btc','transaction_count'])
for k,v in groupbyNode(transactions,direction):
writer.writerow([k,v['amount_usd'],v['amount_btc'],len(v['transactions'])])
elif grouping == "by_amount":
writer.writerow(['amount_usd','frequency'])
for k,v in groupbyAmount(transactions)['amount_usd']:
writer.writerow([k,v])
elif grouping == "by_date":
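        # emit one row per calendar day in the range, with zeros for days that had no transactions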
date_format = '%Y-%m-%d'
sorted_by_date = groupbyDate(transactions)
min_date = datetime.strptime(sorted_by_date[0][0],date_format)
max_date = datetime.strptime(sorted_by_date[-1][0],date_format)
delta = max_date - min_date
index = 0
writer.writerow(['date','amount_usd','amount_btc','transaction_count'])
for date in [min_date + timedelta(days=x) for x in range(0,delta.days+1)]:
strdate = date.strftime(date_format)
k,v = sorted_by_date[index]
if k == strdate:
writer.writerow([k,v['amount_usd'],v['amount_btc'],len(v['transactions'])])
index +=1
else:
writer.writerow([strdate,0,0,0])
else:
return Response(response="Invalid grouping. Possible options : by_node , by_amount , by_date",status=500)
return Response(
output.getvalue(),
mimetype="text/csv",
headers={"Content-disposition":"attachment; filename=transactions_%d_%s_%s.csv"% (node_id, direction,grouping)})
| mit | -8,318,828,850,818,100,000 | 37.485915 | 219 | 0.654163 | false | 3.771567 | false | false | false |
CodingVault/LeetCodeInPython | sorted_array_to_binary_tree.py | 1 | 1250 | #!/usr/bin/env python
# encoding: utf-8
"""
sorted_array_to_binary_tree.py
Created by Shengwei on 2014-07-03.
"""
# https://oj.leetcode.com/problems/convert-sorted-array-to-binary-search-tree/
# tags: easy, tree, array, sorted, convert, D&C
"""
Given an array where elements are sorted in ascending order, convert it to a height balanced BST.
"""
# Definition for a binary tree node
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
# @param num, a list of integers
# @return a tree node
def sortedArrayToBST(self, num):
def convert_array(left, right):
"""Convert num[left:right] to a (sub)tree."""
# num[x:x] is an empty list (x can be any number)
if left >= right:
return None
# mid point at the very middle of num[left:right]
# or the right one of the middle two
mid = (left + right) / 2
root = TreeNode(num[mid])
root.left = convert_array(left, mid)
root.right = convert_array(mid + 1, right)
return root
return convert_array(0, len(num))
| apache-2.0 | -8,530,960,155,454,743,000 | 26.777778 | 97 | 0.5672 | false | 3.612717 | false | false | false |
balazssimon/ml-playground | udemy/lazyprogrammer/deep-reinforcement-learning-python/mountaincar/q_learning.py | 1 | 6102 | # This takes 4min 30s to run in Python 2.7
# But only 1min 30s to run in Python 3.5!
#
# Note: gym changed from version 0.7.3 to 0.8.0
# MountainCar episode length is capped at 200 in later versions.
# This means your agent can't learn as much in the earlier episodes
# since they are no longer as long.
import gym
import os
import sys
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from gym import wrappers
from datetime import datetime
from sklearn.pipeline import FeatureUnion
from sklearn.preprocessing import StandardScaler
from sklearn.kernel_approximation import RBFSampler
from sklearn.linear_model import SGDRegressor
# SGDRegressor defaults:
# loss='squared_loss', penalty='l2', alpha=0.0001,
# l1_ratio=0.15, fit_intercept=True, n_iter=5, shuffle=True,
# verbose=0, epsilon=0.1, random_state=None, learning_rate='invscaling',
# eta0=0.01, power_t=0.25, warm_start=False, average=False
# Inspired by https://github.com/dennybritz/reinforcement-learning
class FeatureTransformer:
def __init__(self, env, n_components=500):
observation_examples = np.array([env.observation_space.sample() for x in range(10000)])
scaler = StandardScaler()
scaler.fit(observation_examples)
    # Used to convert a state to a featurized representation.
    # We use RBF kernels with different variances to cover different parts of the space.
featurizer = FeatureUnion([
("rbf1", RBFSampler(gamma=5.0, n_components=n_components)),
("rbf2", RBFSampler(gamma=2.0, n_components=n_components)),
("rbf3", RBFSampler(gamma=1.0, n_components=n_components)),
("rbf4", RBFSampler(gamma=0.5, n_components=n_components))
])
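    # four RBF feature maps x n_components each; with the default 500 that is 2000 features per state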
example_features = featurizer.fit_transform(scaler.transform(observation_examples))
self.dimensions = example_features.shape[1]
self.scaler = scaler
self.featurizer = featurizer
def transform(self, observations):
# print "observations:", observations
scaled = self.scaler.transform(observations)
# assert(len(scaled.shape) == 2)
return self.featurizer.transform(scaled)
# Holds one SGDRegressor for each action
class Model:
def __init__(self, env, feature_transformer, learning_rate):
self.env = env
self.models = []
self.feature_transformer = feature_transformer
for i in range(env.action_space.n):
model = SGDRegressor(learning_rate=learning_rate)
model.partial_fit(feature_transformer.transform( [env.reset()] ), [0])
self.models.append(model)
def predict(self, s):
X = self.feature_transformer.transform([s])
result = np.stack([m.predict(X) for m in self.models]).T
assert(len(result.shape) == 2)
return result
def update(self, s, a, G):
X = self.feature_transformer.transform([s])
assert(len(X.shape) == 2)
self.models[a].partial_fit(X, [G])
def sample_action(self, s, eps):
# eps = 0
# Technically, we don't need to do epsilon-greedy
# because SGDRegressor predicts 0 for all states
# until they are updated. This works as the
# "Optimistic Initial Values" method, since all
# the rewards for Mountain Car are -1.
if np.random.random() < eps:
return self.env.action_space.sample()
else:
return np.argmax(self.predict(s))
# returns a list of states_and_rewards, and the total reward
def play_one(model, env, eps, gamma):
observation = env.reset()
done = False
totalreward = 0
iters = 0
while not done and iters < 10000:
action = model.sample_action(observation, eps)
prev_observation = observation
observation, reward, done, info = env.step(action)
# update the model
    next_q = model.predict(observation)  # avoid shadowing the builtin next()
    # assert(next_q.shape == (1, env.action_space.n))
    G = reward + gamma*np.max(next_q[0])
model.update(prev_observation, action, G)
totalreward += reward
iters += 1
return totalreward
def plot_cost_to_go(env, estimator, num_tiles=20):
x = np.linspace(env.observation_space.low[0], env.observation_space.high[0], num=num_tiles)
y = np.linspace(env.observation_space.low[1], env.observation_space.high[1], num=num_tiles)
X, Y = np.meshgrid(x, y)
# both X and Y will be of shape (num_tiles, num_tiles)
Z = np.apply_along_axis(lambda _: -np.max(estimator.predict(_)), 2, np.dstack([X, Y]))
# Z will also be of shape (num_tiles, num_tiles)
fig = plt.figure(figsize=(10, 5))
ax = fig.add_subplot(111, projection='3d')
surf = ax.plot_surface(X, Y, Z,
rstride=1, cstride=1, cmap=matplotlib.cm.coolwarm, vmin=-1.0, vmax=1.0)
ax.set_xlabel('Position')
ax.set_ylabel('Velocity')
ax.set_zlabel('Cost-To-Go == -V(s)')
ax.set_title("Cost-To-Go Function")
fig.colorbar(surf)
plt.show()
def plot_running_avg(totalrewards):
N = len(totalrewards)
running_avg = np.empty(N)
for t in range(N):
running_avg[t] = totalrewards[max(0, t-100):(t+1)].mean()
plt.plot(running_avg)
plt.title("Running Average")
plt.show()
def main(show_plots=True):
env = gym.make('MountainCar-v0')
ft = FeatureTransformer(env)
model = Model(env, ft, "constant")
gamma = 0.99
if 'monitor' in sys.argv:
filename = os.path.basename(__file__).split('.')[0]
monitor_dir = './' + filename + '_' + str(datetime.now())
env = wrappers.Monitor(env, monitor_dir)
N = 300
totalrewards = np.empty(N)
for n in range(N):
# eps = 1.0/(0.1*n+1)
eps = 0.1*(0.97**n)
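    # geometric decay from 0.1; since 0.97**23 is about 0.5, eps roughly halves every 23 episodes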
if n == 199:
print("eps:", eps)
# eps = 1.0/np.sqrt(n+1)
totalreward = play_one(model, env, eps, gamma)
totalrewards[n] = totalreward
if (n + 1) % 100 == 0:
print("episode:", n, "total reward:", totalreward)
print("avg reward for last 100 episodes:", totalrewards[-100:].mean())
print("total steps:", -totalrewards.sum())
if show_plots:
plt.plot(totalrewards)
plt.title("Rewards")
plt.show()
plot_running_avg(totalrewards)
# plot the optimal state-value function
plot_cost_to_go(env, model)
if __name__ == '__main__':
# for i in range(10):
# main(show_plots=False)
main() | apache-2.0 | 6,356,171,879,016,205,000 | 31.636364 | 93 | 0.674861 | false | 3.194764 | false | false | false |
dimas-lex/osbb | osb/osb/billing/testing/AccountsServiceTest.py | 1 | 2043 | # -*- coding: utf-8 -*-
from django.test import TestCase
from osb.billing.models import Accounts, Services
# from osb.billing.Services.AccountsService import AccountsService
from osb.billing.Services.AccountsService import AccountsService
from osb.billing.Services.ServiceService import *
class AccountsServiceTest(TestCase):
def setUp(self):
self.accountServ = AccountsService()
self.accountServ.create(uid="1", name="lion", address="pr")
self.accountServ.create(uid="2", name="cat", address="pr2")
self.accountServ.create(uid="3", name="cat", address="pr2", porch=3)
def test_01_get_all(self):
""" Test 'get_all' method """
print self.test_01_get_all.__doc__
self.assertEqual(len(self.accountServ.get_all()), 3)
def test_02_get_by_porch(self):
""" Test 'get_by_porch' method """
print self.test_02_get_by_porch.__doc__
self.assertEqual(len(self.accountServ.get_by_porch(porch=3)), 1)
def test_03_create(self):
""" Test 'create' method """
print self.test_03_create.__doc__
self.assertTrue(
isinstance(
self.accountServ.create(uid="4", name="dog", address="pr"),
Accounts
)
)
def test_04_update(self):
""" Test 'update' method """
print self.test_04_update.__doc__
self.assertTrue( self.accountServ.update(name="dog", uid="3", address="prr") )
def test_05_delete(self):
""" Test 'delete' method """
print self.test_05_delete.__doc__
self.assertTrue( self.accountServ.delete(uid="3") )
# def test_06_print(self):
# """ Just #print out results """
# print self.test_06_print.__doc__
# accounts = self.accountServ.get_all()
# for acc in accounts:
# print ( " ".join(("uid", acc.uid, "name", acc.name, "address", acc.address, "porch", str(acc.porch), "deleted", str(acc.deleted) )) )
# self.assertTrue(True) | gpl-2.0 | -9,115,395,383,184,236,000 | 33.644068 | 148 | 0.594224 | false | 3.47449 | true | false | false |
FAForever/faftools | parseudp/FAPacket.py | 1 | 10060 | #!/usr/bin/env python
from __future__ import print_function  # the script mixes Python 2 idioms with print() calls

import sys
import struct
import binascii
import string
import argparse
import zlib
from curses import ascii
# expects tshark on stdin as in:
# tshark -r game.pcap -R 'ip.addr==192.168.0.101' -T fields -d udp.port==6112,echo -e ip.src -e udp.srcport -e ip.dst -e udp.dstport -e frame.time_relative -e echo.data | python FAPacket.py -e -p
# any non-FA packets will crash the parser
# running:
# FAPacket.py [ -p ] [ -e ]
# -p print command stream packets
# -e print encapsulation packets
def hexdump_hash(data):
res = ''
for i in range(0, len(data)):
res += '{0:02x}'.format(ord(data[i]))
return res
def hexdump(data, indent):
res = ''
for i in range(0, len(data), 16):
if i:
for k in range(0, indent):
res += ' '
for j in range(i, min(i + 16, len(data))):
res += '{0:02x} '.format(ord(data[j]))
for k in range(min(i + 16, len(data)), i + 16):
res += ' '
for j in range(i, min(i + 16, len(data))):
if ascii.isprint(data[j]):
res += data[j]
else:
res += '.'
res += '\n'
return res
class FAPacket:
def __init__(self, data):
self.type = ord(data[0])
self.len = ord(data[1]) | ord(data[2]) << 8;
self.data = data[3:]
        # packet types that decode() knows how to pretty-print
        self.decodable = {0x00, 0x01, 0x03, 0x32, 0x33, 0x34}
def is_advance(self):
return self.type == 0
def is_ack(self):
return self.type == 0x32
def is_set_cmdsrc(self):
return self.type == 1
def cmdsrc(self):
return ord(self.data[0])
def ack_cmdsource(self):
return ord(self.data[0])
def pp_data(self, indent):
return hexdump(self.data, indent)
def can_decode(self):
return self.type in self.decodable
def simtick(self):
if self.type == 0x32:
return struct.unpack("<bL", self.data)[1]
if self.type == 0x33 or self.type == 0x34 or self.type == 0:
return struct.unpack("<L", self.data)[0]
def decode(self):
if self.type == 0:
return "ADV {0}".format(struct.unpack("<L", self.data)[0])
elif self.type == 0x32:
return "ACK {0} {1}".format(self.ack_cmdsource(), self.simtick())
elif self.type == 0x33:
return "SIM {0}".format(struct.unpack("<L", self.data)[0])
elif self.type == 0x34:
return "FIN {0}".format(struct.unpack("<L", self.data)[0])
elif self.type == 1:
return "CMDSOURCE {0}".format(ord(self.data[0]))
elif self.type == 3:
(h, s) = struct.unpack("<16sL", self.data)
return "HASH {0} {1}".format(s, hexdump_hash(h))
else:
return "(error)"
class FAEncap(object):
def __init__(self, src, srcport, dst, dstport, time, packet):
self.offset = 0
self.src = src
self.dst = dst
self.srcport = srcport
self.dstport = dstport
self.time = time
if ord(packet[0]) == 8:
self.type = 8
self.data = packet[1:]
self.len = len(packet) - 1
elif ord(packet[0]) == 0:
self.type = 0
self.data = packet[1:]
self.len = len(packet) - 1
elif ord(packet[0]) == 255:
self.type=255
self.data=''
self.len = len(packet) - 1
else:
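            # 15-byte header, little-endian: type(1) mask(4) seq(2) ack(2) seq2(2) ack2(2) len(2); payload follows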
(self.type, self.mask, self.seq, self.ack, self.seq2, self.ack2, self.len) = struct.unpack("<bLHHHHH", packet[0:15])
self.data = packet[15:]
    def src_full(self):
        return self.src + ":" + self.srcport
    def dst_full(self):
        return self.dst + ":" + self.dstport
def connection(self):
return self.src_full() + "->" + self.dst_full()
def pp_data(self, indent):
if self.type == 8:
return self.data
else:
return hexdump(self.data, indent)
def packets(self):
ret = []
while True:
p = self.next_packet()
if p == None:
return ret
ret.append(p)
def next_packet(self):
if self.type != 4:
return None
if self.offset + 3 > len(self.data):
return None
len_ = ord(self.data[self.offset + 1]) | ord(self.data[self.offset + 2]) << 8
if self.offset + len_ > len(self.data):
return None
offset = self.offset;
self.offset += len_
if offset == self.offset:
sys.stdout.write("waarg {0} {1} {2}".format(offset, self.offset, binascii.hexlify(self.data)))
return FAPacket(self.data[offset : self.offset])
def prepend_remaining(self, r):
self.data = str(r) + str(self.data)
def remaining(self):
if self.type == 4:
return self.data[self.offset:]
return ''
class FAPeerState(object):
def __init__(self):
self.addr_to_cmdsrc = { }
self.cmdsrc_to_addr = [ ]
self.simtick = [ ]
self.ack_simtick = [ ]
def process_egress(self, addr, packet):
if packet.is_set_cmdsrc():
self.cmdsource = packet.cmdsrc()
if packet.is_advance():
self.simtick[self.addr_to_cmdsrc[addr] ] += packet.simtick()
elif packet.is_ack():
s1 = self.addr_to_cmdsrc[addr]
s2 = packet.ack_cmdsource()
self.ack_simtick[s1][s2] = packet.simtick()
def process_ingress(self, addr, packet):
if packet.is_set_cmdsrc():
s = packet.cmdsrc()
self.addr_to_cmdsrc[addr] = s
while len(self.cmdsrc_to_addr) <= s:
self.cmdsrc_to_addr.append('')
self.simtick.append(0)
            self.ack_simtick.append({})  # maps peer cmdsource -> last acked simtick
self.cmdsrc_to_addr[s] = addr
argp = argparse.ArgumentParser(prog = "PROG")
argp.add_argument("-e", action="store_true")
argp.add_argument("-t", action="store_true")
argp.add_argument("-p", action="store_true")
args = argp.parse_args()
remain = { }
inflate = { }
inflate_remain = { }
cmdpackets_seen = { }
future = { }
c32 = [ 0, 0, 0 ]
c33 = 0
c34 = 0
tick = 0
seq_seen = { }
for line in sys.stdin:
(src, srcport, dst, dstport, time, data) = line.split();
#print "*{0}*{1}*{2}*{3}*{4}*{5}".format(src, srcport, dst, dstport, time, data);
e = FAEncap(src, srcport, dst, dstport, time, binascii.unhexlify(data.translate(None, ':')))
if not e.connection() in seq_seen:
seq_seen[e.connection()] = {}
if not e.connection() in remain:
remain[e.connection()] = ''
if not e.connection() in future:
future[e.connection()] = { }
s = '{0} {1} type={2} len={3: 4d}'.format(e.time, e.connection(), e.type, e.len)
if e.type != 4:
print(s)
if e.len:
print(' ' * 7, hexdump(e.data, 8))
elif e.type == 4:
if e.seq2 in seq_seen[e.connection()]:
continue
if len(seq_seen[e.connection()]) and not e.seq2 - 1 in seq_seen[e.connection()]:
print("!! packet received out of sequence !! {0} cseq={1}".format(e.connection(), e.seq2))
future[e.connection()][e.seq2] = e
continue
future[e.connection()][e.seq2] = e
seq_ix = e.seq2
while seq_ix in future[e.connection()]:
e = future[e.connection()][seq_ix]
seq_seen[e.connection()][e.seq2] = 1
seq_ix += 1
s = '{0} {1} type={2} len={3: 4d}'.format(e.time, e.connection(), e.type, e.len)
s += ' cseq={0} cack={1} mask={2} eseq={3} eack={4}'.format(e.seq2, e.ack2, e.mask, e.seq, e.ack)
if args.e:
print(s)
if not e.connection() in inflate:
print(' ' * 7, e.pp_data(8))
if args.p:
if not e.connection() in cmdpackets_seen:
if e.data == "\x02\x00\x00\x00\xff\xff":
print(" !!deflate detected!! on " + e.connection())
inflate[e.connection()] = zlib.decompressobj()
if e.connection() in inflate:
if not e.connection() in cmdpackets_seen:
data = "\x78\x9c"
data += e.data
cmdpackets_seen[e.connection()] = 1
inflate_remain[e.connection()] = ''
else:
data = inflate_remain[e.connection()] + e.data
inflated = inflate[e.connection()].decompress(data)
print(' ' * 7, hexdump(inflated, 8))
e.data = inflated
inflate_remain[e.connection()] = inflate[e.connection()].unconsumed_tail
e.prepend_remaining(remain[e.connection()])
#print e.pp_data(16);
for p in e.packets():
if p.type == 0x32:
c32[p.ack_cmdsource()] = p.simtick()
elif p.type == 0x33:
c33 = p.simtick()
elif p.type == 0x34:
c34 = p.simtick()
elif p.type == 0:
tick += p.simtick()
if p.can_decode():
print(' ', p.decode())
else:
s=' {0:02x} {1: 4d} '.format(p.type, p.len - 3)
print(s, p.pp_data(len(s) + 1))
foo = ""
foo = ''
if c33 < c34:
foo += '<'
elif c33 > c34:
foo += '>'
else:
foo += ' '
if args.t:
print("TICK", ''.join([ str(c32[i]) + ' ' for i in range(0, len(c32)) ]), c33, c34, tick, foo)
remain[e.connection()] = e.remaining()
| gpl-3.0 | 1,441,050,445,854,061,800 | 32.872054 | 195 | 0.48827 | false | 3.387205 | false | false | false |
mpercich/Calendarize | ios/dateparser/lib/python2.7/site-packages/ruamel/yaml/loader.py | 1 | 2063 | # coding: utf-8
from __future__ import absolute_import
from ruamel.yaml.reader import Reader
from ruamel.yaml.scanner import Scanner, RoundTripScanner
from ruamel.yaml.parser import Parser, RoundTripParser
from ruamel.yaml.composer import Composer
from ruamel.yaml.constructor import BaseConstructor, SafeConstructor, Constructor, \
RoundTripConstructor
from ruamel.yaml.resolver import VersionedResolver
__all__ = ['BaseLoader', 'SafeLoader', 'Loader', 'RoundTripLoader']
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, VersionedResolver):
def __init__(self, stream, version=None, preserve_quotes=None):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Composer.__init__(self)
BaseConstructor.__init__(self)
        VersionedResolver.__init__(self, version)
class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, VersionedResolver):
def __init__(self, stream, version=None, preserve_quotes=None):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Composer.__init__(self)
SafeConstructor.__init__(self)
        VersionedResolver.__init__(self, version)
class Loader(Reader, Scanner, Parser, Composer, Constructor, VersionedResolver):
def __init__(self, stream, version=None, preserve_quotes=None):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Composer.__init__(self)
Constructor.__init__(self)
        VersionedResolver.__init__(self, version)
class RoundTripLoader(Reader, RoundTripScanner, RoundTripParser, Composer,
RoundTripConstructor, VersionedResolver):
def __init__(self, stream, version=None, preserve_quotes=None):
Reader.__init__(self, stream)
RoundTripScanner.__init__(self)
RoundTripParser.__init__(self)
Composer.__init__(self)
RoundTripConstructor.__init__(self, preserve_quotes=preserve_quotes)
VersionedResolver.__init__(self, version)
| mit | 5,460,430,173,511,384,000 | 37.203704 | 88 | 0.674746 | false | 4.085149 | false | false | false |
uraxy/qiidly | qiidly/command_line.py | 1 | 2094 | # -*- coding: utf-8 -*-
"""dummy docstring."""
import argparse
from qiidly import __version__, __description__
from qiidly.main import Qiidly
def _arg_parser():
parser = argparse.ArgumentParser(
prog='qiidly',
description=__description__)
parser.add_argument('-V', '--version',
action='version',
version='%(prog)s version {}'.format(__version__))
parser.add_argument('-q', '--qiita-token',
required=True,
help='Qiita access token')
parser.add_argument('-f', '--feedly-token',
required=True,
help='Feedly developer access token')
return parser
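# Example invocation (tokens are placeholders, not real credentials):
#   qiidly -q <QIITA_TOKEN> -f <FEEDLY_TOKEN>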
# http://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input
def _query_yes_no(question, default=None):
valid = {'yes': True, 'y': True,
'no': False, 'n': False}
if default is None:
prompt = ' [y/n] '
elif default == "yes":
prompt = ' [Y/n] '
elif default == 'no':
prompt = ' [y/N] '
else:
raise ValueError("Invalid default answer: '{default}'".format(default=default))
while True:
print(question + prompt, end='')
choice = input().lower()
if choice == '' and default is not None:
return valid[default]
elif choice in valid:
return valid[choice]
else:
print("Please respond with 'y' or 'n'.")
def main():
"""dummy."""
args = _arg_parser().parse_args()
for target in ['tags', 'followees']:
q = Qiidly(args.qiita_token, args.feedly_token, target=target)
have_to_sync = q.have_to_sync()
q.print_todo()
if not have_to_sync:
print('Already up-to-date.')
print()
continue
# sync to Feedly
print('')
if not _query_yes_no('Sync to Feedly?', default=None):
print('Did nothing.')
continue
q.sync()
print('Done!')
if __name__ == '__main__':
main()
| mit | -3,202,998,623,235,872,300 | 28.083333 | 89 | 0.523878 | false | 3.996183 | false | false | false |
oneklc/dimod | dimod/views/bqm.py | 1 | 6577 | # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# =============================================================================
try:
import collections.abc as abc
except ImportError:
import collections as abc
class BQMView(object):
__slots__ = '_adj',
def __init__(self, bqm):
self._adj = bqm._adj
# support python2 pickle
def __getstate__(self):
return {'_adj': self._adj}
# support python2 pickle
def __setstate__(self, state):
self._adj = state['_adj']
class LinearView(BQMView, abc.MutableMapping):
"""Acts as a dictionary `{v: bias, ...}` for the linear biases.
The linear biases are stored in a dict-of-dicts format, where 'self loops'
store the linear biases.
So `{v: bias}` is stored `._adj = {v: {v: Bias(bias)}}`.
If v is not in ._adj[v] then the bias is treated as 0.
"""
def __delitem__(self, v):
if v not in self:
raise KeyError
adj = self._adj
if len(adj[v]) - (v in adj[v]) > 0:
raise ValueError("there are interactions associated with {} that must be deleted first".format(v))
del adj[v]
def __getitem__(self, v):
# developer note: we could try to match the type with other biases in
# the bqm, but I think it is better to just use python int 0 as it
# is most likely to be compatible with other numeric types.
return self._adj[v].get(v, 0)
def __iter__(self):
return iter(self._adj)
def __len__(self):
return len(self._adj)
def __setitem__(self, v, bias):
adj = self._adj
if v in adj:
adj[v][v] = bias
else:
adj[v] = {v: bias}
def __str__(self):
return str(dict(self))
def items(self):
return LinearItemsView(self)
class LinearItemsView(abc.ItemsView):
"""Faster items iteration for LinearView."""
__slots__ = ()
def __iter__(self):
for v, neighbours in self._mapping._adj.items():
# see note in LinearView.__getitem__
yield v, neighbours.get(v, 0)
class QuadraticView(BQMView, abc.MutableMapping):
"""Acts as a dictionary `{(u, v): bias, ...}` for the quadratic biases.
The quadratic biases are stored in a dict-of-dicts format. So `{(u, v): bias}` is stored as
`._adj = {u: {v: Bias(bias)}, v: {u: Bias(bias)}}`.
"""
def __delitem__(self, interaction):
u, v = interaction
if u == v:
raise KeyError('{} is not an interaction'.format(interaction))
adj = self._adj
del adj[v][u]
del adj[u][v]
def __getitem__(self, interaction):
u, v = interaction
if u == v:
raise KeyError('{} cannot have an interaction with itself'.format(u))
return self._adj[u][v]
def __iter__(self):
seen = set()
adj = self._adj
for u, neigh in adj.items():
for v in neigh:
if u == v:
# not adjacent to itself
continue
if v not in seen:
yield (u, v)
seen.add(u)
def __len__(self):
# remove the self-loops
return sum(len(neighbours) - (v in neighbours)
for v, neighbours in self._adj.items()) // 2
def __setitem__(self, interaction, bias):
u, v = interaction
if u == v:
raise KeyError('{} cannot have an interaction with itself'.format(u))
adj = self._adj
# we don't know what type we want the biases, so we require that the variables already
# exist before we can add an interaction between them
if u not in adj:
raise KeyError('{} is not already a variable in the binary quadratic model'.format(u))
if v not in adj:
raise KeyError('{} is not already a variable in the binary quadratic model'.format(v))
adj[u][v] = adj[v][u] = bias
def __str__(self):
return str(dict(self))
def items(self):
return QuadraticItemsView(self)
class QuadraticItemsView(abc.ItemsView):
"""Faster items iteration"""
__slots__ = ()
def __iter__(self):
adj = self._mapping._adj
for u, v in self._mapping:
yield (u, v), adj[u][v]
class NeighbourView(abc.Mapping):
"""Acts as a dictionary `{u: bias, ...}` for the neighbours of a variable `v`.
See Also:
:class:`AdjacencyView`
"""
__slots__ = '_adj', '_var'
def __init__(self, adj, v):
self._adj = adj
self._var = v
def __getitem__(self, v):
u = self._var
if u == v:
raise KeyError('{} cannot have an interaction with itself'.format(u))
return self._adj[u][v]
def __setitem__(self, u, bias):
v = self._var
if u == v:
raise KeyError('{} cannot have an interaction with itself'.format(u))
adj = self._adj
if u not in adj:
raise KeyError('{} is not an interaction'.format((u, v)))
adj[v][u] = adj[u][v] = bias
def __iter__(self):
v = self._var
for u in self._adj[v]:
if u != v:
yield u
def __len__(self):
v = self._var
neighbours = self._adj[v]
return len(neighbours) - (v in neighbours) # ignore self
def __str__(self):
return str(dict(self))
class AdjacencyView(BQMView, abc.Mapping):
"""Acts as a dict-of-dicts `{u: {v: bias}, v: {u: bias}}` for the quadratic biases.
The quadratic biases are stored in a dict-of-dicts format. So `{u: {v: bias}, v: {u: bias}}`
is stored as `._adj = {u: {v: Bias(bias)}, v: {u: Bias(bias)}}`.
"""
def __getitem__(self, v):
if v not in self._adj:
raise KeyError('{} is not a variable'.format(v))
return NeighbourView(self._adj, v)
def __iter__(self):
return iter(self._adj)
def __len__(self):
return len(self._adj)
| apache-2.0 | -1,572,494,099,592,310,500 | 27.846491 | 110 | 0.550403 | false | 3.769054 | false | false | false |
51reboot/actual_09_homework | 08/zhaoyong/cmdb/userdb.py | 1 | 5201 | #encoding: utf-8
import json
import gconf
import MySQLdb
from dbutils import execute_fetch_sql
from dbutils import execute_commit_sql
'''
Fetch all users.
'''
def get_users():
_columns = ('id','username','password','age')
_sql = 'select * from user'
_count,_rt_list = execute_fetch_sql(_sql)
_rt = []
for _line in _rt_list:
_rt.append(dict(zip(_columns, _line)))
print _rt
return _rt
'''
Save user data.
'''
def save_users(users):
fhandler = open(gconf.USER_FILE, 'wb')
fhandler.write(json.dumps(users))
fhandler.close()
'''
Validate a user login.
True/False: username and password correct / username or password wrong.
Login succeeds if some user's username & password match the input;
it fails if no user's username & password match the input.
'''
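# Example (hypothetical): validate_login('alice', '123456') returns True
# only if a matching row exists in the user table.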
def validate_login(username, password):
#_sql = 'select * from user where username="{username}" and password=md5("{password}")'.format(username=username,password=password)
_sql = 'select * from user where username=%s and password=md5(%s)'
_count,_rt_list = execute_fetch_sql(_sql,(username,password))
return _count != 0
'''
Validate the data for adding a user.
Returns (True/False, message).
'''
def validate_add_user(username, password, age):
users = get_users()
for user in users:
if user.get('username') == username:
            return False, u'Username already exists'
    if len(password) < 6:
        return False, u'Password must be at least 6 characters'
    if not str(age).isdigit() or int(age) < 0 or int(age) > 100:
        return False, u'Invalid age'
return True, ''
'''
Add a user.
'''
def add_user(username, password, age):
_sql = 'insert into user(username,password,age) values (%s,md5(%s),%s) '
_args = (username,password,age)
_count = execute_commit_sql(_sql,(username,password,age))
'''
Fetch user information.
'''
def get_user(username):
users = get_users()
for user in users:
if user.get('username') == username:
return user
return None
def get_user_id(id,fetch=True):
_columns = ('id','username','password','age')
_sql = 'select * from user where id=%s'
    _args = (id,)
_count, _rt_list = execute_fetch_sql(_sql,_args)
_rt = []
for _line in _rt_list:
_rt.append(dict(zip(_columns, _line)))
return _rt
#get_user_id(19)
'''
Validate a user update.
'''
def validate_update_user(username, password, age,*args):
if get_user(username) is None:
        return False, u'User does not exist'
    if len(password) < 6:
        return False, u'Password must be at least 6 characters'
    if not str(age).isdigit() or int(age) < 0 or int(age) > 100:
        return False, u'Invalid age'
return True, ''
'''
Validate a user age update.
'''
def validate_update_user_age(uid, user_age,*args):
if get_user_id(uid) is None:
        return False, u'User does not exist'
    if not str(user_age).isdigit() or int(user_age) <= 0 or int(user_age) > 100:
        return False, u'Invalid age input'
return True, ''
'''
Update user information.
'''
def update_user(user_age,uid):
_sql = 'update user set age=%s where id=%s'
_args = (user_age,uid)
_count = execute_commit_sql(_sql,_args)
'''
Validate the user.
'''
def validate_delete_user(uid):
if get_user_id(uid) is None:
        return False, u'User does not exist'
return True, ''
'''
Delete a user.
'''
def delete_user(uid):
_sql = 'delete from user where id=%s '
    _args = (uid,)
_count = execute_commit_sql(_sql,_args)
'''
Validate user information.
'''
def validate_charge_user_password(uid,user_password,username,manager_password):
if not validate_login(username,manager_password):
        return False, u'Administrator password is wrong'
    if get_user(username) is None:
        return False, u'User does not exist'
    if len(user_password) < 6:
        return False, u'Password must be at least 6 characters'
return True,''
'''
Change a user's password.
'''
def charge_user_password(uid,user_password):
_sql = 'update user set password=md5(%s) where id=%s'
_args = (user_password,uid)
_count = execute_commit_sql(_sql, _args)
'''
Display access-log entries.
'''
def accesslog(topn):
_columns = ('count','url','ip','code')
_sql = 'select * from accesslog limit %s'
    _args = (topn,)
_count, _rt_list = execute_fetch_sql(_sql,_args)
_rt = []
for _line in _rt_list:
_rt.append(dict(zip(_columns, _line)))
return _rt
if __name__ == '__main__':
print accesslog(1)
# update_user('aa','123456',88,18)
#get_userid("aa")
#print get_userid()
#print validate_login('kk', '123456')
#print validate_login('kk', '1234567')
#print validate_login('woniu', '123456')
#username = 'woniu1'
#password = '123456'
#age = '28'
#_is_ok, _error = validate_add_user(username, password, age)
#if _is_ok:
# add_user(username, password, age)
#else:
# print _error
#
#print delete_user('woniu2')
#print validate_update_user('woniu2', password, age)[1]
#print validate_update_user('kk', password, 'ac')[1]
#_is_ok, _error = validate_update_user('kk', password, 30)
#if _is_ok:
# update_user('kk', 'abcdef', 31)
| mit | -8,930,152,261,406,002,000 | 22.043689 | 135 | 0.610912 | false | 2.709475 | false | false | false |
boudewijnrempt/kura | doc/script5.py | 1 | 3804 | import os.path, sys, string, codecs
from kuralib import kuraapp
from kuragui.guiconfig import guiConf
from kuragui import guiconfig
# Boolean constants for ancient Pythons that lack True/False literals.
False = 0
True = 1
def splitCSVLine(line):
"""Splits a CSV-formatted line into a list.
See: http://www.colorstudy.com/software/webware/
"""
list = []
position = 0
fieldStart = 0
while 1:
if position >= len(line):
# This only happens when we have a trailing comma
list.append('')
return list
if line[position] == '"':
field = ""
position = position + 1
while 1:
end = string.find(line, '"', position)
if end == -1:
# This indicates a badly-formed CSV file, but
# we'll accept it anyway.
field = line[position:]
position = len(line)
break
if end + 1 < len(line) and line[end + 1] == '"':
field = "%s%s" % (field, line[position:end + 1])
position = end + 2
else:
field = "%s%s" % (field, line[position:end])
position = end + 2
break
else:
end = string.find(line, ",", position)
if end == -1:
list.append(line[position:end])
return list
field = line[position:end]
position = end + 1
list.append(field)
return list
def init():
if guiConf.backend == guiconfig.FILE:
kuraapp.initApp(guiConf.backend,
dbfile = os.path.join(guiConf.filepath,
guiConf.datastore))
elif guiConf.backend == guiconfig.SQL:
if guiConf.username != "":
try:
kuraapp.initApp(guiConf.backend,
username = str(guiConf.username),
database = str(guiConf.database),
password = str(guiConf.password),
hostname = str(guiConf.hostname))
except Exception, e:
print "Error connecting to database: %s" % e
sys.exit(1)
kuraapp.initCurrentEnvironment(guiConf.usernr,
guiConf.languagenr,
guiConf.projectnr)
def main(args):
if len(args) < 2:
print "Usage: python script5.py f1...fn"
sys.exit(1)
init()
for line in codecs.open(args[1], "r", "UTF-8"):
line = splitCSVLine(line)
print "Inserting %s" % line[0]
lexeme = kuraapp.app.createObject("lng_lex", fields={},
form = line[0],
glosse = line[1],
languagenr = guiConf.languagenr,
phonetic_form = line[3],
usernr = guiConf.usernr)
lexeme.insert()
tag = kuraapp.app.createObject("lng_lex_tag", fields={},
lexnr = lexeme.lexnr,
tag = "POS",
value = line[2],
usernr = guiConf.usernr)
tag.insert()
tag = kuraapp.app.createObject("lng_lex_tag",
lexnr = lexeme.lexnr,
tag = "FILE",
value = args[1],
usernr = guiConf.usernr)
tag.insert()
kuraapp.app.saveFile()
if __name__ == "__main__":
main(sys.argv)
| bsd-2-clause | 1,558,941,842,772,906,000 | 36.294118 | 74 | 0.431651 | false | 4.544803 | false | false | false |
elric/virtaal-debian | virtaal/plugins/spellchecker.py | 1 | 13888 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010-2011 Zuza Software Foundation
#
# This file is part of Virtaal.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import logging
import os
import os.path
import re
from gettext import dgettext
import gobject
from virtaal.common import pan_app
from virtaal.controllers.baseplugin import BasePlugin
if not pan_app.DEBUG:
try:
import psyco
except:
psyco = None
else:
psyco = None
_dict_add_re = re.compile('Add "(.*)" to Dictionary')
class Plugin(BasePlugin):
"""A plugin to control spell checking.
It can also download spell checkers on Windows."""
display_name = _('Spell Checker')
description = _('Check spelling and provide suggestions')
version = 0.1
_base_URL = 'http://dictionary.locamotion.org/hunspell/'
_dict_URL = _base_URL + '%s.tar.bz2'
_lang_list = 'languages.txt'
# INITIALIZERS #
def __init__(self, internal_name, main_controller):
self.internal_name = internal_name
# If these imports fail, the plugin is automatically disabled
import gtkspell
import enchant
self.gtkspell = gtkspell
self.enchant = enchant
# languages that we've handled before:
self._seen_languages = {}
# languages supported by enchant:
self._enchant_languages = self.enchant.list_languages()
# HTTP clients (Windows only)
self.clients = {}
# downloadable languages (Windows only)
self.languages = set()
unit_view = main_controller.unit_controller.view
self.unit_view = unit_view
self._connect_id = self.unit_view.connect('textview-language-changed', self._on_unit_lang_changed)
self._textbox_ids = []
self._unitview_ids = []
# For some reason the i18n of gtkspell doesn't work on Windows, so we
# intervene. We also don't want the Languages submenu, so we remove it.
if unit_view.sources:
self._connect_to_textboxes(unit_view, unit_view.sources)
srclang = main_controller.lang_controller.source_lang.code
for textview in unit_view.sources:
self._on_unit_lang_changed(unit_view, textview, srclang)
else:
self._unitview_ids.append(unit_view.connect('sources-created', self._connect_to_textboxes))
if unit_view.targets:
self._connect_to_textboxes(unit_view, unit_view.targets)
tgtlang = main_controller.lang_controller.target_lang.code
for textview in unit_view.targets:
self._on_unit_lang_changed(unit_view, textview, tgtlang)
else:
self._unitview_ids.append(unit_view.connect('targets-created', self._connect_to_textboxes))
def destroy(self):
"""Remove signal connections and disable spell checking."""
for id in self._unitview_ids:
self.unit_view.disconnect(id)
for textbox, id in self._textbox_ids:
textbox.disconnect(id)
if getattr(self, '_connect_id', None):
self.unit_view.disconnect(self._connect_id)
for text_view in self.unit_view.sources + self.unit_view.targets:
self._disable_checking(text_view)
def _connect_to_textboxes(self, unitview, textboxes):
for textbox in textboxes:
self._textbox_ids.append((
textbox,
textbox.connect('populate-popup', self._on_populate_popup)
))
# METHODS #
def _build_client(self, url, clients_id, callback, error_callback=None):
from virtaal.support.httpclient import HTTPClient
client = HTTPClient()
client.set_virtaal_useragent()
self.clients[clients_id] = client
if logging.root.level != logging.DEBUG:
client.get(url, callback)
else:
def error_log(request, result):
logging.debug('Could not get %s: status %d' % (url, request.status))
client.get(url, callback, error_callback=error_log)
def _download_checker(self, language):
"""A Windows-only way to obtain new dictionaries."""
if 'APPDATA' not in os.environ:
# We won't have an idea of where to save it, so let's give up now
return
if language in self.clients:
# We already tried earlier, or started the process
return
if not self.languages:
if self._lang_list not in self.clients:
# We don't yet have a list of available languages
url = self._base_URL + self._lang_list #index page listing all the dictionaries
callback = lambda *args: self._process_index(language=language, *args)
self._build_client(url, self._lang_list, callback)
# self._process_index will call this again, so we can exit
return
language_to_download = None
# People almost definitely want 'en_US' for 'en', so let's ensure
# that we get that right:
if language == 'en':
language_to_download = 'en_US'
self.clients[language] = None
else:
# Let's see if a dictionary is available for this language:
for l in self.languages:
if l == language or l.startswith(language+'_'):
self.clients[language] = None
logging.debug("Will use %s to spell check %s", l, language)
language_to_download = l
break
else:
# No dictionary available
# Indicate that we never need to try this language:
logging.debug("Found no suitable language for spell checking")
self.clients[language] = None
return
# Now download the actual files after we have determined that it is
# available
callback = lambda *args: self._process_tarball(language=language, *args)
url = self._dict_URL % language_to_download
self._build_client(url, language, callback)
def _tar_ok(self, tar):
# TODO: Verify that the tarball is ok:
# - only two files
# - must be .aff and .dic
# - language codes should be sane
# - file sizes should be ok
# - no directory structure
return True
def _ensure_dir(self, dir):
if not os.path.isdir(dir):
os.makedirs(dir)
def _process_index(self, request, result, language=None):
"""Process the list of languages."""
if request.status == 200 and not self.languages:
self.languages = set(result.split())
self._download_checker(language)
else:
logging.debug("Couldn't get list of spell checkers")
#TODO: disable plugin
def _process_tarball(self, request, result, language=None):
# Indicate that we already tried and shouldn't try again later:
self.clients[language] = None
if request.status == 200:
logging.debug('Got a dictionary')
from cStringIO import StringIO
import tarfile
file_obj = StringIO(result)
tar = tarfile.open(fileobj=file_obj)
if not self._tar_ok(tar):
return
DICTDIR = os.path.join(os.environ['APPDATA'], 'enchant', 'myspell')
self._ensure_dir(DICTDIR)
tar.extractall(DICTDIR)
self._seen_languages.pop(language, None)
self._enchant_languages = self.enchant.list_languages()
self.unit_view.update_languages()
else:
logging.debug("Couldn't get a dictionary. Status code: %d" % (request.status))
def _disable_checking(self, text_view):
"""Disable checking on the given text_view."""
if getattr(text_view, 'spell_lang', 'xxxx') is None:
# No change necessary - already disabled
return
spell = None
try:
spell = self.gtkspell.get_from_text_view(text_view)
except SystemError, e:
# At least on Mandriva .get_from_text_view() sometimes returns
# a SystemError without a description. Things seem to work fine
# anyway, so let's ignore it and hope for the best.
pass
if not spell is None:
spell.detach()
text_view.spell_lang = None
if psyco:
psyco.cannotcompile(_disable_checking)
# SIGNAL HANDLERS #
def _on_unit_lang_changed(self, unit_view, text_view, language):
if not self.gtkspell:
return
if language == 'en':
language = 'en_US'
if not language in self._seen_languages and not self.enchant.dict_exists(language):
# Sometimes enchants *wants* a country code, other times it does not.
# For the cases where it requires one, we look for the first language
# code that enchant supports and use that one.
for code in self._enchant_languages:
if code.startswith(language+'_'):
self._seen_languages[language] = code
language = code
break
else:
#logging.debug('No code in enchant.list_languages() that starts with "%s"' % (language))
# If we are on Windows, let's try to download a spell checker:
if os.name == 'nt':
self._download_checker(language)
# If we get it, it will only be activated asynchronously
# later
#TODO: packagekit on Linux?
# We couldn't find a dictionary for "language", so we should make sure that we don't
# have a spell checker for a different language on the text view. See bug 717.
self._disable_checking(text_view)
self._seen_languages[language] = None
return
language = self._seen_languages.get(language, language)
if language is None:
self._disable_checking(text_view)
return
if getattr(text_view, 'spell_lang', None) == language:
# No change necessary - already enabled
return
gobject.idle_add(self._activate_checker, text_view, language, priority=gobject.PRIORITY_LOW)
def _activate_checker(self, text_view, language):
# All the expensive stuff in here called on idle. We mush also isolate
# this away from psyco
try:
spell = None
try:
spell = self.gtkspell.get_from_text_view(text_view)
except SystemError, e:
# At least on Mandriva .get_from_text_view() sometimes returns
# a SystemError without a description. Things seem to work fine
# anyway, so let's ignore it and hope for the best.
pass
if spell is None:
spell = self.gtkspell.Spell(text_view, language)
else:
spell.set_language(language)
spell.recheck_all()
text_view.spell_lang = language
except Exception, e:
logging.exception("Could not initialize spell checking", e)
self.gtkspell = None
#TODO: unload plugin
if psyco:
# Some of the gtkspell stuff can't work with psyco and will dump core
# if we don't avoid psyco compilation
psyco.cannotcompile(_activate_checker)
def _on_populate_popup(self, textbox, menu):
# We can't work with the menu immediately, since gtkspell only adds its
# entries in the event handler.
gobject.idle_add(self._fix_menu, menu)
def _fix_menu(self, menu):
_entries_above_separator = False
_now_remove_separator = False
for item in menu:
if item.get_name() == 'GtkSeparatorMenuItem':
if not _entries_above_separator:
menu.remove(item)
break
label = item.get_property('label')
# For some reason the i18n of gtkspell doesn't work on Windows, so
# we intervene.
if label == "<i>(no suggestions)</i>":
#l10n: This refers to spell checking
item.set_property('label', _("<i>(no suggestions)</i>"))
if label == "Ignore All":
#l10n: This refers to spell checking
item.set_property('label', _("Ignore All"))
if label == "More...":
#l10n: This refers to spelling suggestions
item.set_property('label', _("More..."))
m = _dict_add_re.match(label)
if m:
word = m.group(1)
#l10n: This refers to the spell checking dictionary
item.set_property('label', _('Add "%s" to Dictionary') % word)
# We don't want a language selector - we have our own
if label in dgettext('gtkspell', 'Languages'):
menu.remove(item)
if not _entries_above_separator:
_now_remove_separator = True
continue
_entries_above_separator = True
| gpl-2.0 | -2,843,363,457,551,456,000 | 38.566952 | 106 | 0.589502 | false | 4.232856 | false | false | false |
endlessm/chromium-browser | third_party/catapult/dashboard/dashboard/common/request_handler.py | 1 | 4410 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Simple Request handler using Jinja2 templates."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import logging
import os
import jinja2
import webapp2
from google.appengine.api import users
from dashboard.common import utils
from dashboard.common import xsrf
_DASHBOARD_PYTHON_DIR = os.path.dirname(os.path.dirname(__file__))
JINJA2_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(
[os.path.join(_DASHBOARD_PYTHON_DIR, 'templates')]),
# Security team suggests that autoescaping be enabled.
autoescape=True,
extensions=['jinja2.ext.autoescape'])
class RequestHandler(webapp2.RequestHandler):
"""Base class for requests. Does common template and error handling tasks."""
def RenderHtml(self, template_file, template_values, status=200):
"""Renders HTML given template and values.
Args:
template_file: string. File name under templates directory.
template_values: dict. Mapping of template variables to corresponding.
values.
status: int. HTTP status code.
"""
self.response.set_status(status)
template = JINJA2_ENVIRONMENT.get_template(template_file)
self.GetDynamicVariables(template_values)
self.response.out.write(template.render(template_values))
def RenderStaticHtml(self, filename):
filename = os.path.join(_DASHBOARD_PYTHON_DIR, 'static', filename)
contents = open(filename, 'r')
self.response.out.write(contents.read())
contents.close()
def GetDynamicVariables(self, template_values, request_path=None):
"""Gets the values that vary for every page.
Args:
template_values: dict of name/value pairs.
request_path: path for login urls, None if using the current path.
"""
user_info = ''
xsrf_token = ''
user = users.get_current_user()
display_username = 'Sign in'
title = 'Sign in to an account'
is_admin = False
if user:
display_username = user.email()
title = 'Switch user'
xsrf_token = xsrf.GenerateToken(user)
is_admin = users.is_current_user_admin()
try:
login_url = users.create_login_url(request_path or self.request.path_qs)
except users.RedirectTooLongError:
# On the bug filing pages, the full login URL can be too long. Drop
# the correct redirect URL, since the user should already be logged in at
# this point anyway.
login_url = users.create_login_url('/')
user_info = '<a href="%s" title="%s">%s</a>' % (
login_url, title, display_username)
# Force out of passive login, as it creates multilogin issues.
login_url = login_url.replace('passive=true', 'passive=false')
template_values['login_url'] = login_url
template_values['display_username'] = display_username
template_values['user_info'] = user_info
template_values['is_admin'] = is_admin
template_values['is_internal_user'] = utils.IsInternalUser()
template_values['xsrf_token'] = xsrf_token
template_values['xsrf_input'] = (
'<input type="hidden" name="xsrf_token" value="%s">' % xsrf_token)
return template_values
def ReportError(self, error_message, status=500):
"""Reports the given error to the client and logs the error.
Args:
error_message: The message to log and send to the client.
status: The HTTP response code to use.
"""
logging.error('Reporting error: %r', error_message)
self.response.set_status(status)
self.response.out.write('%s\nrequest_id:%s\n' %
(error_message, utils.GetRequestId()))
def ReportWarning(self, warning_message, status=200):
"""Reports a warning to the client and logs the warning.
Args:
warning_message: The warning message to log (as an error).
status: The http response code to use.
"""
logging.warning('Reporting warning: %r', warning_message)
self.response.set_status(status)
self.response.out.write('%s\nrequest_id:%s\n' %
(warning_message, utils.GetRequestId()))
class InvalidInputError(Exception):
"""An error class for invalid user input query parameter values."""
pass
| bsd-3-clause | -7,051,379,980,682,448,000 | 35.446281 | 79 | 0.685488 | false | 3.848168 | false | false | false |
duguhaotian/superscp | src/superscp_tool.py | 1 | 2572 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
import os
import sys
import subprocess
from pathmanager import node
from pathmanager import link
from pathmanager import paths
from pathmanager import tool
def superscp(argv):
if len(argv) != 5:
usage()
return
src = argv[2]
tip = argv[3]
tdir = argv[4]
srcnid = None
ips = tool.get_ips()
if len(ips) > 1:
print("---------------------------------------")
keys = ips.keys()
i = 0
for key in keys:
print("%d. %s" % (i, ips[key]))
i += 1
print("---------------------------------------")
select = input("which ip use to scp, select the index: ")
print("you select ip is : %s" % ips[keys[select]] )
srcnid = keys[select]
elif len(ips) < 1:
print("no ether for scp")
return
else:
srcnid = ips.keys()[0]
srcnid = tool.get_mac(srcnid)
srcnode = node.get_node(srcnid)
if srcnode == None:
print("current host is not register")
return
print(srcnode.show())
tnodes = node.find_by_ip(tip)
tnode = None
if len(tnodes) > 1:
i = 0
print("***********************************")
for tmp in tnodes:
print("%d. %s" % (i, tmp.show()))
i += 1
print("***********************************")
select = input("which target ip use to scp, select the index: ")
tnode = tnodes[select]
elif len(tnodes) < 1:
print("can not find target node by target ip : %s" % tip)
return
else:
tnode = tnodes[0]
print(tnode.show())
idxs = paths.search_by_target(srcnode, tnode)
path = None
if len(idxs) > 1:
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
i = 0
for idx in idxs:
print("%d. %s" % (i, paths.get(idx).show()))
i += 1
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
select = input("select one path to scp, which index you chose: ")
path = paths.get(idxs[i])
elif len(idxs) < 1:
print("cannot find sourceip: %s to targetip: %s path" % (srcnode.nip, tnode.nip))
return
else:
path = paths.get(idxs[0])
rdir=os.path.split(os.path.realpath(__file__))[0]
scpfname = rdir + "/scptool/.data.superscp"
paths.generate_scp_data(path, scpfname)
cmdstr = rdir+"/scptool/magic.sh " + src + " " + tdir
rts = subprocess.check_output(cmdstr, shell=True).decode().strip()
print("magic return: %s", rts)
| apache-2.0 | -9,122,752,600,971,258,000 | 27.577778 | 89 | 0.485226 | false | 3.485095 | false | false | false |
birocorneliu/conference | lib/to_delete.py | 1 | 2829 | from datetime import datetime
import endpoints
from google.appengine.ext import ndb
from google.appengine.api import taskqueue, memcache
from lib.db import Profile, Conference
from lib.models import ConflictException, ProfileForm, BooleanMessage, ConferenceForm, TeeShirtSize
MEMCACHE_ANNOUNCEMENTS_KEY = "RECENT_ANNOUNCEMENTS"
@ndb.transactional()
def _updateConferenceObject(self, request):
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
# copy ConferenceForm/ProtoRPC Message into dict
data = {field.name: getattr(request, field.name) for field in request.all_fields()}
# update existing conference
conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
# check that conference exists
if not conf:
raise endpoints.NotFoundException(
'No conference found with key: %s' % request.websafeConferenceKey)
# check that user is owner
if user_id != conf.organizerUserId:
raise endpoints.ForbiddenException(
'Only the owner can update the conference.')
# Not getting all the fields, so don't create a new object; just
# copy relevant fields from ConferenceForm to Conference object
for field in request.all_fields():
data = getattr(request, field.name)
# only copy fields where we get data
if data not in (None, []):
# special handling for dates (convert string to Date)
if field.name in ('startDate', 'endDate'):
data = datetime.strptime(data, "%Y-%m-%d").date()
if field.name == 'startDate':
conf.month = data.month
# write to Conference object
setattr(conf, field.name, data)
conf.put()
prof = ndb.Key(Profile, user_id).get()
return self._copyConferenceToForm(conf, getattr(prof, 'displayName'))
def _cacheAnnouncement():
"""Create Announcement & assign to memcache; used by
memcache cron job & putAnnouncement().
"""
confs = Conference.query(ndb.AND(
Conference.seatsAvailable <= 5,
Conference.seatsAvailable > 0)
).fetch(projection=[Conference.name])
if confs:
# If there are almost sold out conferences,
# format announcement and set it in memcache
announcement = '%s %s' % (
'Last chance to attend! The following conferences '
'are nearly sold out:',
', '.join(conf.name for conf in confs))
memcache.set(MEMCACHE_ANNOUNCEMENTS_KEY, announcement)
else:
# If there are no sold out conferences,
# delete the memcache announcements entry
announcement = ""
memcache.delete(MEMCACHE_ANNOUNCEMENTS_KEY)
return announcement
| apache-2.0 | 8,938,905,317,155,928,000 | 35.269231 | 99 | 0.666313 | false | 4.041429 | false | false | false |
aWhereAPI/API-Code-Samples | python/header.py | 1 | 7933 | from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from builtins import str
from builtins import bytes
from future import standard_library
standard_library.install_aliases()
from builtins import object
import requests as rq
import base64
import pprint
import json
import random
from menus import Menus
class AWhereAPI(object):
def __init__(self, api_key, api_secret):
"""
Initializes the AWhereAPI class, which is used to perform HTTP requests
to the aWhere V2 API.
Docs:
http://developer.awhere.com/api/reference
"""
self._fields_url = 'https://api.awhere.com/v2/fields'
self._weather_url = 'https://api.awhere.com/v2/weather/fields'
self.api_key = api_key
self.api_secret = api_secret
self.base_64_encoded_secret_key = self.encode_secret_and_key(
self.api_key, self.api_secret)
self.auth_token = self.get_oauth_token(self.base_64_encoded_secret_key)
self._menu = Menus()
def create_test_field(self):
"""
        Performs a HTTP POST request to create and add a Field to your aWhere App.
Docs:
http://developer.awhere.com/api/reference/fields/create-field
"""
# Each Field requires a unique ID
testField = 'TestField-'
testField += str(random.randint(1, 999))
# Next, we build the request body. Please refer to the docs above for
# more info.
fieldBody = {'id': testField,
'name': testField,
'farmId': 'Farm1Test',
'centerPoint': {'latitude': 39.82,
'longitude': -98.56},
'acres': 100}
# Setup the HTTP request headers
auth_headers = {
"Authorization": "Bearer %s" % self.auth_token,
"Content-Type": 'application/json'
}
# Perform the POST request to create your Field
print('Attempting to create new field....\n')
response = rq.post(self._fields_url,
headers=auth_headers,
json=fieldBody)
# A successful request will return a 201 status code
print('The server responded with a status code of %d \n' %
response.status_code)
pprint.pprint(response.json())
print('\n\n\n')
if response.status_code == 201:
print(
'Your field "{0}" was successfully created!'.format(testField))
else:
            print('An error occurred. Please review the above response and try again.')
def delete_field_by_id(self, field_id):
"""
Performs a HTTP DELETE request to delete a Field from your aWhere App.
Docs: http://developer.awhere.com/api/reference/fields/delete-field
Args:
field_id: The field to be deleted
"""
# Setup the HTTP request headers
auth_headers = {
"Authorization": "Bearer %s" % self.auth_token,
"Content-Type": 'application/json'
}
# Perform the POST request to Delete your Field
response = rq.delete(self._fields_url + '/{0}'.format(field_id),
headers=auth_headers)
print('The server responded with a status code of %d' %
response.status_code)
def encode_secret_and_key(self, key, secret):
"""
Docs:
http://developer.awhere.com/api/authentication
Returns:
        Returns the base64-encoded {key}:{secret} combination, separated by a colon.
"""
# Base64 Encode the Secret and Key
key_secret = '%s:%s' % (key, secret)
#print('\nKey and Secret before Base64 Encoding: %s' % key_secret)
encoded_key_secret = base64.b64encode(
bytes(key_secret, 'utf-8')).decode('ascii')
#print('Key and Secret after Base64 Encoding: %s' % encoded_key_secret)
return encoded_key_secret
def get_fields(self):
"""
Performs a HTTP GET request to obtain all Fields you've created on your aWhere App.
Docs:
http://developer.awhere.com/api/reference/fields/get-fields
"""
# Setup the HTTP request headers
auth_headers = {
"Authorization": "Bearer %s" % self.auth_token,
}
# Perform the HTTP request to obtain a list of all Fields
fields_response = rq.get(self._fields_url,
headers=auth_headers)
responseJSON = fields_response.json()
# Display the count of Fields the user has on their account
print('You have %s fields registered on your account' %
len(responseJSON["fields"]))
# Iterate over the fields and display their name and ID
print('{0} {1} \t\t {2}'.format('#', 'Field Name', 'Field ID'))
print('-------------------------------------------')
count = 0
for field in responseJSON["fields"]:
count += 1
print('{0}. {1} \t {2}\r'.format(
count, field["name"], field["id"]))
def get_weather_by_id(self, field_id):
"""
Performs a HTTP GET request to obtain Forecast, Historical Norms and Forecasts
Docs:
1. Forecast: http://developer.awhere.com/api/forecast-weather-api
2. Historical Norms: http://developer.awhere.com/api/reference/weather/norms
3. Observations: http://developer.awhere.com/api/reference/weather/observations
"""
# Setup the HTTP request headers
auth_headers = {
"Authorization": "Bearer %s" % self.auth_token,
}
# Perform the HTTP request to obtain the Forecast for the Field
response = rq.get(self._weather_url + '/{0}/forecasts?blockSize=24'.format(field_id),
headers=auth_headers)
pprint.pprint(response.json())
print('\nThe above response from the Forecast API endpoint shows the forecast for your field location ({0}).'.format(field_id))
self._menu.os_pause()
# Next, let's obtain the historic norms for a Field
response = rq.get(self._weather_url + '/{0}/norms/04-04'.format(field_id),
headers=auth_headers)
pprint.pprint(response.json())
        print('\nThe above response from the Norms API endpoint shows the averages of the last 10 years for an arbitrary date, April 4th.')
self._menu.os_pause()
# Finally, display the observed weather. Returns the last 7 days of data for the provided Field.
response = rq.get(self._weather_url + '/{0}/observations'.format(field_id),
headers=auth_headers)
pprint.pprint(response.json())
print('\nThe above response from the Observed Weather API endpoint shows the last 7 days of data for the provided field ({0})'.format(field_id))
def get_oauth_token(self, encoded_key_secret):
"""
Demonstrates how to make a HTTP POST request to obtain an OAuth Token
Docs:
http://developer.awhere.com/api/authentication
Returns:
The access token provided by the aWhere API
"""
auth_url = 'https://api.awhere.com/oauth/token'
auth_headers = {
"Authorization": "Basic %s" % encoded_key_secret,
'Content-Type': 'application/x-www-form-urlencoded'
}
body = "grant_type=client_credentials"
response = rq.post(auth_url,
headers=auth_headers,
data=body)
# .json method is a requests lib method that decodes the response
return response.json()['access_token']
| mit | -6,602,144,733,192,833,000 | 38.272277 | 152 | 0.581117 | false | 4.190703 | true | false | false |
clucas111/delineating-linear-elements | Code/clf_preprocessing.py | 1 | 1569 | # -*- coding: utf-8 -*-
"""
@author: Chris Lucas
"""
import numpy as np
import pandas as pd
def merge_dataframes(dfs, key_field_name):
"""
Merges dataframes containing data of one class into one dataframe with
the class in a column.
Parameters
----------
dfs : dict of DataFrames
Dictionary with the class as key and the value as the dataframes
to be merged.
Returns
-------
df : DataFrame
The merged dataframe.
"""
df = pd.DataFrame()
for k, v in dfs.iteritems():
v[key_field_name] = k
df = df.append(v)
return df
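# Example (hypothetical class names): merging per-class feature tables
#   df = merge_dataframes({'tree': df_tree, 'wall': df_wall}, 'class')
# yields one DataFrame with a 'class' column identifying each row.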
def correlated_features(df, features, corr_th=0.98):
"""
Determines highly correlated features which can consequently be dropped.
Parameters
----------
df : DataFrame
The feature values.
features : list of strings
The names of the features (column names in the dataframe)
to be checked.
corr_th : float
The correlation coefficient threshold to determine what is highly
correlated.
Returns
-------
    drops : list of strings
The names of the features which can be dropped.
"""
df_corr = df[features].astype(np.float64).corr(method='pearson')
mask = np.ones(df_corr.columns.size) - np.eye(df_corr.columns.size)
df_corr = mask * df_corr
drops = []
for col in df_corr.columns.values:
if not np.in1d([col], drops):
corr = df_corr[abs(df_corr[col]) > corr_th].index
drops = np.union1d(drops, corr)
return drops
| apache-2.0 | -1,480,821,867,940,271,600 | 23.515625 | 76 | 0.605481 | false | 3.817518 | false | false | false |
jeffery-do/Vizdoombot | examples/python/scenarios.py | 1 | 2862 | #!/usr/bin/env python
#####################################################################
# This script presents how to run some scenarios.
# Configuration is loaded from "../../examples/config/<SCENARIO_NAME>.cfg" file.
# <episodes> number of episodes are played.
# Random combination of buttons is chosen for every action.
# Game variables from state and last reward are printed.
#
# To see the scenario description go to "../../scenarios/README.md"
#####################################################################
from __future__ import print_function
import itertools as it
from random import choice
from time import sleep
from vizdoom import DoomGame, ScreenResolution
game = DoomGame()
# Choose scenario config file you wish to watch.
# Don't load two configs cause the second will overrite the first one.
# Multiple config files are ok but combining these ones doesn't make much sense.
game.load_config("../../examples/config/basic.cfg")
# game.load_config("../../examples/config/simpler_basic.cfg")
# game.load_config("../../examples/config/rocket_basic.cfg")
# game.load_config("../../examples/config/deadly_corridor.cfg")
# game.load_config("../../examples/config/deathmatch.cfg")
# game.load_config("../../examples/config/defend_the_center.cfg")
# game.load_config("../../examples/config/defend_the_line.cfg")
# game.load_config("../../examples/config/health_gathering.cfg")
# game.load_config("../../examples/config/my_way_home.cfg")
# game.load_config("../../examples/config/predict_position.cfg")
# game.load_config("../../examples/config/take_cover.cfg")
# Makes the screen bigger to see more details.
game.set_screen_resolution(ScreenResolution.RES_640X480)
game.set_window_visible(True)
game.init()
# Creates all possible actions depending on how many buttons there are.
actions_num = game.get_available_buttons_size()
actions = []
for perm in it.product([False, True], repeat=actions_num):
actions.append(list(perm))
episodes = 10
sleep_time = 0.028
for i in range(episodes):
print("Episode #" + str(i + 1))
# Not needed for the first episode but the loop is nicer.
game.new_episode()
while not game.is_episode_finished():
        # Get the state and possibly do something with it
state = game.get_state()
# Makes a random action and save the reward.
reward = game.make_action(choice(actions))
print("State #" + str(state.number))
print("Game Variables:", state.game_variables)
print("Performed action:", game.get_last_action())
print("Last Reward:", reward)
print("=====================")
# Sleep some time because processing is too fast to watch.
if sleep_time > 0:
sleep(sleep_time)
print("Episode finished!")
print("total reward:", game.get_total_reward())
print("************************")
| mit | 2,760,447,843,961,944,000 | 36.168831 | 80 | 0.650245 | false | 3.846774 | true | false | false |
dannyperry571/theapprentice | script.module.nanscrapers/lib/nanscrapers/scraperplugins/sezonlukdizi.py | 1 | 4411 | import json
import re
import urlparse
import requests
from BeautifulSoup import BeautifulSoup
from nanscrapers.common import random_agent, replaceHTMLCodes
from ..scraper import Scraper
import xbmc
class Sezonluldizi(Scraper):
domains = ['sezonlukdizi.com']
name = "sezonlukdizi"
def __init__(self):
self.base_link = 'http://sezonlukdizi.com'
def scrape_episode(self, title, show_year, year, season, episode, imdb, tvdb):
url_title = title.replace(' ', '-').replace('.', '-').replace(":","").replace("!","").replace("?","").lower()
episode_url = '/%s/%01d-sezon-%01d-bolum.html' % (url_title, int(season), int(episode))
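        # Example (hypothetical): title='Lost', season=1, episode=2 yields
        # episode_url == '/lost/1-sezon-2-bolum.html'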
return self.sources(replaceHTMLCodes(episode_url))
def sources(self, url):
sources = []
try:
if url == None: return sources
absolute_url = urlparse.urljoin(self.base_link, url)
headers = {'User-Agent': random_agent()}
html = BeautifulSoup(requests.get(absolute_url, headers=headers, timeout=30).content)
pages = []
embed = html.findAll('div', attrs={'id': 'embed'})[0]
pages.append(embed.findAll('iframe')[0]["src"])
for page in pages:
try:
if not page.startswith('http'):
page = 'http:%s' % page
html = BeautifulSoup(requests.get(page, headers=headers, timeout=30).content)
# captions = html.findAll(text=re.compile('kind\s*:\s*(?:\'|\")captions(?:\'|\")'))
# if not captions: break
try:
link_text = html.findAll(text=re.compile('url\s*:\s*\'(http(?:s|)://api.pcloud.com/.+?)\''))[0]
link = re.findall('url\s*:\s*\'(http(?:s|)://api.pcloud.com/.+?)\'', link_text)[0]
variants = json.loads(requests.get(link, headers=headers, timeout=30).content)['variants']
for variant in variants:
if 'hosts' in variant and 'path' in variant and 'height' in variant:
video_url = '%s%s' % (variant['hosts'][0], variant['path'])
heigth = variant['height']
if not video_url.startswith('http'):
video_url = 'http://%s' % video_url
sources.append(
{'source': 'cdn', 'quality': str(heigth), 'scraper': self.name, 'url': video_url,
'direct': False})
except:
pass
try:
links_text = html.findAll(
text=re.compile('"?file"?\s*:\s*"(.+?)"\s*,\s*"?label"?\s*:\s*"(.+?)"'))
if len(links_text) > 0:
for link_text in links_text:
try:
links = re.findall('"?file"?\s*:\s*"([^"]+)"\s*,\s*"?label"?\s*:\s*"(\d+)p?[^"]*"',
link_text)
for link in links:
video_url = link[0]
if not video_url.startswith('http'):
video_url = 'http:%s' % video_url
try:
req = requests.head(video_url, headers=headers)
if req.headers['Location'] != "":
video_url = req.headers['Location']
except:
pass
quality = link[1]
sources.append(
{'source': 'google video', 'quality': quality, 'scraper': self.name,
'url': video_url, 'direct': True})
except:
continue
except:
pass
except:
pass
except:
pass
return sources
| gpl-2.0 | -8,988,593,595,415,021,000 | 45.431579 | 119 | 0.407844 | false | 4.737916 | false | false | false |
hortonworks/hortonworks-sandbox | desktop/core/ext-py/Twisted/doc/web/howto/listings/PicturePile/picturepile.py | 1 | 1917 | """Run this with twistd -y."""
import os
from twisted.application import service, internet
from twisted.web.woven import page
from twisted.web import server, static
rootDirectory = os.path.expanduser("~/Pictures")
class DirectoryListing(page.Page):
templateFile = "directory-listing.html"
templateDirectory = os.path.split(os.path.abspath(__file__))[0]
def initialize(self, *args, **kwargs):
self.directory = kwargs['directory']
def wmfactory_title(self, request):
return self.directory
def wmfactory_directory(self, request):
files = os.listdir(self.directory)
for i in xrange(len(files)):
if os.path.isdir(os.path.join(self.directory,files[i])):
files[i] = files[i] + '/'
return files
def getDynamicChild(self, name, request):
# Protect against malicious URLs like '..'
if static.isDangerous(name):
return static.dangerousPathError
# Return a DirectoryListing or an ImageDisplay resource, depending on
# whether the path corresponds to a directory or to a file
path = os.path.join(self.directory,name)
if os.path.exists(path):
if os.path.isdir(path):
return DirectoryListing(directory=path)
else:
return ImageDisplay(image=path)
class ImageDisplay(page.Page):
templateFile="image-display.html"
templateDirectory = os.path.split(os.path.abspath(__file__))[0]
def initialize(self, *args, **kwargs):
self.image = kwargs['image']
def wmfactory_image(self, request):
return self.image
def wchild_preview(self, request):
return static.File(self.image)
site = server.Site(DirectoryListing(directory=rootDirectory))
application = service.Application("ImagePool")
parent = service.IServiceCollection(application)
internet.TCPServer(8088, site).setServiceParent(parent)
| apache-2.0 | 396,252,148,935,259,800 | 30.95 | 75 | 0.674491 | false | 3.952577 | false | false | false |
MeteorKepler/RICGA | ricga/ops/image_processing.py | 1 | 8197 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Helper functions for image preprocessing."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from ricga.reference.tf2keras_image_process import tf2keras_image
def distort_image(image, thread_id):
"""Perform random distortions on an image.
Args:
image: A float32 Tensor of shape [height, width, 3] with values in [0, 1).
thread_id: Preprocessing thread id used to select the ordering of color
distortions. There should be a multiple of 2 preprocessing threads.
Returns:
distorted_image: A float32 Tensor of shape [height, width, 3] with values in
[0, 1].
"""
# Randomly flip horizontally.
with tf.name_scope("flip_horizontal", values=[image]):
image = tf.image.random_flip_left_right(image)
# Randomly distort the colors based on thread id.
color_ordering = thread_id % 2
with tf.name_scope("distort_color", values=[image]):
if color_ordering == 0:
image = tf.image.random_brightness(image, max_delta=32. / 255.)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_hue(image, max_delta=0.032)
image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
elif color_ordering == 1:
image = tf.image.random_brightness(image, max_delta=32. / 255.)
image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_hue(image, max_delta=0.032)
# The random_* ops do not necessarily clamp.
image = tf.clip_by_value(image, 0.0, 1.0)
return image
def process_image(encoded_image,
is_training,
height,
width,
ssd_model,
resize_height=346,
resize_width=346,
thread_id=0,
image_format="jpeg"):
"""Decode an image, resize and apply random distortions.
In training, images are distorted slightly differently depending on thread_id.
Args:
encoded_image: String Tensor containing the image.
is_training: Boolean; whether preprocessing for training or eval.
height: Height of the output image.
width: Width of the output image.
ssd_model: SSD300 model.
resize_height: If > 0, resize height before crop to final dimensions.
resize_width: If > 0, resize width before crop to final dimensions.
thread_id: Preprocessing thread id used to select the ordering of color
distortions. There should be a multiple of 2 preprocessing threads.
image_format: "jpeg" or "png".
Returns:
A float32 Tensor of shape [height, width, 3] with values in [-1, 1].
Raises:
ValueError: If image_format is invalid.
"""
# Helper function to log an image summary to the visualizer. Summaries are
  # only logged in half of the threads.
def image_summary(name, image_to_sum):
if thread_id % 2 == 0:
tf.summary.image(name, tf.expand_dims(image_to_sum, 0))
# Decode image into a float32 Tensor of shape [?, ?, 3] with values in [0, 1).
with tf.name_scope("decode", values=[encoded_image]):
if image_format == "jpeg":
image = tf.image.decode_jpeg(encoded_image, channels=3)
elif image_format == "png":
image = tf.image.decode_png(encoded_image, channels=3)
else:
raise ValueError("Invalid image format: %s" % image_format)
image = tf.image.convert_image_dtype(image, dtype=tf.float32)
original_image = image
image_summary("original_image", image)
# Resize image.
assert (resize_height > 0) == (resize_width > 0)
if resize_height:
image = tf.image.resize_images(image,
size=[resize_height, resize_width],
method=tf.image.ResizeMethod.BILINEAR)
# Crop to final dimensions.
if is_training:
image = tf.random_crop(image, [height, width, 3])
else:
# Central crop, assuming resize_height > height, resize_width > width.
image = tf.image.resize_image_with_crop_or_pad(image, height, width)
image_summary("resized_image", image)
# Randomly distort the image.
if is_training:
image = distort_image(image, thread_id)
image_summary("final_image", image)
# Rescale to [-1,1] instead of [0, 1]
image = tf.subtract(image, 0.5)
image = tf.multiply(image, 2.0)
# ssd process
image_300x300 = tf.image.resize_images(original_image, [300, 300])
image_300x300_ssd_input = tf2keras_image(image_300x300)
# with tf.variable_scope("ssd"):
ssd_output = ssd_model(tf.expand_dims(image_300x300_ssd_input, 0))[0]
with tf.variable_scope("ssd_out_processing"):
mbox_loc = ssd_output[:, :4]
variances = ssd_output[:, -4:]
mbox_priorbox = ssd_output[:, -8:-4]
mbox_conf = ssd_output[:, 4:-8]
prior_width = mbox_priorbox[:, 2] - mbox_priorbox[:, 0]
prior_height = mbox_priorbox[:, 3] - mbox_priorbox[:, 1]
prior_center_x = 0.5 * (mbox_priorbox[:, 2] + mbox_priorbox[:, 0])
prior_center_y = 0.5 * (mbox_priorbox[:, 3] + mbox_priorbox[:, 1])
decode_bbox_center_x = mbox_loc[:, 0] * prior_width * variances[:, 0]
decode_bbox_center_x += prior_center_x
    decode_bbox_center_y = mbox_loc[:, 1] * prior_height * variances[:, 1]
decode_bbox_center_y += prior_center_y
decode_bbox_width = tf.exp(mbox_loc[:, 2] * variances[:, 2])
decode_bbox_width *= prior_width
decode_bbox_height = tf.exp(mbox_loc[:, 3] * variances[:, 3])
decode_bbox_height *= prior_height
decode_bbox_xmin = tf.expand_dims(decode_bbox_center_x - 0.5 * decode_bbox_width, -1)
decode_bbox_ymin = tf.expand_dims(decode_bbox_center_y - 0.5 * decode_bbox_height, -1)
decode_bbox_xmax = tf.expand_dims(decode_bbox_center_x + 0.5 * decode_bbox_width, -1)
decode_bbox_ymax = tf.expand_dims(decode_bbox_center_y + 0.5 * decode_bbox_height, -1)
    # Boxes in [ymin, xmin, ymax, xmax] order, as expected by
    # tf.image.non_max_suppression and tf.image.crop_and_resize.
    decode_bbox = tf.concat((decode_bbox_ymin,
                             decode_bbox_xmin,
                             decode_bbox_ymax,
                             decode_bbox_xmax), axis=-1)
decode_bbox = tf.minimum(tf.maximum(decode_bbox, 0.0), 1.0)
mbox_conf_without_background = tf.slice(mbox_conf, [0, 1], [-1, -1])
mbox_conf_max = tf.reduce_max(mbox_conf_without_background, 1)
idx = tf.image.non_max_suppression(decode_bbox, mbox_conf_max, max_output_size=1)
idx = tf.reshape(idx, [1])
good_box = decode_bbox[idx[0]]
region_image = tf.image.crop_and_resize(tf.expand_dims(image_300x300, 0),
boxes=tf.expand_dims(good_box, 0),
box_ind=tf.constant([0], dtype=tf.int32),
crop_size=[height, width],
name="region_images")[0]
image_summary("region_image", region_image)
# Rescale to [-1,1] instead of [0, 1]
region_image = tf.subtract(region_image, 0.5)
region_image = tf.multiply(region_image, 2.0)
return image, region_image
# return ssd, region_image
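
# Example usage (illustrative sketch; `my_ssd_model` and the file path are
# hypothetical placeholders, not part of this module):
#
#   encoded = tf.gfile.FastGFile("example.jpg", "rb").read()
#   image, region = process_image(encoded, is_training=True,
#                                 height=224, width=224,
#                                 ssd_model=my_ssd_model)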
| apache-2.0 | -7,595,954,824,494,623,000 | 41.91623 | 94 | 0.605831 | false | 3.609423 | false | false | false |
MaximeBiset/care4care | main/migrations/0040_auto_20141205_1736.py | 1 | 12414 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import multiselectfield.db.fields
import django.core.validators
import re
class Migration(migrations.Migration):
dependencies = [
('main', '0039_merge'),
]
operations = [
migrations.AlterField(
model_name='contact',
name='comments',
field=models.CharField(max_length=255, verbose_name='Additional comments', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='contact',
name='email',
field=models.EmailField(max_length=75, verbose_name='Email address', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='contact',
name='first_name',
field=models.CharField(max_length=30, verbose_name='First name'),
preserve_default=True,
),
migrations.AlterField(
model_name='contact',
name='languages',
field=multiselectfield.db.fields.MultiSelectField(max_length=8, verbose_name='Spoken languages', choices=[('fr', 'French'), ('en', 'English'), ('nl', 'Dutch')], blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='contact',
name='last_name',
field=models.CharField(max_length=30, verbose_name='Name'),
preserve_default=True,
),
migrations.AlterField(
model_name='contact',
name='location',
field=models.CharField(max_length=256, verbose_name='Address', null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='contact',
name='mobile_number',
field=models.CharField(max_length=16, validators=[django.core.validators.RegexValidator(message="Your phone number must be in format '+99999999'. Up to 15 digits.", regex='^\\+?1?\\d{9,15}$')], verbose_name='Phone number (mobile)', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='contact',
name='phone_number',
field=models.CharField(max_length=16, validators=[django.core.validators.RegexValidator(message="Your phone number must be in format '+99999999'. Up to 15 digits.", regex='^\\+?1?\\d{9,15}$')], verbose_name='Phone number (home)', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='contact',
name='relationship',
field=models.CharField(max_length=255, verbose_name='Your relationship with that person', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='emergencycontact',
name='first_name',
field=models.CharField(max_length=30, verbose_name='First name'),
preserve_default=True,
),
migrations.AlterField(
model_name='emergencycontact',
name='languages',
field=multiselectfield.db.fields.MultiSelectField(max_length=8, verbose_name='Spoken languages', choices=[('fr', 'French'), ('en', 'English'), ('nl', 'Dutch')], blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='emergencycontact',
name='last_name',
field=models.CharField(max_length=30, verbose_name='Name'),
preserve_default=True,
),
migrations.AlterField(
model_name='emergencycontact',
name='location',
field=models.CharField(max_length=256, verbose_name='Address', null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='emergencycontact',
name='mobile_number',
field=models.CharField(max_length=16, validators=[django.core.validators.RegexValidator(message="Your phone number must be in format '+99999999'. Up to 15 digits.", regex='^\\+?1?\\d{9,15}$')], verbose_name='Phone number (mobile)', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='emergencycontact',
name='order',
field=models.IntegerField(choices=[(1, 'First contact'), (2, 'Contact'), (3, 'Last contact')], default=0, verbose_name='Priority'),
preserve_default=True,
),
migrations.AlterField(
model_name='emergencycontact',
name='phone_number',
field=models.CharField(max_length=16, validators=[django.core.validators.RegexValidator(message="Your phone number must be in format '+99999999'. Up to 15 digits.", regex='^\\+?1?\\d{9,15}$')], verbose_name='Phone number (home)', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='birth_date',
field=models.DateField(verbose_name='Birthday', null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='credit',
field=models.IntegerField(default=0, verbose_name='Remaining credit'),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='email',
field=models.EmailField(max_length=75, verbose_name='Email address'),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='first_name',
field=models.CharField(max_length=30, verbose_name='First name'),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='how_found',
field=multiselectfield.db.fields.MultiSelectField(max_length=41, verbose_name='How did you hear about care4care ?', choices=[('internet', 'The Internet'), ('show', 'A presentation, brochure, flyer,... '), ('branch', 'The local branch'), ('member', 'Another member'), ('friends', 'Friends or family'), ('other', 'Other')]),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='languages',
field=multiselectfield.db.fields.MultiSelectField(max_length=8, verbose_name='Spoken languages', choices=[('fr', 'French'), ('en', 'English'), ('nl', 'Dutch')], blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='last_name',
field=models.CharField(max_length=30, verbose_name='Name'),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='location',
field=models.CharField(max_length=256, verbose_name='Address', null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='mobile_number',
field=models.CharField(max_length=16, validators=[django.core.validators.RegexValidator(message="Your phone number must be in format '+99999999'. Up to 15 digits.", regex='^\\+?1?\\d{9,15}$')], verbose_name='Phone number (mobile)', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='phone_number',
field=models.CharField(max_length=16, validators=[django.core.validators.RegexValidator(message="Your phone number must be in format '+99999999'. Up to 15 digits.", regex='^\\+?1?\\d{9,15}$')], verbose_name='Phone number (home)', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='status',
field=models.IntegerField(choices=[(1, 'Active'), (2, 'On vacation'), (3, 'Disabled')], default=1),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='user_type',
field=models.IntegerField(choices=[(1, 'Member'), (2, 'Non-member'), (3, 'Verified member')], verbose_name='Account type', default=1, help_text='A member can help or be helped while a non-member is a professional who registers to access patient data. Please choose the one that suits you'),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='username',
            field=models.CharField(max_length=30, unique=True, verbose_name='Username', validators=[django.core.validators.RegexValidator(re.compile('^[\\w.@+-]+$', 32), 'Enter a valid username. No more than 30 characters. There may be numbers and characters @/./+/-/_', 'invalid')]),
preserve_default=True,
),
migrations.AlterField(
model_name='verifiedinformation',
name='criminal_record',
field=models.FileField(upload_to='documents/', null=True, verbose_name='Criminal record'),
preserve_default=True,
),
migrations.AlterField(
model_name='verifiedinformation',
name='recomendation_letter_1',
field=models.FileField(upload_to='documents/', null=True, verbose_name='Letter of recommendation n°1'),
preserve_default=True,
),
migrations.AlterField(
model_name='verifiedinformation',
name='recomendation_letter_2',
            field=models.FileField(upload_to='documents/', null=True, verbose_name='Letter of recommendation n°2'),
preserve_default=True,
),
migrations.AlterField(
model_name='verifieduser',
name='additional_info',
field=models.TextField(max_length=300, verbose_name='Additional information', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='verifieduser',
name='can_wheelchair',
field=models.BooleanField(choices=[(True, 'Yes'), (False, 'No')], verbose_name='Can you carry a wheelchair in your car?', default=False),
preserve_default=True,
),
migrations.AlterField(
model_name='verifieduser',
name='drive_license',
field=multiselectfield.db.fields.MultiSelectField(max_length=11, verbose_name='Type of driving license', choices=[(1, 'Moped'), (2, 'Motorcycle'), (3, 'Car'), (4, 'Truck'), (5, 'Bus'), (6, 'Tractor')], blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='verifieduser',
name='have_car',
field=models.BooleanField(choices=[(True, 'Yes'), (False, 'No')], verbose_name='Do you have a car?', default=False),
preserve_default=True,
),
migrations.AlterField(
model_name='verifieduser',
name='hobbies',
field=models.TextField(max_length=200, verbose_name='Your hobby', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='verifieduser',
name='mail_preferences',
field=models.IntegerField(choices=[(1, 'Message box'), (2, 'Mail')], default=1, verbose_name='Receive my messages'),
preserve_default=True,
),
migrations.AlterField(
model_name='verifieduser',
name='offered_job',
field=multiselectfield.db.fields.MultiSelectField(max_length=21, verbose_name='What jobs you want to do?', choices=[('1', 'Visit home'), ('2', 'Companionship'), ('3', 'Transport by car'), ('4', 'Shopping'), ('5', 'House sitting'), ('6', 'Manual jobs'), ('7', 'Gardening'), ('8', 'Pet sitting'), ('9', 'Personal care'), ('a', 'Administrative'), ('b', 'Other')], blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='verifieduser',
name='receive_help_from_who',
field=models.IntegerField(choices=[(5, 'All'), (3, 'Verified member'), (6, 'My favorite members')], default=5, verbose_name='Receive offers and demands'),
preserve_default=True,
),
]
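
# Applying this migration (sketch; assumes a standard Django project with
# manage.py at the project root):
#
#   python manage.py migrate main 0040_auto_20141205_1736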
| agpl-3.0 | -4,895,663,956,325,399,000 | 47.29572 | 385 | 0.584757 | false | 4.21317 | false | false | false |
martinbalsam/timing-rp | timingrp/sanit.py | 1 | 2984 | import re
import argparse
"""
This script sanitizes the raw data exported from "Timing.app":
it adds the proper double quotes around the "Path" attribute,
and it removes unwanted double quotes inside the "Path" attribute
that may otherwise escape the field unexpectedly.
--- TODO ---
speedup:
As of now I'm iterating twice over the whole list.
Maybe the sanitizing can be done in a single regex, but I spent way too much
time coming up with these. I'll leave it to somebody else (no one).
cleanup:
delete unused files
"""
parser = argparse.ArgumentParser(description="writes the input data into a sanitized .csv file")
parser.add_argument('path', nargs=1, type = str, help='the path of the input raw .csv file to parse')
args = parser.parse_args()
"""paths of the input and output data
each file has the same name structure:
file.csv
file_tmp_.csv
file_san_.csv (this is the sanitized output we want)
file_err_.log
"""
input_data = args.path[0]
temp_data = args.path[0][:-4]+"_tmp_.csv"
output_data = args.path[0][:-4]+"_san_.csv"
errors_log = args.path[0][:-4]+"_err_.log"
# NOTE: this only works if there are no unexpected line breaks
errors = open(errors_log,'w')
with open(input_data,'r') as original:
with open(temp_data,'w') as new:
#writes the csv header
new.write(original.readline())
for line in original:
#regex to isolate the 'path' attribute
matches = re.search('(^[^,]+,)(.*)(,\d{2}/\d{2}/\d{2} \d{2}:\d{2},\d{2}/\d{2}/\d{2} \d{2}:\d{2},.*$)', line)
try:
#add quotation around the path attribute and writes it in a new file
new.write(matches.group(1)+'"'+matches.group(2)+'"'+matches.group(3)+'\n')
#catches lines that don't match the regex and writes them in an errors.log file
except AttributeError:
errors.write(line)
continue
# Now recheck the whole list to catch any extra double quotation signs (")
# in the path attribute; if found, delete them.
with open(temp_data,'r') as old:
with open(output_data,'w') as new:
new.write(old.readline())
for line in old:
#regex that catches any path that contains one or more double quotation sign (")
matches = re.search('(^[^,]+,")(.*".*)(",\d{2}/\d{2}/\d{2} \d{2}:\d{2},\d{2}/\d{2}/\d{2} \d{2}:\d{2},.*$)', line)
if matches is not None:
#deletes any double quotation mark (") and writes tha sanitized line in a new file
new.write(matches.group(1)+matches.group(2).replace('"','')+matches.group(3)+'\n')
#if the line is ok, it just writes the line in the new file
else:
                new.write(line)

errors.close()
#populate a panda DataFrame object with the data, and the proper datetime objects
"""dateparse = lambda x: pd.datetime.strptime(x, '%d/%m/%y %H:%M')
a = pd.read_csv('bin/fine.csv', parse_dates=[2,3], date_parser=dateparse)
"""
| gpl-2.0 | -1,916,926,826,659,169,300 | 39.324324 | 125 | 0.629692 | false | 3.337808 | false | false | false |
chromium/chromium | build/android/gyp/native_libraries_template.py | 7 | 1781 | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
NATIVE_LIBRARIES_TEMPLATE = """\
// This file is autogenerated by
// build/android/gyp/write_native_libraries_java.py
// Please do not change its content.
package org.chromium.build;
public class NativeLibraries {{
public static final int CPU_FAMILY_UNKNOWN = 0;
public static final int CPU_FAMILY_ARM = 1;
public static final int CPU_FAMILY_MIPS = 2;
public static final int CPU_FAMILY_X86 = 3;
// Set to true to enable the use of the Chromium Linker.
public static {MAYBE_FINAL}boolean sUseLinker{USE_LINKER};
public static {MAYBE_FINAL}boolean sUseLibraryInZipFile{USE_LIBRARY_IN_ZIP_FILE};
public static {MAYBE_FINAL}boolean sUseModernLinker{USE_MODERN_LINKER};
// This is the list of native libraries to be loaded (in the correct order)
// by LibraryLoader.java.
// TODO(cjhopman): This is public since it is referenced by NativeTestActivity.java
// directly. The two ways of library loading should be refactored into one.
public static {MAYBE_FINAL}String[] LIBRARIES = {{{LIBRARIES}}};
// This is the expected version of the 'main' native library, which is the one that
// implements the initial set of base JNI functions including
// base::android::nativeGetVersionName()
// TODO(torne): This is public to work around classloader issues in Trichrome
// where NativeLibraries is not in the same dex as LibraryLoader.
// We should instead split up Java code along package boundaries.
public static {MAYBE_FINAL}String sVersionNumber = {VERSION_NUMBER};
public static {MAYBE_FINAL}int sCpuFamily = {CPU_FAMILY};
}}
"""
| bsd-3-clause | -5,940,693,984,739,795,000 | 44.666667 | 87 | 0.730488 | false | 3.710417 | false | false | false |
shobhitmishra/CodingProblems | LeetCode/Session3/BinaryTreePaths.py | 1 | 1149 | from typing import List
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def binaryTreePaths(self, root: TreeNode) -> List[str]:
result = []
if not root:
return result
self.binaryTreePathsHelper(root, [], result)
return result
def binaryTreePathsHelper(self, root, pathSoFar, result):
if root:
pathSoFar.append(str(root.val))
if not root.left and not root.right:
path = "->".join(pathSoFar)
result.append(path)
self.binaryTreePathsHelper(root.left, pathSoFar, result)
self.binaryTreePathsHelper(root.right, pathSoFar, result)
pathSoFar.pop()
root = TreeNode(10)
root.left = TreeNode(3)
root.left.left = TreeNode(2)
root.left.right = TreeNode(8)
root.left.right.left = TreeNode(7)
root.left.right.right = TreeNode(9)
root.right = TreeNode(15)
root.right.left = TreeNode(13)
root.right.right = TreeNode(17)
root.right.right.right = TreeNode(19)
ob = Solution()
print(ob.binaryTreePaths(root))
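# Expected output for the tree above:
# ['10->3->2', '10->3->8->7', '10->3->8->9', '10->15->13', '10->15->17->19']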
| mit | -7,620,993,149,577,762,000 | 26.357143 | 69 | 0.619669 | false | 3.389381 | false | false | false |
wavelets/ThinkStats2 | code/moments.py | 1 | 1149 | """This file contains code for use with "Think Bayes",
by Allen B. Downey, available from greenteapress.com
Copyright 2012 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
import math
import thinkstats2
def RawMoment(xs, k):
return sum(x**k for x in xs) / float(len(xs))
def CentralMoment(xs, k):
xbar = RawMoment(xs, 1)
return sum((x - xbar)**k for x in xs) / len(xs)
def StandardizedMoment(xs, k):
var = CentralMoment(xs, 2)
sigma = math.sqrt(var)
return CentralMoment(xs, k) / sigma**k
def Skewness(xs):
return StandardizedMoment(xs, 3)
def Median(xs):
cdf = thinkstats2.MakeCdfFromList(xs)
return cdf.Value(0.5)
def PearsonMedianSkewness(xs):
median = Median(xs)
mean = RawMoment(xs, 1)
var = CentralMoment(xs, 2)
std = math.sqrt(var)
gp = 3 * (mean - median) / std
return gp
def main():
xs = range(10)
print 'mean', RawMoment(xs, 1)
print 'median', Median(xs)
print 'var', CentralMoment(xs, 2)
print 'skewness', Skewness(xs)
print 'Pearson skewness', PearsonMedianSkewness(xs)
if __name__ == '__main__':
main()
| gpl-3.0 | -3,409,726,838,714,081,000 | 19.890909 | 55 | 0.646649 | false | 2.729216 | false | false | false |
shinigota/dagpi_splendor | SplendorCode/src/mvc/Display.py | 1 | 32173 | from tkinter import *
from src.element.Card import Card
from src.element.ResourceType import ResourceType
from src.game.GameState import GameState
from src.mvc.EventType import EventType
from src.mvc.GameBoard import GameBoard
from src.mvc.GameRules import GameRules
from src.player.AI import AI
import time
class Display:
window = None
text = "test"
game_rules = None
game_board = None
w = None
h = None
nb_players = 1
false_position = None
def __init__(self):
self.window = Tk()
self.create_image()
def create_window(self):
print('Display -- create_window')
self.window.title(GameRules.game_name)
self.w, self.h = 1900, 1000
self.window.geometry("%dx%d+0+0" % (self.w, self.h))
self.window.config(bg=None)
def make_entry(self, parent, caption, var):
print('Display -- make_entry')
Label(parent, text=caption).pack()
entry = Entry(parent, textvariable=var)
entry.pack()
return entry
def popup_start_click_action(self):
print('Display -- popup_start_click_action')
popup = Toplevel(height=250, width=280)
# popup.protocol("WM_DELETE_WINDOW", self.on_exit)
Label(popup, text="Sélectionnez vos parametres", height=1,
width=30).pack()
self.user_name = StringVar()
self.user_name.set("Player")
Label(popup, text="Name:").pack()
entry = Entry(popup, textvariable=self.user_name)
entry.pack()
        self.nb_players = 1
self.players_level = dict()
self.players_canvas = dict()
self.players_position = dict()
self.players_position[self.nb_players] = Variable()
self.players_position[self.nb_players].set(int(self.nb_players))
Label(popup, text="Position:").pack()
entry = Entry(popup,
textvariable=self.players_position[self.nb_players])
entry.pack()
canvas = Canvas(popup, height=20,
width=160, background="grey")
canvas.create_text(82, 10, text="Ajouter un adversaire", fill="black")
canvas.bind("<Button-1>", lambda event,
p=popup,
c=canvas:
self.add_player_click_action(p, c))
canvas.pack()
# self.canvas_validate = Canvas(popup, height=20,
# width=60, background="grey")
# self.canvas_validate.create_text(30, 10, text="Valider",
# fill="black")
# self.canvas_validate.bind("<Button-1>", lambda event,
# p=popup:
# self.validate_popup_action(p))
# self.canvas_validate.pack()
def add_player_click_action(self, popup, canvas):
print('Display -- add_player_click_action')
try:
self.false_position.pack_forget()
except:
pass
if self.nb_players >= 2:
self.canvas_validate.pack_forget()
text_nb_players = self.nb_players
Label(popup, text="%s %s" % ("Adversaire n°", text_nb_players)).pack()
self.nb_players = self.nb_players + 1
self.players_level[self.nb_players] = Variable()
self.players_level[self.nb_players].set(0)
self.players_position[self.nb_players] = Variable()
c = Checkbutton(popup, text="Niveau difficile",
variable=self.players_level[self.nb_players])
c.pack()
self.players_position[self.nb_players].set(int(self.nb_players))
Label(popup, text="Position:").pack()
entry = Entry(popup,
textvariable=self.players_position[self.nb_players])
entry.pack()
self.players_canvas[self.nb_players] = c
if self.nb_players == 4:
self.canvas_validate.pack()
canvas.pack_forget()
elif self.nb_players == 2:
self.canvas_validate = Canvas(popup, height=20,
width=60, background="grey")
self.canvas_validate.create_text(30, 10, text="Valider",
fill="black")
self.canvas_validate.bind("<Button-1>", lambda event,
p=popup:
self.validate_popup_action(p))
self.canvas_validate.pack()
else:
self.canvas_validate.pack()
def validate_popup_action(self, popup):
print('Display -- validate_popup_action')
try:
self.false_position.pack_forget()
except:
pass
accept = True
temp_positions = []
for item in self.players_position:
try:
temp_positions.append(int(self.players_position[item].get()))
except:
accept = False
if accept:
for item in range(1, self.nb_players + 1):
if item not in temp_positions:
accept = False
if accept:
popup.destroy()
final_players = []
for key in range(1, self.nb_players+1):
players_dict = dict()
players_dict["position"] = self.players_position[key].get()
if key == 1:
players_dict["name"] = self.user_name.get()
players_dict["difficulty"] = 0
else:
players_dict["name"] = "IA %d" % key
players_dict["difficulty"] = self.players_level[key].get()
final_players.append(players_dict)
self.game_rules.event(EventType.START, final_players)
else:
self.false_position = Label(popup, text="Positions incorrectes",
fg="red")
self.false_position.pack()
def display_tiles(self):
print('Display -- display_tiles')
i = 1
y = 100
for tile in self.game_board.displayed_tiles:
x = 170 + 120 * (i - 1)
self.display_tile(self.window, x, y, tile, None)
i += 1
def display_tile(self, canvas, x, y, tile, event):
print('Display -- display_tile')
canvas = Canvas(canvas, width=100, height=100,
background='#725202')
canvas.create_image(50, 50, image=self.img_tile)
canvas.create_image(13, 13, image=self.get_image_points(tile.points))
i = 1
for key in tile.gems_conditions:
number = tile.gems_conditions[key]
if number > 0:
textcolor = "white"
if ResourceType.get_color(key) == "white":
textcolor = "black"
if i == 1:
canvas.create_image(27, 40, image=self.get_image_rect_gem(
key))
txtBuy1 = canvas.create_text(25, 40, text=number,
fill=textcolor)
i = 2
elif i == 2:
canvas.create_image(27, 80, image=self.get_image_rect_gem(
key))
txtBuy2 = canvas.create_text(25, 80, text=number,
fill=textcolor)
i = 3
elif i == 3:
canvas.create_image(77, 80, image=self.get_image_rect_gem(
key))
txtBuy3 = canvas.create_text(75, 80, text=number,
fill=textcolor)
i = 0
canvas.place(x=x, y=y)
if event is not None:
canvas.bind("<Button-1>", lambda event, e=event,
t=tile: self.game_rules.event(e,
t))
def display_cards(self):
print('Display -- display_cards')
for lvl in range(1, int(self.game_rules.nb_lvl_card) + 1):
i = 1
for card in self.game_board.displayed_cards[lvl]:
x = 170 + 120 * (i - 1)
y = 490 - (130 * (lvl - 1))
self.display_card(self.window, x, y, card,
EventType.CLICK_DISPLAYED_CARD)
i += 1
def display_card(self, canvas, x, y, card, event):
print('Display -- display_card')
canvas = Canvas(canvas, width=100, height=120,
background=self.get_color(int(card.level)))
canvas.create_image(50, 75, image=self.get_image_card_gem(
card.income_gem))
canvas.create_image(15, 20, image=self.get_image_points(card.points))
i = 1
for key in card.purchase_gems:
number = card.purchase_gems[key]
if number > 0:
textcolor = "white"
if ResourceType.get_color(key) == "white":
textcolor = "black"
if i == 1:
canvas.create_image(25, 100,
image=self.get_image_circle_gem(key))
txtBuy1 = canvas.create_text(25, 100, text=number,
fill=textcolor)
i = 2
elif i == 2:
canvas.create_image(75, 100,
image=self.get_image_circle_gem(key))
txtBuy2 = canvas.create_text(75, 100, text=number,
fill=textcolor)
i = 3
elif i == 3:
canvas.create_image(25, 60,
image=self.get_image_circle_gem(key))
txtBuy3 = canvas.create_text(25, 60, text=number,
fill=textcolor)
i = 4
elif i == 4:
canvas.create_image(75, 60,
image=self.get_image_circle_gem(key))
txtBuy4 = canvas.create_text(75, 60, text=number,
fill=textcolor)
i = 0
canvas.place(x=x,
y=y)
if event is not None:
canvas.bind("<Button-1>",
lambda event, e=event,
c=card: self.game_rules.event(e, c))
def display_stacks(self):
print('Display -- display_stacks')
for i in range(1, int(self.game_rules.nb_lvl_card) + 1):
self.display_stack(i, self.game_board.is_deck_empty(i))
def display_stack(self, level, empty):
print('Display -- display_stack')
color = Display.get_color(level)
if empty:
color = "grey"
canvas = Canvas(self.window, width=100, height=120)
canvas.create_image(50, 60, image=self.get_image_deck(level, empty))
canvas.place(x=50, y=490 - (130 * (level - 1)))
canvas.bind("<Button-1>", lambda event, e=EventType.CLICK_DECK_CARD,
l=level: self.game_rules.event(e, l))
def display_bank(self, bank):
print('Display -- display_bank')
x = 70
y = 650
for token in ResourceType.get_sorted_resources():
if token == "Gold":
self.display_gold(self.window, 70, 115, bank[token])
else:
self.display_gem(self.window, x, y, bank[token], token)
x += 100
def display_gold(self, canvas, x, y, nb):
print('Display -- display_gold')
canvas = Canvas(canvas, width=80, height=80)
canvas.create_image(40, 40, image=self.get_image_token_gem("Gold"))
canvas.create_image(40, 40, image=self.get_image_points(nb))
canvas.place(x=x, y=y)
def display_gem(self, canvas, x, y, nb, gem):
print('Display -- display_gem')
canvas = Canvas(canvas, width=80, height=80)
canvas.create_image(40, 40, image=self.get_image_token_gem(gem))
canvas.create_image(40, 40, image=self.get_image_points(nb))
canvas.place(x=x, y=y)
canvas.bind("<Button-1>",
lambda event, e=EventType.CLICK_TAKE_TOKEN_GAMEBOARD,
g=gem: self.game_rules.event(e, g))
###################### Display hand of player #################################
def display_players(self):
print('Display -- display_players')
x = 1300
y = 40
for player in self.game_board.players:
if type(player) == AI:
self.display_player_ia(x, y, player)
y += 280
else:
self.display_player_human(player)
def display_player_human(self, player):
print('Display -- display_player_human')
color = "grey"
if self.game_board.get_current_player() == player:
color = "orange"
canvas = Canvas(self.window, width=500, height=270,
highlightbackground=color)
self.display_player_bank(canvas, 100, 10, player)
canvas.create_text(50, 45, text=player.nickname, fill="black")
canvas.create_text(50, 65, text=str(player.calculate_total_points()) +
" / "
"%d" %
self.game_rules.nb_points_end,
fill="black")
y = 130
i = 1
for card in player.reserved_cards:
x = 10 + 120 * (i - 1)
self.display_card(canvas, x, y, card, EventType.RESERVE_PURCHASE)
i += 1
self.display_player_tile(canvas, 370, 140, player)
canvas.place(x=750, y=320)
def display_player_ia(self, x, y, player):
print('Display -- display_player_ia')
color = "grey"
if self.game_board.get_current_player() == player:
color = "orange"
canvas = Canvas(self.window, width=500, height=270,
highlightbackground=color)
canvas.place(x=x, y=y)
self.display_player_bank(canvas, 100, 10, player)
canvas.create_text(50, 45, text=player.nickname, fill="black")
canvas.create_text(50, 65, text=str(player.calculate_total_points()) +
" / "
"%d" %
self.game_rules.nb_points_end,
fill="black")
y = 130
i = 1
for card in player.reserved_cards:
x = 10 + 120 * (i - 1)
self.display_card_ia(canvas, x, y, card.level)
i += 1
self.display_player_tile(canvas, 370, 140, player)
def display_card_ia(self, canvas, x, y, level):
print('Display -- display_card_ia')
color = Display.get_color(level)
canvas = Canvas(canvas, width=100, height=120)
canvas.create_image(50, 60, image=self.get_image_deck(level, False))
canvas.place(x=x, y=y)
def display_player_tile(self, canvas, x, y, player):
print('Display -- display_player_tile')
canvas = Canvas(canvas, width=100, height=100,
background='#725202')
canvas.create_image(50, 50, image=self.img_tile)
canvas.create_image(50, 50, image=self.get_image_points(
len(player.owned_tiles)))
canvas.place(x=x, y=y)
def display_player_bank(self, canvas, x, y, player):
print('Display -- display_player_bank')
canvas = Canvas(canvas, width=390, height=120)
canvas.place(x=x, y=y)
x = 0
y = 60
for token in ResourceType.get_sorted_resources():
if token == "Gold":
self.display_player_gold(canvas, 320, 30, player.bank[token])
else:
self.display_player_gem(canvas, x, y, player.bank[token],
token)
x += 60
x = 0
y = 0
for token in ResourceType.get_sorted_resources():
if token == "Gold":
pass
else:
self.display_player_income_card(canvas, x, y,
player.get_card_income()[
token],
token)
x += 60
def display_player_gold(self, canvas, x, y, nb):
print('Display -- display_player_gold')
canvas = Canvas(canvas, width=60, height=60)
canvas.create_image(30, 30,
image=self.get_image_token_bank_gem("Gold"))
canvas.create_image(30, 30, image=self.get_image_points(nb))
canvas.place(x=x, y=y)
canvas.bind("<Button-1>",
lambda event, e=EventType.CLICK_GIVE_BACK_PLAYER_TOKEN,
g="Gold": self.game_rules.event(e, g))
def display_player_gem(self, canvas, x, y, nb, gem):
print('Display -- display_player_gem')
color = "white"
if ResourceType.get_color(gem) == "white":
color = "black"
canvas = Canvas(canvas, width=60, height=60)
canvas.create_image(30, 30, image=self.get_image_token_bank_gem(gem))
canvas.create_image(30, 30, image=self.get_image_points(nb))
canvas.place(x=x, y=y)
canvas.bind("<Button-1>",
lambda event, e=EventType.CLICK_GIVE_BACK_PLAYER_TOKEN,
g=gem: self.game_rules.event(e, g))
def display_player_income_card(self, canvas, x, y, nb, gem):
print('Display -- display_player_income_card')
color = "white"
if ResourceType.get_color(gem) == "white":
color = "black"
canvas = Canvas(canvas, width=60, height=60)
canvas.create_image(35, 30, image=self.get_image_rect_bank_gem(gem))
canvas.create_text(30, 30, text=nb, fill=color)
canvas.place(x=x, y=y)
def display_text_help(self):
print('Display -- display_text_help')
canvas = Canvas(self.window, width=500, height=70)
canvas.create_text(100, 30, text=self.game_board.help_text)
canvas.place(x=0, y=0)
def popup_select_card_action(self, isreserved, ispurchase, card):
        print('Display -- popup_select_card_action')
# GameState.toggle_modal(True)
self.popup = Toplevel(height=250, width=280)
self.popup.protocol("WM_DELETE_WINDOW", self.on_exit)
Label(self.popup, text="Selectionnez votre action :", height=1,
width=30).place(x=40, y=10)
self.display_card(self.popup, 90, 50, card, None)
if isreserved:
canvas = Canvas(self.popup, height=20,
width=60, background="grey")
canvas.create_text(30, 10, text="Reserver", fill="black")
canvas.bind("<Button-1>", lambda event,
e=EventType.POPUP_RESERVE,
c=card:
self.click_on_popup(e, c))
canvas.place(x=60, y=200)
if ispurchase:
canvas = Canvas(self.popup, height=20,
width=60, background="grey")
canvas.create_text(30, 10, text="Acheter", fill="black")
canvas.bind("<Button-1>", lambda event,
e=EventType.POPUP_PURCHASE,
c=card:
self.click_on_popup(e, c))
canvas.place(x=160, y=200)
def popup_select_tile_action(self, tiles):
print('Display -- popup_select_tile_action')
# GameState.toggle_modal(True)
self.popup = Toplevel(height=170, width=565)
self.popup.protocol("WM_DELETE_WINDOW", self.on_exit)
Label(self.popup, text="Selectionnez votre Noble:", height=1,
width=30).place(x=180, y=10)
x = 10
y = 50
for tile in tiles:
self.display_tile(self.popup, x, y, tile, EventType.CLICK_TILE)
x += 110
def popup_txt(self, txt):
print('Display -- popup_txt')
# GameState.toggle_modal(True)
self.popup = Toplevel(height=300, width=260)
self.popup.protocol("WM_DELETE_WINDOW", self.on_exit)
label = Label(self.popup,
text=txt, height=7,
width=30)
label.place(x=20, y=50)
def click_on_popup(self, event, objet):
print('Display -- click_on_popup')
self.popup.destroy()
# GameState.toggle_modal(False)
self.game_rules.event(event, objet)
def on_exit(self):
print('Display -- on_exit')
self.game_rules.event(EventType.CLOSE_POPUP, None)
self.popup.destroy()
def create_image(self):
print('Display -- create_image')
self.img_bg = PhotoImage(file='../res/bakground.gif')
self.img_button = PhotoImage(file='../res/Button.gif')
self.img0 = PhotoImage(file='../res/0.gif')
self.img0 = self.img0.subsample(3, 3)
self.img1 = PhotoImage(file='../res/1.gif')
self.img1 = self.img1.subsample(3, 3)
self.img2 = PhotoImage(file='../res/2.gif')
self.img2 = self.img2.subsample(3, 3)
self.img3 = PhotoImage(file='../res/3.gif')
self.img3 = self.img3.subsample(3, 3)
self.img4 = PhotoImage(file='../res/4.gif')
self.img4 = self.img4.subsample(3, 3)
self.img5 = PhotoImage(file='../res/5.gif')
self.img5 = self.img5.subsample(3, 3)
self.img6 = PhotoImage(file='../res/6.gif')
self.img6 = self.img6.subsample(3, 3)
self.img7 = PhotoImage(file='../res/7.gif')
self.img7 = self.img7.subsample(3, 3)
self.img_card_D = PhotoImage(file='../res/card_diamant.gif')
self.img_card_D = self.img_card_D.subsample(5, 5)
self.img_card_E = PhotoImage(file='../res/card_emeraude.gif')
self.img_card_E = self.img_card_E.subsample(5, 5)
self.img_card_O = PhotoImage(file='../res/card_onyx.gif')
self.img_card_O = self.img_card_O.subsample(5, 5)
self.img_card_R = PhotoImage(file='../res/card_rubis.gif')
self.img_card_R = self.img_card_R.subsample(5, 5)
self.img_card_S = PhotoImage(file='../res/card_saphir.gif')
self.img_card_S = self.img_card_S.subsample(5, 5)
self.img_circle_D = PhotoImage(file='../res/white_circle.gif')
self.img_circle_D = self.img_circle_D.subsample(2, 2)
self.img_circle_E = PhotoImage(file='../res/green_circle.gif')
self.img_circle_E = self.img_circle_E.subsample(2, 2)
self.img_circle_O = PhotoImage(file='../res/black_circle.gif')
self.img_circle_O = self.img_circle_O.subsample(2, 2)
self.img_circle_R = PhotoImage(file='../res/red_circle.gif')
self.img_circle_R = self.img_circle_R.subsample(2, 2)
self.img_circle_S = PhotoImage(file='../res/blue_circle.gif')
self.img_circle_S = self.img_circle_S.subsample(2, 2)
self.img_rect_D = PhotoImage(file='../res/white_rect.gif')
self.img_rect_D = self.img_rect_D.subsample(2, 2)
self.img_rect_E = PhotoImage(file='../res/green_rect.gif')
self.img_rect_E = self.img_rect_E.subsample(2, 2)
self.img_rect_O = PhotoImage(file='../res/black_rect.gif')
self.img_rect_O = self.img_rect_O.subsample(2, 2)
self.img_rect_R = PhotoImage(file='../res/red_rect.gif')
self.img_rect_R = self.img_rect_R.subsample(2, 2)
self.img_rect_S = PhotoImage(file='../res/blue_rect.gif')
self.img_rect_S = self.img_rect_S.subsample(2, 2)
self.img_rect_bank_D = PhotoImage(file='../res/white_rect.gif')
self.img_rect_bank_E = PhotoImage(file='../res/green_rect.gif')
self.img_rect_bank_O = PhotoImage(file='../res/black_rect.gif')
self.img_rect_bank_R = PhotoImage(file='../res/red_rect.gif')
self.img_rect_bank_S = PhotoImage(file='../res/blue_rect.gif')
self.img_token_D = PhotoImage(file='../res/token_diamant.gif')
self.img_token_D = self.img_token_D.subsample(3, 3)
self.img_token_E = PhotoImage(file='../res/token_emeraude.gif')
self.img_token_E = self.img_token_E.subsample(3, 3)
self.img_token_R = PhotoImage(file='../res/token_rubis.gif')
self.img_token_R = self.img_token_R.subsample(3, 3)
self.img_token_S = PhotoImage(file='../res/token_saphir.gif')
self.img_token_S = self.img_token_S.subsample(3, 3)
self.img_token_O = PhotoImage(file='../res/token_onyx.gif')
self.img_token_O = self.img_token_O.subsample(3, 3)
self.img_token_G = PhotoImage(file='../res/token_gold.gif')
self.img_token_G = self.img_token_G.subsample(3, 3)
self.img_token_bank_D = PhotoImage(file='../res/token_diamant.gif')
self.img_token_bank_D = self.img_token_bank_D.subsample(4, 4)
self.img_token_bank_E = PhotoImage(file='../res/token_emeraude.gif')
self.img_token_bank_E = self.img_token_bank_E.subsample(4, 4)
self.img_token_bank_R = PhotoImage(file='../res/token_rubis.gif')
self.img_token_bank_R = self.img_token_bank_R.subsample(4, 4)
self.img_token_bank_S = PhotoImage(file='../res/token_saphir.gif')
self.img_token_bank_S = self.img_token_bank_S.subsample(4, 4)
self.img_token_bank_O = PhotoImage(file='../res/token_onyx.gif')
self.img_token_bank_O = self.img_token_bank_O.subsample(4, 4)
self.img_token_bank_G = PhotoImage(file='../res/token_gold.gif')
self.img_token_bank_G = self.img_token_bank_G.subsample(4, 4)
self.img_deck_1 = PhotoImage(file='../res/deck_lvl1.gif')
self.img_deck_1 = self.img_deck_1.subsample(3, 3)
self.img_deck_empty_1 = PhotoImage(file='../res/deck_lvl1_empty.gif')
self.img_deck_empty_1 = self.img_deck_empty_1.subsample(7, 7)
self.img_deck_2 = PhotoImage(file='../res/deck_lvl2.gif')
self.img_deck_2 = self.img_deck_2.subsample(3, 3)
self.img_deck_empty_2 = PhotoImage(file='../res/deck_lvl2_empty.gif')
self.img_deck_empty_2 = self.img_deck_empty_2.subsample(3, 3)
self.img_deck_3 = PhotoImage(file='../res/deck_lvl3.gif')
self.img_deck_3 = self.img_deck_3.subsample(3, 3)
self.img_deck_empty_3 = PhotoImage(file='../res/deck_lvl3_empty.gif')
self.img_deck_empty_3 = self.img_deck_empty_3.subsample(3, 3)
self.img_tile = PhotoImage(file='../res/tuile.gif')
self.img_tile = self.img_tile.subsample(1, 1)
def get_image_points(self, points):
print('Display -- get_image_points')
if points == 0:
return self.img0
elif points == 1:
return self.img1
elif points == 2:
return self.img2
elif points == 3:
return self.img3
elif points == 4:
return self.img4
elif points == 5:
return self.img5
elif points == 6:
return self.img6
elif points == 7:
return self.img7
def get_image_card_gem(self, gem):
print('Display -- get_image_card_gem')
if gem == "Diamond":
return self.img_card_D
elif gem == "Emerald":
return self.img_card_E
elif gem == "Sapphire":
return self.img_card_S
elif gem == "Onyx":
return self.img_card_O
elif gem == "Ruby":
return self.img_card_R
def get_image_deck(self, lvl, empty):
print('Display -- get_image_deck')
if lvl == 1:
if empty:
return self.img_deck_empty_1
else:
return self.img_deck_1
elif lvl == 2:
if empty:
return self.img_deck_empty_2
else:
return self.img_deck_2
elif lvl == 3:
if empty:
return self.img_deck_empty_3
else:
return self.img_deck_3
def get_image_circle_gem(self, gem):
print('Display -- get_image_circle_gem')
if gem == "Diamond":
return self.img_circle_D
elif gem == "Emerald":
return self.img_circle_E
elif gem == "Sapphire":
return self.img_circle_S
elif gem == "Onyx":
return self.img_circle_O
elif gem == "Ruby":
return self.img_circle_R
def get_image_rect_gem(self, gem):
print('Display -- get_image_rect_gem')
if gem == "Diamond":
return self.img_rect_D
elif gem == "Emerald":
return self.img_rect_E
elif gem == "Sapphire":
return self.img_rect_S
elif gem == "Onyx":
return self.img_rect_O
elif gem == "Ruby":
return self.img_rect_R
def get_image_token_gem(self, gem):
print('Display -- get_image_token_gem')
if gem == "Diamond":
return self.img_token_D
elif gem == "Emerald":
return self.img_token_E
elif gem == "Sapphire":
return self.img_token_S
elif gem == "Onyx":
return self.img_token_O
elif gem == "Ruby":
return self.img_token_R
elif gem == "Gold":
return self.img_token_G
def get_image_rect_bank_gem(self, gem):
print('Display -- get_image_rect_bank_gem')
if gem == "Diamond":
return self.img_rect_bank_D
elif gem == "Emerald":
return self.img_rect_bank_E
elif gem == "Sapphire":
return self.img_rect_bank_S
elif gem == "Onyx":
return self.img_rect_bank_O
elif gem == "Ruby":
return self.img_rect_bank_R
def get_image_token_bank_gem(self, gem):
print('Display -- get_image_token_bank_gem')
if gem == "Diamond":
return self.img_token_bank_D
elif gem == "Emerald":
return self.img_token_bank_E
elif gem == "Sapphire":
return self.img_token_bank_S
elif gem == "Onyx":
return self.img_token_bank_O
elif gem == "Ruby":
return self.img_token_bank_R
elif gem == "Gold":
return self.img_token_bank_G
@staticmethod
def get_color(level):
print('Display -- get_color')
if level == 1:
color = '#0483f9'
if level == 2:
color = '#05e002'
if level == 3:
color = '#ffac07'
return color
def refresh(self):
print('Display -- refresh')
canvas = Canvas(self.window, height=self.h, width=self.w)
canvas.place(x=0, y=0)
self.display_bank(self.game_board.bank)
self.display_stacks()
self.display_cards()
self.display_tiles()
self.display_players()
self.display_text_help()
def launch(self):
print('Display -- launch')
canvas = Canvas(self.window, height=self.h, width=self.w)
canvas.create_image(1000, 500, image=self.img_bg)
button_quit = Canvas(self.window, height=100, width=300)
button_start = Canvas(self.window, height=100, width=300)
button_start.create_image(151, 52, image=self.img_button)
button_quit.create_image(151, 52, image=self.img_button)
button_quit.place(x=1400, y=500)
button_start.place(x=300, y=500)
canvas.place(x=0, y=0)
button_start.create_text(150, 50, text='Start', fill='gold')
button_quit.create_text(150, 50, text='Quit', fill='gold')
button_quit.bind("<Button-1>", lambda a, b=None: self.window.destroy())
button_start.bind("<Button-1>",
lambda a, b=None: self.popup_start_click_action())
| gpl-3.0 | 5,044,571,538,574,761,000 | 40.564599 | 83 | 0.527711 | false | 3.562285 | false | false | false |
CINPLA/exana | exana/tracking/fields.py | 1 | 32391 | import numpy as np
def spatial_rate_map(x, y, t, spike_train, binsize=0.01, box_xlen=1,
box_ylen=1, mask_unvisited=True, convolve=True,
return_bins=False, smoothing=0.02):
"""Divide a 2D space in bins of size binsize**2, count the number of spikes
in each bin and divide by the time spent in respective bins. The map can
then be convolved with a gaussian kernel of size csize determined by the
smoothing factor, binsize and box_xlen.
Parameters
----------
spike_train : neo.SpikeTrain
    x : array
        1d vector of x positions
    y : array
        1d vector of y positions
    t : array
        1d vector of times at x, y positions
    binsize : float
        spatial binsize
    box_xlen, box_ylen : float
        side lengths of the box
mask_unvisited: bool
mask bins which has not been visited by nans
convolve : bool
convolve the rate map with a 2D Gaussian kernel
Returns
-------
out : rate map
if return_bins = True
out : rate map, xbins, ybins
"""
if not all([len(var) == len(var2) for var in [x,y,t] for var2 in [x,y,t]]):
raise ValueError('x, y, t must have same number of elements')
if box_xlen < x.max() or box_ylen < y.max():
raise ValueError('box length must be larger or equal to max path length')
from decimal import Decimal as dec
decimals = 1e10
remainderx = dec(float(box_xlen)*decimals) % dec(float(binsize)*decimals)
remaindery = dec(float(box_ylen)*decimals) % dec(float(binsize)*decimals)
if remainderx != 0 or remaindery != 0:
raise ValueError('the remainder should be zero i.e. the ' +
'box length should be an exact multiple ' +
'of the binsize')
# interpolate one extra timepoint
t_ = np.append(t, t[-1] + np.median(np.diff(t)))
spikes_in_bin, _ = np.histogram(spike_train, t_)
time_in_bin = np.diff(t_)
xbins = np.arange(0, box_xlen + binsize, binsize)
ybins = np.arange(0, box_ylen + binsize, binsize)
ix = np.digitize(x, xbins, right=True)
iy = np.digitize(y, ybins, right=True)
spike_pos = np.zeros((xbins.size, ybins.size))
time_pos = np.zeros((xbins.size, ybins.size))
for n in range(len(x)):
spike_pos[ix[n], iy[n]] += spikes_in_bin[n]
time_pos[ix[n], iy[n]] += time_in_bin[n]
# correct for shifting of map
spike_pos = spike_pos[1:, 1:]
time_pos = time_pos[1:, 1:]
with np.errstate(divide='ignore', invalid='ignore'):
rate = np.divide(spike_pos, time_pos)
if convolve:
rate[np.isnan(rate)] = 0. # for convolution
from astropy.convolution import Gaussian2DKernel, convolve_fft
csize = (box_xlen / binsize) * smoothing
kernel = Gaussian2DKernel(csize)
rate = convolve_fft(rate, kernel) # TODO edge correction
if mask_unvisited:
was_in_bin = np.asarray(time_pos, dtype=bool)
rate[np.invert(was_in_bin)] = np.nan
if return_bins:
return rate.T, xbins, ybins
else:
return rate.T
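
# Example usage (illustrative sketch; the tracking data below is synthetic):
#
#   t = np.linspace(0, 100, 10000)           # timestamps in seconds
#   x = 0.5 + 0.4 * np.sin(0.1 * t)          # x positions inside a 1 x 1 box
#   y = 0.5 + 0.4 * np.cos(0.1 * t)          # y positions
#   spike_train = t[::50]                    # hypothetical spike times
#   rate_map = spatial_rate_map(x, y, t, spike_train, binsize=0.05)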
def gridness(rate_map, box_xlen, box_ylen, return_acorr=False,
step_size=0.1, method='iter', return_masked_acorr=False):
    '''Calculates gridness of a rate map. Calculates the normalized
    autocorrelation (A) of a rate map B where A is given as
    A = 1/n \sum_{x,y} (B - \bar{B})^2 / \sigma_{B}^2. Further, Pearson's
    product-moment correlation coefficient is calculated between A and A_{rot}
    rotated 30 and 60 degrees. Finally the gridness is calculated as the
    difference between the minimum of coefficients at 60 degrees and the
    maximum of coefficients at 30 degrees, i.e. gridness = min(r60) - max(r30).
    If the method 'iter' is chosen:
    In order to focus the analysis on the symmetry of A, the central and the
    outer parts of A are increasingly masked at steps of ``step_size``, and
    the gridness is maximized over the resulting masks.
If the method 'puncture' is chosen:
This is the standard way of calculating gridness, by masking the central
autocorrelation bump, in addition to rounding the map. See examples.
Parameters
----------
rate_map : numpy.ndarray
box_xlen : float
side length of quadratic box
step_size : float
step size in masking, only applies to the method "iter"
return_acorr : bool
return autocorrelation map or not
return_masked_acorr : bool
return masked autocorrelation map or not
method : 'iter' or 'puncture'
Returns
-------
out : gridness, (autocorrelation map, masked autocorrelation map)
Examples
--------
>>> from exana.tracking.tools import make_test_grid_rate_map
>>> import matplotlib.pyplot as plt
>>> rate_map, pos = make_test_grid_rate_map()
>>> iter_score = gridness(rate_map, box_xlen=1, box_ylen=1, method='iter')
>>> print('%.2f' % iter_score)
1.39
>>> puncture_score = gridness(rate_map, box_xlen=1, box_ylen=1, method='puncture')
>>> print('%.2f' % puncture_score)
0.96
.. plot::
import matplotlib.pyplot as plt
import numpy as np
from exana.tracking.tools import make_test_grid_rate_map
from exana.tracking import gridness
import matplotlib.pyplot as plt
rate_map, _ = make_test_grid_rate_map()
fig, axs = plt.subplots(2, 2)
g1, acorr, m_acorr1 = gridness(rate_map, box_xlen=1,
box_ylen=1, return_acorr=True,
return_masked_acorr=True,
method='iter')
g2, m_acorr2 = gridness(rate_map, box_xlen=1,
box_ylen=1,
return_masked_acorr=True,
method='puncture')
mats = [rate_map, m_acorr1, acorr, m_acorr2]
titles = ['Rate map', 'Masked acorr "iter", gridness = %.2f' % g1,
'Autocorrelation',
'Masked acorr "puncture", gridness = %.2f' % g2]
for ax, mat, title in zip(axs.ravel(), mats, titles):
ax.imshow(mat)
ax.set_title(title)
plt.tight_layout()
plt.show()
'''
import numpy.ma as ma
from exana.misc.tools import fftcorrelate2d
from exana.tracking.tools import gaussian2D
from scipy.optimize import curve_fit
tmp_map = rate_map.copy()
tmp_map[~np.isfinite(tmp_map)] = 0
acorr = fftcorrelate2d(tmp_map, tmp_map, mode='full', normalize=True)
rows, cols = acorr.shape
b_x = np.linspace(- box_xlen / 2., box_xlen / 2., rows)
b_y = np.linspace(- box_ylen / 2., box_ylen / 2., cols)
B_x, B_y = np.meshgrid(b_x, b_y)
if method == 'iter':
if return_masked_acorr: m_acorrs = []
gridscores = []
for outer in np.arange(box_xlen / 4, box_xlen / 2, step_size):
m_acorr = ma.masked_array(
acorr, mask=np.sqrt(B_x**2 + B_y**2) > outer)
for inner in np.arange(0, box_xlen / 4, step_size):
m_acorr = ma.masked_array(
m_acorr, mask=np.sqrt(B_x**2 + B_y**2) < inner)
r30, r60 = rotate_corr(m_acorr)
gridscores.append(np.min(r60) - np.max(r30))
if return_masked_acorr: m_acorrs.append(m_acorr)
gridscore = max(gridscores)
if return_masked_acorr: m_acorr = m_acorrs[gridscores.index(gridscore)]
elif method == 'puncture':
# round picture edges
_gaussian = lambda pos, a, s: gaussian2D(a, pos[0], pos[1], 0, 0, s).ravel()
p0 = (max(acorr.ravel()), min(box_xlen, box_ylen) / 100)
popt, pcov = curve_fit(_gaussian, (B_x, B_y), acorr.ravel(), p0=p0)
m_acorr = ma.masked_array(
acorr, mask=np.sqrt(B_x**2 + B_y**2) > min(box_xlen, box_ylen) / 2)
m_acorr = ma.masked_array(
m_acorr, mask=np.sqrt(B_x**2 + B_y**2) < popt[1])
r30, r60 = rotate_corr(m_acorr)
gridscore = float(np.min(r60) - np.max(r30))
if return_acorr and return_masked_acorr:
return gridscore, acorr, m_acorr
if return_masked_acorr:
return gridscore, m_acorr
if return_acorr:
return gridscore, acorr # acorrs[grids.index(max(grids))]
else:
return gridscore
def rotate_corr(acorr):
from exana.misc.tools import masked_corrcoef2d
from scipy.ndimage.interpolation import rotate
angles = range(30, 180+30, 30)
corr = []
# Rotate and compute correlation coefficient
for angle in angles:
rot_acorr = rotate(acorr, angle, reshape=False)
corr.append(masked_corrcoef2d(rot_acorr, acorr)[0, 1])
r60 = corr[1::2]
r30 = corr[::2]
return r30, r60
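
# Note: with angles = [30, 60, 90, 120, 150, 180], corr[::2] collects the
# coefficients at 30, 90 and 150 degrees (r30) and corr[1::2] those at
# 60, 120 and 180 degrees (r60).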
def occupancy_map(x, y, t,
binsize=0.01,
box_xlen=1,
box_ylen=1,
mask_unvisited=True,
convolve=True,
return_bins=False,
smoothing=0.02):
'''Divide a 2D space in bins of size binsize**2, count the time spent
in each bin. The map can be convolved with a gaussian kernel of size
csize determined by the smoothing factor, binsize and box_xlen.
Parameters
----------
x : array
1d vector of x positions
y : array
1d vector of y positions
t : array
1d vector of times at x, y positions
binsize : float
spatial binsize
box_xlen : float
side length of quadratic box
mask_unvisited: bool
mask bins which has not been visited by nans
convolve : bool
convolve the rate map with a 2D Gaussian kernel
Returns
-------
occupancy_map : numpy.ndarray
if return_bins = True
out : occupancy_map, xbins, ybins
'''
if not all([len(var) == len(var2) for var in [
x, y, t] for var2 in [x, y, t]]):
raise ValueError('x, y, t must have same number of elements')
if box_xlen < x.max() or box_ylen < y.max():
raise ValueError(
'box length must be larger or equal to max path length')
from decimal import Decimal as dec
decimals = 1e10
remainderx = dec(float(box_xlen)*decimals) % dec(float(binsize)*decimals)
remaindery = dec(float(box_ylen)*decimals) % dec(float(binsize)*decimals)
if remainderx != 0 or remaindery != 0:
raise ValueError('the remainder should be zero i.e. the ' +
'box length should be an exact multiple ' +
'of the binsize')
# interpolate one extra timepoint
t_ = np.array(t.tolist() + [t.max() + np.median(np.diff(t))])
time_in_bin = np.diff(t_)
xbins = np.arange(0, box_xlen + binsize, binsize)
ybins = np.arange(0, box_ylen + binsize, binsize)
ix = np.digitize(x, xbins, right=True)
iy = np.digitize(y, ybins, right=True)
time_pos = np.zeros((xbins.size, ybins.size))
    for n in range(len(x)):
        time_pos[ix[n], iy[n]] += time_in_bin[n]
# correct for shifting of map since digitize returns values at right edges
time_pos = time_pos[1:, 1:]
    occupancy = time_pos.copy()
    if convolve:
        occupancy[np.isnan(occupancy)] = 0.  # for convolution
        from astropy.convolution import Gaussian2DKernel, convolve_fft
        csize = (box_xlen / binsize) * smoothing
        kernel = Gaussian2DKernel(csize)
        occupancy = convolve_fft(occupancy, kernel)  # TODO edge correction
    if mask_unvisited:
        was_in_bin = np.asarray(time_pos, dtype=bool)
        occupancy[np.invert(was_in_bin)] = np.nan
    if return_bins:
        return occupancy.T, xbins, ybins
    else:
        return occupancy.T
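
# Example usage (sketch, reusing the synthetic x, y, t from the example above):
#
#   occupancy = occupancy_map(x, y, t, binsize=0.05)  # seconds spent per bin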
def nvisits_map(x, y, t,
binsize=0.01,
box_xlen=1,
box_ylen=1,
return_bins=False):
    '''Divide a 2D space in bins of size binsize**2 and count the
    number of visits in each bin. A visit is counted each time the
    trajectory enters a bin different from the previous sample's bin.
Parameters
----------
x : array
1d vector of x positions
y : array
1d vector of y positions
t : array
1d vector of times at x, y positions
binsize : float
spatial binsize
box_xlen : float
side length of quadratic box
Returns
-------
nvisits_map : numpy.ndarray
if return_bins = True
out : nvisits_map, xbins, ybins
'''
if not all([len(var) == len(var2) for var in [
x, y, t] for var2 in [x, y, t]]):
raise ValueError('x, y, t must have same number of elements')
if box_xlen < x.max() or box_ylen < y.max():
raise ValueError(
'box length must be larger or equal to max path length')
from decimal import Decimal as dec
decimals = 1e10
remainderx = dec(float(box_xlen)*decimals) % dec(float(binsize)*decimals)
remaindery = dec(float(box_ylen)*decimals) % dec(float(binsize)*decimals)
if remainderx != 0 or remaindery != 0:
raise ValueError('the remainder should be zero i.e. the ' +
'box length should be an exact multiple ' +
'of the binsize')
xbins = np.arange(0, box_xlen + binsize, binsize)
ybins = np.arange(0, box_ylen + binsize, binsize)
ix = np.digitize(x, xbins, right=True)
iy = np.digitize(y, ybins, right=True)
nvisits_map = np.zeros((xbins.size, ybins.size))
for n in range(len(x)):
if n == 0:
nvisits_map[ix[n], iy[n]] = 1
else:
if ix[n-1] != ix[n] or iy[n-1] != iy[n]:
nvisits_map[ix[n], iy[n]] += 1
# correct for shifting of map since digitize returns values at right edges
nvisits_map = nvisits_map[1:, 1:]
if return_bins:
return nvisits_map.T, xbins, ybins
else:
return nvisits_map.T
def spatial_rate_map_1d(x, t, spike_train,
binsize=0.01,
track_len=1,
mask_unvisited=True,
convolve=True,
return_bins=False,
smoothing=0.02):
"""Take x coordinates of linear track data, divide in bins of binsize,
count the number of spikes in each bin and divide by the time spent in
respective bins. The map can then be convolved with a gaussian kernel of
size csize determined by the smoothing factor, binsize and box_xlen.
Parameters
----------
spike_train : array
x : array
1d vector of x positions
t : array
1d vector of times at x, y positions
binsize : float
spatial binsize
    track_len : float
        length of the linear track
mask_unvisited: bool
mask bins which has not been visited by nans
    convolve : bool
        convolve the rate map with a 1D Gaussian kernel
Returns
-------
out : rate map
if return_bins = True
out : rate map, xbins
"""
if not all([len(var) == len(var2) for var in [x, t] for var2 in [x, t]]):
raise ValueError('x, t must have same number of elements')
if track_len < x.max():
        raise ValueError('track length must be larger '
                         'or equal to max path length')
from decimal import Decimal as dec
decimals = 1e10
remainderx = dec(float(track_len)*decimals) % dec(float(binsize)*decimals)
if remainderx != 0:
raise ValueError('the remainder should be zero i.e. the ' +
'box length should be an exact multiple ' +
'of the binsize')
# interpolate one extra timepoint
t_ = np.array(t.tolist() + [t.max() + np.median(np.diff(t))])
spikes_in_bin, _ = np.histogram(spike_train, t_)
time_in_bin = np.diff(t_)
xbins = np.arange(0, track_len + binsize, binsize)
ix = np.digitize(x, xbins, right=True)
spike_pos = np.zeros(xbins.size)
time_pos = np.zeros(xbins.size)
for n in range(len(x)):
spike_pos[ix[n]] += spikes_in_bin[n]
time_pos[ix[n]] += time_in_bin[n]
# correct for shifting of map since digitize returns values at right edges
spike_pos = spike_pos[1:]
time_pos = time_pos[1:]
with np.errstate(divide='ignore', invalid='ignore'):
rate = np.divide(spike_pos, time_pos)
if convolve:
rate[np.isnan(rate)] = 0. # for convolution
        from astropy.convolution import Gaussian1DKernel, convolve_fft
        csize = (track_len / binsize) * smoothing
        kernel = Gaussian1DKernel(csize)
rate = convolve_fft(rate, kernel) # TODO edge correction
if mask_unvisited:
was_in_bin = np.asarray(time_pos, dtype=bool)
rate[np.invert(was_in_bin)] = np.nan
if return_bins:
return rate.T, xbins
else:
return rate.T
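
# Example usage (sketch; x, t and spike_train as in the 2D example above):
#
#   rate_1d = spatial_rate_map_1d(x, t, spike_train, binsize=0.05)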
def separate_fields(rate_map, laplace_thrsh=0, center_method='maxima',
cutoff_method='none', box_xlen=1, box_ylen=1, index=False):
"""Separates fields using the laplacian to identify fields separated by
a negative second derivative.
Parameters
----------
rate_map : np 2d array
firing rate in each bin
laplace_thrsh : float
value of laplacian to separate fields by relative to the minima. Should be
on the interval 0 to 1, where 0 cuts off at 0 and 1 cuts off at
min(laplace(rate_map)). Default 0.
center_method : string
method to find field centers. Valid options = ['center_of_mass',
'maxima','gaussian_fit']
cutoff_method (optional) : string or function
function to exclude small fields. If local field value of function
is lower than global function value, the field is excluded. Valid
string_options = ['median', 'mean','none'].
index : bool, default False
return bump center values as index or xy-pos
Returns
-------
fields : numpy array, shape like rate_map.
contains areas all filled with same value, corresponding to fields
in rate_map. The values are in range(1,nFields + 1), sorted by size of the
field (sum of all field values). 0 elsewhere.
n_field : int
field count
bump_centers : (n_field x 2) np ndarray
Coordinates of field centers
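    Examples
    --------
    Illustrative call (skipped in doctests; a smoothed rate map is assumed):

    >>> fields, n_fields, centers = separate_fields(
    ...     rate_map, laplace_thrsh=0.1,
    ...     cutoff_method='median')  # doctest: +SKIP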
"""
cutoff_functions = {'mean':np.mean, 'median':np.median, 'none':None}
if not callable(cutoff_method):
try:
cutoff_func = cutoff_functions[cutoff_method]
except KeyError:
msg = "invalid cutoff_method flag '%s'" % cutoff_method
raise ValueError(msg)
else:
cutoff_func = cutoff_method
from scipy import ndimage
l = ndimage.laplace(rate_map)
l[l>laplace_thrsh*np.min(l)] = 0
# Labels areas of the laplacian not connected by values > 0.
fields, n_fields = ndimage.label(l)
# index 0 is the background
indx = np.arange(1,n_fields+1)
# Use cutoff method to remove unwanted fields
    if cutoff_method != 'none':
        try:
            # global statistic is taken over the rate map itself,
            # not over the integer label array
            total_value = cutoff_func(rate_map)
        except Exception:
            print("Unexpected error, cutoff_func doesn't like the input:")
            raise
        field_values = ndimage.labeled_comprehension(rate_map, fields, indx,
                                                     cutoff_func, float, 0)
        try:
            is_field = field_values >= total_value
        except Exception:
            print("cutoff_func return values don't want to compare:")
            raise
if np.sum(is_field) == 0:
return np.zeros(rate_map.shape), 0, np.array([[],[]])
for i in indx:
if not is_field[i-1]:
fields[fields == i] = 0
n_fields = ndimage.label(fields, output=fields)
indx = np.arange(1,n_fields + 1)
# Sort by largest mean
sizes = ndimage.labeled_comprehension(rate_map, fields, indx,
np.mean, float, 0)
size_sort = np.argsort(sizes)[::-1]
new = np.zeros_like(fields)
for i in np.arange(n_fields):
new[fields == size_sort[i]+1] = i+1
fields = new
    # box_xlen may be a plain float without a .units attribute
    bc = get_bump_centers(rate_map, labels=fields, ret_index=index,
                          indices=indx, method=center_method,
                          units=getattr(box_xlen, 'units', 1))
# TODO exclude fields where maxima is on the edge of the field?
return fields, n_fields, bc
def get_bump_centers(rate_map, labels, ret_index=False, indices=None, method='maxima',
units=1):
"""Finds center of fields at labels."""
from scipy import ndimage
if method not in ['maxima','center_of_mass','gaussian_fit']:
msg = "invalid center_method flag '%s'" % method
raise ValueError(msg)
if indices is None:
indices = np.arange(1,np.max(labels)+1)
if method == 'maxima':
bc = ndimage.maximum_position(rate_map, labels=labels,
index=indices)
elif method == 'center_of_mass':
bc = ndimage.center_of_mass(rate_map, labels=labels, index=indices)
    elif method == 'gaussian_fit':
        from exana.tracking.tools import fit_gauss_asym
        bc = np.zeros((len(indices), 2))
for i in indices:
r = rate_map.copy()
r[labels != i] = 0
popt = fit_gauss_asym(r, return_data=False)
# TODO Find out which axis is x and which is y
bc[i-1] = (popt[2],popt[1])
if ret_index:
msg = 'ret_index not implemented for gaussian fit'
raise NotImplementedError(msg)
    if not ret_index and method != 'gaussian_fit':
bc = (bc + np.array((0.5,0.5)))/rate_map.shape
return np.array(bc)*units
def find_avg_dist(rate_map, thrsh = 0, plot=False):
"""Uses autocorrelation and separate_fields to find average distance
between bumps. Is dependent on high gridness to get separate bumps in
the autocorrelation
Parameters
----------
rate_map : np 2d array
firing rate in each bin
thrsh (optional) : float, default 0
cutoff value for the laplacian of the autocorrelation function.
Should be a negative number. Gives better separation if bumps are
connected by "bridges" or saddles where the laplacian is negative.
plot (optional) : bool, default False
plot acorr and the separated acorr, with bump centers
Returns
-------
avg_dist : float
relative units from 0 to 1 of the box size
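    Examples
    --------
    Typical use on a grid-cell rate map (skipped; `rate_map` is assumed):

    >>> spacing = find_avg_dist(rate_map)  # doctest: +SKIP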
"""
from scipy.ndimage import maximum_position
from exana.misc.tools import fftcorrelate2d
# autocorrelate. Returns array (2x - 1) the size of rate_map
acorr = fftcorrelate2d(rate_map,rate_map, mode = 'full', normalize = True)
#acorr[acorr<0] = 0 # TODO Fix this
f, nf, bump_centers = separate_fields(acorr,laplace_thrsh=thrsh,
center_method='maxima',cutoff_method='median')
# TODO Find a way to find valid value for
# thrsh, or remove.
bump_centers = np.array(bump_centers)
# find dists from center in (autocorrelation)relative units (from 0 to 1)
distances = np.linalg.norm(bump_centers - (0.5,0.5), axis = 1)
dist_sort = np.argsort(distances)
distances = distances[dist_sort]
# use maximum 6 closest values except center value
avg_dist = np.median(distances[1:7])
# correct for difference in shapes
    avg_dist *= acorr.shape[0] / rate_map.shape[0]  # (2*n - 1)/n, e.g. 1.98 for n = 50
# TODO : raise warning if too big difference between points
if plot:
import matplotlib.pyplot as plt
fig,[ax1,ax2] = plt.subplots(1,2)
ax1.imshow(acorr,extent = (0,1,0,1),origin='lower')
ax1.scatter(*(bump_centers[:,::-1].T))
ax2.imshow(f,extent = (0,1,0,1),origin='lower')
ax2.scatter(*(bump_centers[:,::-1].T))
return avg_dist
def fit_hex(bump_centers, avg_dist=None, plot_bumps = False, method='best'):
"""Fits a hex grid to a given set of bumps. Uses the three bumps most
Parameters
----------
bump_centers : Nx2 np.array
x,y positions of bump centers, x,y /in (0,1)
avg_dist (optional): float
average spacing between bumps
    plot_bumps (optional): bool
        if True, plots the three bumps most likely to be in
        correct hex-position to the current matplotlib axes.
method (optional): string, valid options: ['closest', 'best']
method to find angle from neighboring bumps.
'closest' uses six bumps nearest to center bump
'best' uses the two bumps nearest to avg_dist
Returns
-------
displacement : float
distance of bump closest to the center in meters
orientation : float
orientation of hexagon (in degrees)
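    Examples
    --------
    Sketch of the intended call chain (skipped; a rate map is assumed, and
    see calculate_grid_geometry below for the exact wiring):

    >>> d = find_avg_dist(rate_map)  # doctest: +SKIP
    >>> fields, nf, centers = separate_fields(rate_map)  # doctest: +SKIP
    >>> disp, orient = fit_hex(centers, avg_dist=d)  # doctest: +SKIP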
"""
valid_methods = ['closest', 'best']
if method not in valid_methods:
msg = "invalid method flag '%s'" % method
raise ValueError(msg)
bump_centers = np.array(bump_centers)
# sort by distance to center
d = np.linalg.norm(bump_centers - (0.5,0.5), axis=1)
d_sort = np.argsort(d)
dist_sorted = bump_centers[d_sort]
    center_bump = dist_sorted[0]
    others = dist_sorted[1:]
displacement = d[d_sort][0]
# others distances to center bumps
relpos = others - center_bump
reldist = np.linalg.norm(relpos, axis=1)
if method == 'closest':
# get 6 closest bumps
rel_sort = np.argsort(reldist)
closest = others[rel_sort][:6]
relpos = relpos[rel_sort][:6]
elif method == 'best':
        # pick the 2 bumps such that sum_{i != j} (|r_i - r_j| - avg_dist)^2 is minimized
squares = 1e32*np.ones((others.shape[0], others.shape[0]))
for i in range(len(relpos)):
for j in range(i,len(relpos)):
rel1 = (reldist[i] - avg_dist)**2
rel2 = (reldist[j] - avg_dist)**2
rel3 = (np.linalg.norm(relpos[i]-relpos[j]) - avg_dist)**2
squares[i,j] = rel1 + rel2 + rel3
rel_slice = np.unravel_index(np.argmin(squares), squares.shape)
rel_slice = np.array(rel_slice)
#rel_sort = np.argsort(np.abs(reldist-avg_dist))
closest = others[rel_slice]
relpos = relpos[rel_slice]
# sort by angle
a = np.arctan2(relpos[:,1], relpos[:,0])%(2*np.pi)
a_sort = np.argsort(a)
# extract lowest angle and convert to degrees
orientation = a[a_sort][0] *180/np.pi
# hex grid is symmetric under rotations of 60deg
orientation %= 60
if plot_bumps:
import matplotlib.pyplot as plt
ax=plt.gca()
i = 1
xmin, xmax = ax.get_xlim()
ymin, ymax = ax.get_ylim()
dx = xmax-xmin; dy = ymax - ymin
closest = closest[a_sort]
edges = [center_bump] if method == 'best' else []
edges += [c for c in closest]
edges = np.array(edges)*(dx,dy) + (xmin, ymin)
poly = plt.Polygon(edges, alpha=0.5,color='r')
ax.add_artist(poly)
return displacement, orientation
def calculate_grid_geometry(rate_map, plot_fields=False, **kwargs):
"""Calculates quantitative information about grid field.
Find bump centers, bump spacing, center diplacement and hexagon
orientation
Parameters
----------
rate_map : np 2d array
firing rate in each bin
plot_fields : if True, plots the field labels with field centers to the
current matplotlib ax. Default False
thrsh : float, default 0
see find_avg_dist()
center_method : string, valid options: ['maxima', 'center_of_mass']
default: 'center_of_mass'
see separate_fields()
method : string, valid options: ['closest', 'best']
see fit_hex()
Returns
-------
bump_centers : 2d np.array
x,y positions of bump centers
avg_dist : float
average spacing between bumps, \in [0,1]
displacement : float
distance of bump closest to the center
orientation : float
orientation of hexagon (in degrees)
Examples
--------
>>> import numpy as np
>>> rate_map = np.zeros((5,5))
>>> pos = np.array([ [0,2],
... [1,0],[1,4],
... [2,2],
... [3,0],[3,4],
... [4,2]])
>>> for(i,j) in pos:
... rate_map[i,j] = 1
...
>>> result = calculate_grid_geometry(rate_map)
"""
# TODO add back the following when it is correct
# (array([[0.5, 0.9],
# [0.9, 0.7],
# [0.1, 0.7],
# [0.5, 0.5],
# [0.9, 0.3],
# [0.1, 0.3],
# [0.5, 0.1]]) * m, 0.4472135954999579, 0.0, 26.565051177077983)
# TODO: smooth data?
# smooth_rate_map = lambda x:x
# rate_map = smooth_rate_map(rate_map)
center_method = kwargs.pop('center_method',None)
if center_method:
fields, nfields, bump_centers = separate_fields(rate_map,
center_method=center_method)
else:
fields, nfields, bump_centers = separate_fields(rate_map)
if bump_centers.size == 0:
import warnings
        msg = "couldn't find bump centers, returning None"
        warnings.warn(msg, RuntimeWarning, stacklevel=2)
        return None, None, None, None
sh = np.array(rate_map.shape)
if plot_fields:
print(fields)
import matplotlib.pyplot as plt
x=np.linspace(0,1,sh[0]+1)
y=np.linspace(0,1,sh[1]+1)
x,y = np.meshgrid(x,y)
ax = plt.gca()
print('nfields: ',nfields)
plt.pcolormesh(x,y, fields)
# switch from row-column to x-y
bump_centers = bump_centers[:,::-1]
thrsh = kwargs.pop('thrsh', None)
if thrsh:
avg_dist = find_avg_dist(rate_map, thrsh)
else:
avg_dist = find_avg_dist(rate_map)
displacement, orientation = fit_hex(bump_centers, avg_dist,
plot_bumps=plot_fields, **kwargs)
return bump_centers, avg_dist, displacement, orientation
class RandomDisplacementBounds(object):
"""random displacement with bounds"""
def __init__(self, xmin, xmax, stepsize=0.5):
self.xmin = np.array(xmin)
self.xmax = np.array(xmax)
self.stepsize = stepsize
def __call__(self, x):
"""take a random step but ensure the new position is within the bounds"""
while True:
# this could be done in a much more clever way, but it will work for example purposes
xnew = x + (self.xmax-self.xmin)*np.random.uniform(-self.stepsize,
self.stepsize, np.shape(x))
if np.all(xnew < self.xmax) and np.all(xnew > self.xmin):
break
return xnew
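# Note: an instance of RandomDisplacementBounds is handed to
# scipy.optimize.basinhopping as its `take_step` hook (see
# optimize_sep_fields below), so that global hops stay inside the bounds.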
def optimize_sep_fields(rate_map,step = 0.04, niter=40, T = 1.0, method = 'SLSQP',
glob=True, x0 = [0.065,0.1],callback=None):
"""Optimizes the separation of the fields by minimizing an error
function
Parameters
----------
    rate_map : np 2d array
        firing rate in each bin
    method : string
        optimization method, valid methods=['L-BFGS-B', 'TNC', 'SLSQP']
x0 : list
initial values for smoothing smoothing and laplace_thrsh
Returns
--------
res :
Result of the optimization. Contains smoothing and laplace_thrsh in
attribute res.x"""
from scipy import optimize
from exana.tracking.tools import separation_error_func as err_func
valid_methods = ['L-BFGS-B', 'TNC', 'SLSQP']
if method not in valid_methods:
raise ValueError('invalid method flag %s' %method)
rate_map[np.isnan(rate_map)] = 0.
xmin = [0.025, 0]
xmax = [0.2, 1]
bounds = [(low,high) for low,high in zip(xmin,xmax)]
obj_func = lambda args: err_func(args[0], args[1], rate_map)
if glob:
take_step = RandomDisplacementBounds(xmin, xmax,stepsize=step)
minimizer_kwargs = dict(method=method, bounds=bounds)
res = optimize.basinhopping(obj_func, x0, niter=niter, T = T,
minimizer_kwargs=minimizer_kwargs,
take_step=take_step,callback=callback)
else:
res = optimize.minimize(obj_func, x0, method=method, bounds = bounds, options={'disp': True})
return res
if __name__ == "__main__":
import doctest
doctest.testmod()
| gpl-3.0 | 8,174,192,474,725,353,000 | 34.9102 | 101 | 0.591244 | false | 3.476921 | false | false | false |
btovar/cctools | apps/wq_hypersweep/test.py | 1 | 1690 | from work_queue import *
import sys
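# Each task covers one (i, j) point of the 20x20 sweep: a dropout rate of
# i * 0.05 and (apparently) j ResNet blocks for resnet.py, with output
# written to results<id>.csv where id = (i - 1) * 20 + j.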
def compose_task(i,j):
id = (i-1)*20+j
d_rate = i*0.05
r_blok = j
outfile = "results%d.csv" % id
command = "./script.sh results%d.csv %f %d" % (id,d_rate,r_blok)
t = Task(command)
t.specify_file("env.tar.gz", "env.tar.gz", WORK_QUEUE_INPUT, cache=True)
t.specify_file("datasets/cifar-10-batches-py", "datasets/cifar-10-batches-py", WORK_QUEUE_INPUT, cache=True)
t.specify_file("resnet.py", "resnet.py", WORK_QUEUE_INPUT, cache=True)
t.specify_file("script.sh", "script.sh", WORK_QUEUE_INPUT, cache=True)
t.specify_file(outfile, outfile, WORK_QUEUE_OUTPUT, cache=False)
return t
def main():
try:
q = WorkQueue(port = WORK_QUEUE_DEFAULT_PORT)
    except Exception:
print("Instantiation of Work Queue failed.")
sys.exit(1)
print("Listening on port %d..." % q.port)
for i in range(1,21):
for j in range (1,21):
t = compose_task(i,j)
taskid = q.submit(t)
print("Submitted task (id# %d): %s" % (taskid, t.command))
print("waiting for tasks to complete...")
whitelist = []
blacklist = []
while not q.empty():
t = q.wait(5)
if t:
print("task (id# %d) complete: %s (return code %d)" % (t.id, t.command, t.return_status))
if t.return_status == 0:
if t.hostname not in whitelist:
whitelist.append(t.hostname)
            else:
print("stdout:\n{}".format(t.output))
print("Blacklisting host: %s" % t.hostname)
q.blacklist(t.hostname)
blacklist.append(t.hostname)
q.submit(t)
print("Resubmitted task (id# %s): %s" % (t.id, t.command))
print("All tasks complete.")
print("Whitelist:", whitelist)
print("Blacklist:", blacklist)
sys.exit(0)
if __name__ == '__main__':
main()
| gpl-2.0 | 2,644,520,837,213,823,500 | 25.40625 | 109 | 0.640828 | false | 2.592025 | false | false | false |
jhseu/tensorflow | tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/basic.py | 1 | 2547 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# RUN: %p/basic | FileCheck %s
# pylint: disable=missing-docstring,line-too-long
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v2 as tf
from tensorflow.compiler.mlir.tensorflow.tests.tf_saved_model import common
# Verify that the tf.versions attribute exists. It is difficult to enforce
# contents, since the version numbers change over time. The conversion logic
# itself is verified in the common graphdef converter, so here just assert
# it is being invoked.
# CHECK: module
# CHECK-SAME: tf.versions
# CHECK-SAME: bad_consumers
# CHECK-SAME: min_consumer
# CHECK-SAME: producer
class TestModule(tf.Module):
def __init__(self):
super(TestModule, self).__init__()
self.v42 = tf.Variable(42.0)
self.c43 = tf.constant(43.0)
# CHECK: "tf_saved_model.global_tensor"() {is_mutable, sym_name = "[[VAR:[a-zA-Z_0-9]+]]", tf_saved_model.exported_names = ["v42"], type = tensor<f32>, value = dense<4.200000e+01> : tensor<f32>} : () -> ()
# CHECK: "tf_saved_model.global_tensor"() {sym_name = "[[CONST:[a-zA-Z_0-9]+]]", tf_saved_model.exported_names = [], type = tensor<f32>, value = dense<4.300000e+01> : tensor<f32>} : () -> ()
# CHECK: func {{@[a-zA-Z_0-9]+}}(
# CHECK-SAME: %arg0: tensor<f32> {tf_saved_model.index_path = [0]},
# CHECK-SAME: %arg1: tensor<*x!tf.resource> {tf_saved_model.bound_input = @[[VAR]]},
# CHECK-SAME: %arg2: tensor<f32> {tf_saved_model.bound_input = @[[CONST]]}) -> (
# CHECK-SAME: tensor<f32> {tf_saved_model.index_path = []})
# CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["some_function"]
@tf.function(input_signature=[tf.TensorSpec([], tf.float32)])
def some_function(self, x):
return x + self.v42 + self.c43
if __name__ == '__main__':
common.do_test(TestModule)
| apache-2.0 | -4,427,873,526,579,521,500 | 42.169492 | 207 | 0.666667 | false | 3.273779 | false | false | false |
anak10thn/graphics-dojo-qt5 | dragmove/dragmovecharm.py | 1 | 3229 | #############################################################################
##
## Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
## Contact: Qt Software Information ([email protected])
##
## This file is part of the Graphics Dojo project on Qt Labs.
##
## This file may be used under the terms of the GNU General Public
## License version 2.0 or 3.0 as published by the Free Software Foundation
## and appearing in the file LICENSE.GPL included in the packaging of
## this file. Please review the following information to ensure GNU
## General Public Licensing requirements will be met:
## http://www.fsf.org/licensing/licenses/info/GPLv2.html and
## http://www.gnu.org/copyleft/gpl.html.
##
## If you are unsure which license is appropriate for your use, please
## contact the sales department at [email protected].
##
## This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
## WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
##
#############################################################################
from PyQt4.QtCore import QEvent, QObject, QPoint, Qt
from PyQt4.QtGui import QMouseEvent, QWidget
class DragMoveData:
def __init__(self):
self.isMoving = False
self.startDrag = QPoint()
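# Rough usage sketch (hypothetical wiring; the API is defined below):
#
#     charm = DragMoveCharm()
#     charm.activateOn(window)   # left-click drag now moves `window`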
class DragMoveCharm(QObject):
def __init__(self, parent = None):
QObject.__init__(self, parent)
self.dragMoveData = {}
def activateOn(self, widget):
if widget in self.dragMoveData:
return
data = DragMoveData()
data.startDrag = QPoint(0, 0)
data.isMoving = False
self.dragMoveData[widget] = data
widget.installEventFilter(self)
def deactivateFrom(self, widget):
del self.dragMoveData[widget]
widget.removeEventFilter(self)
def eventFilter(self, object, event):
if not isinstance(object, QWidget):
return False
widget = object
type = event.type()
if type != QEvent.MouseButtonPress and \
type != QEvent.MouseButtonRelease and \
type != QEvent.MouseMove:
return False
if isinstance(event, QMouseEvent):
if event.modifiers() != Qt.NoModifier:
return False
button = event.button()
mouseEvent = event
try:
data = self.dragMoveData[widget]
except KeyError:
return False
consumed = False
if type == QEvent.MouseButtonPress and button == Qt.LeftButton:
data.startDrag = QPoint(mouseEvent.globalPos())
data.isMoving = True
event.accept()
consumed = True
if type == QEvent.MouseButtonRelease:
data.startDrag = QPoint(0, 0)
data.isMoving = False
if type == QEvent.MouseMove and data.isMoving:
pos = mouseEvent.globalPos()
widget.move(widget.pos() + pos - data.startDrag)
data.startDrag = QPoint(pos)
consumed = True
return consumed
| gpl-2.0 | -7,986,007,107,172,308,000 | 31.616162 | 77 | 0.580056 | false | 4.393197 | false | false | false |
JackMorris/CaiusHallHelper | main.py | 1 | 1768 | import sys
from datetime import date, timedelta
from configuration import Configuration
from service import raven_service, email_service
def main():
configuration_file_path = sys.argv[1]
configuration = Configuration(configuration_file_path)
_authenticate_services(configuration)
_make_user_bookings(configuration.users, 3)
_send_user_reports(configuration.users, 0)
def _authenticate_services(configuration):
""" Use `configuration` to authenticate raven_service and email_service.
:param configuration: Configuration instance for system configuration
"""
raven_service.set_default_credentials(configuration.default_crsid,
configuration.default_password)
email_service.set_email_credentials(configuration.gmail_username,
configuration.gmail_password)
def _make_user_bookings(users, days_in_advance):
""" Create bookings for each user in `users`.
:param users: list of Users to create bookings for
:param days_in_advance: how far in advance to book
:return: list of Booking instances containing all booked events
"""
date_to_book = date.today() + timedelta(days=days_in_advance)
bookings = []
for user in users:
bookings.append(user.create_booking(date_to_book))
return bookings
def _send_user_reports(users, days_in_advance):
""" Send reports to each user in `users`.
:param users: list of User instances to send reports to
:param days_in_advance: how many days in advance the reports should be for
"""
date_for_report = date.today() + timedelta(days=days_in_advance)
for user in users:
user.email_report(date_for_report)
if __name__ == '__main__':
main() | mit | 4,981,933,257,555,847,000 | 35.102041 | 78 | 0.68948 | false | 4.121212 | true | false | false |
pombredanne/pytype | pytype/tests/test_attributes.py | 1 | 2408 | """Test instance and class attributes."""
from pytype.tests import test_inference
class TestAttributes(test_inference.InferenceTest):
"""Tests for attributes."""
def testSimpleAttribute(self):
with self.Infer("""
class A(object):
def method1(self):
self.a = 3
def method2(self):
self.a = 3j
""", deep=True, solve_unknowns=False, extract_locals=True) as ty:
self.assertTypesMatchPytd(ty, """
class A:
a: complex or int
def method1(self) -> NoneType
def method2(self) -> NoneType
""")
def testOutsideAttributeAccess(self):
with self.Infer("""
class A(object):
pass
def f1():
A().a = 3
def f2():
A().a = 3j
""", deep=True, solve_unknowns=False, extract_locals=True) as ty:
self.assertTypesMatchPytd(ty, """
class A:
a: complex or int
def f1() -> NoneType
def f2() -> NoneType
""")
def testPrivate(self):
with self.Infer("""
class C(object):
def __init__(self):
self._x = 3
def foo(self):
return self._x
""", deep=True, solve_unknowns=False, extract_locals=True) as ty:
self.assertTypesMatchPytd(ty, """
class C:
_x: int
def foo(self) -> int
""")
def testPublic(self):
with self.Infer("""
class C(object):
def __init__(self):
self.x = 3
def foo(self):
return self.x
""", deep=True, solve_unknowns=False, extract_locals=True) as ty:
self.assertTypesMatchPytd(ty, """
class C:
x: int
def foo(self) -> int
""")
def testCrosswise(self):
with self.Infer("""
class A(object):
def __init__(self):
if id(self):
self.b = B()
def set_on_b(self):
self.b.x = 3
class B(object):
def __init__(self):
if id(self):
self.a = A()
def set_on_a(self):
self.a.x = 3j
""", deep=True, solve_unknowns=False, extract_locals=True) as ty:
self.assertTypesMatchPytd(ty, """
class A:
b: B
x: complex
def set_on_b(self) -> NoneType
class B:
a: A
x: int
def set_on_a(self) -> NoneType
""")
if __name__ == "__main__":
test_inference.main()
| apache-2.0 | 7,313,925,799,556,199,000 | 24.083333 | 69 | 0.50789 | false | 3.610195 | true | false | false |
DolphinDream/sverchok | nodes/curve/nearest_point.py | 1 | 6929 | import numpy as np
import bpy
from bpy.props import FloatProperty, EnumProperty, BoolProperty, IntProperty
from mathutils import Matrix
from mathutils.kdtree import KDTree
import sverchok
from sverchok.node_tree import SverchCustomTreeNode
from sverchok.data_structure import updateNode, zip_long_repeat, ensure_nesting_level, get_data_nesting_level
from sverchok.utils.logging import info, exception
from sverchok.utils.curve import SvCurve
from sverchok.utils.dummy_nodes import add_dummy
from sverchok.dependencies import scipy
if scipy is None:
add_dummy('SvExNearestPointOnCurveNode', "Nearest Point on Curve", 'scipy')
else:
from scipy.optimize import minimize_scalar
def init_guess(curve, points_from, samples=50):
u_min, u_max = curve.get_u_bounds()
us = np.linspace(u_min, u_max, num=samples)
points = curve.evaluate_array(us).tolist()
#print("P:", points)
kdt = KDTree(len(us))
for i, v in enumerate(points):
kdt.insert(v, i)
kdt.balance()
us_out = []
nearest_out = []
for point_from in points_from:
nearest, i, distance = kdt.find(point_from)
us_out.append(us[i])
nearest_out.append(tuple(nearest))
return us_out, nearest_out
def goal(curve, point_from):
def distance(t):
dv = curve.evaluate(t) - np.array(point_from)
return np.linalg.norm(dv)
return distance
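    # Rough sketch of how these two helpers combine (hypothetical inputs;
    # the node class below adds bracket and bounds handling around this):
    #
    #     ts, points = init_guess(curve, [point])      # coarse KDTree seed
    #     res = minimize_scalar(goal(curve, point),
    #                           bracket=(t_min, ts[0], t_max))
    #     t_nearest = res.x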
class SvExNearestPointOnCurveNode(bpy.types.Node, SverchCustomTreeNode):
"""
Triggers: Nearest Point on Curve
Tooltip: Find the point on the curve which is the nearest to the given point
"""
bl_idname = 'SvExNearestPointOnCurveNode'
bl_label = 'Nearest Point on Curve'
bl_icon = 'OUTLINER_OB_EMPTY'
sv_icon = 'SV_NEAREST_CURVE'
samples : IntProperty(
name = "Init Resolution",
default = 50,
min = 3,
update = updateNode)
precise : BoolProperty(
name = "Precise",
default = True,
update = updateNode)
solvers = [
('Brent', "Brent", "Uses inverse parabolic interpolation when possible to speed up convergence of golden section method", 0),
('Bounded', "Bounded", "Uses the Brent method to find a local minimum in the interval", 1),
('Golden', 'Golden Section', "Uses the golden section search technique", 2)
]
method : EnumProperty(
name = "Method",
description = "Solver method to use; select the one which works for your case",
items = solvers,
default = 'Brent',
update = updateNode)
def draw_buttons(self, context, layout):
layout.prop(self, 'samples')
layout.prop(self, 'precise', toggle=True)
def draw_buttons_ext(self, context, layout):
layout.prop(self, 'method')
def sv_init(self, context):
self.inputs.new('SvCurveSocket', "Curve")
p = self.inputs.new('SvVerticesSocket', "Point")
p.use_prop = True
p.default_property = (0.0, 0.0, 0.0)
self.outputs.new('SvVerticesSocket', "Point")
self.outputs.new('SvStringsSocket', "T")
def process(self):
if not any(socket.is_linked for socket in self.outputs):
return
curves_s = self.inputs['Curve'].sv_get()
curves_s = ensure_nesting_level(curves_s, 2, data_types=(SvCurve,))
src_point_s = self.inputs['Point'].sv_get()
src_point_s = ensure_nesting_level(src_point_s, 4)
points_out = []
t_out = []
for curves, src_points_i in zip_long_repeat(curves_s, src_point_s):
for curve, src_points in zip_long_repeat(curves, src_points_i):
t_min, t_max = curve.get_u_bounds()
new_t = []
new_points = []
init_ts, init_points = init_guess(curve, src_points,samples=self.samples)
#self.info("I: %s", init_points)
for src_point, init_t, init_point in zip(src_points, init_ts, init_points):
if self.precise:
delta_t = (t_max - t_min) / self.samples
self.debug("T_min %s, T_max %s, init_t %s, delta_t %s", t_min, t_max, init_t, delta_t)
if init_t <= t_min:
if init_t - delta_t >= t_min:
bracket = (init_t - delta_t, init_t, t_max)
else:
bracket = None # (t_min, t_min + delta_t, t_min + 2*delta_t)
elif init_t >= t_max:
if init_t + delta_t <= t_max:
bracket = (t_min, init_t, init_t + delta_t)
else:
bracket = None # (t_max - 2*delta_t, t_max - delta_t, t_max)
else:
bracket = (t_min, init_t, t_max)
result = minimize_scalar(goal(curve, src_point),
bounds = (t_min, t_max),
bracket = bracket,
method = self.method
)
if not result.success:
if hasattr(result, 'message'):
message = result.message
else:
message = repr(result)
raise Exception("Can't find the nearest point for {}: {}".format(src_point, message))
t0 = result.x
if t0 < t_min:
t0 = t_min
elif t0 > t_max:
t0 = t_max
else:
t0 = init_t
new_points.append(init_point)
new_t.append(t0)
if self.precise and self.outputs['Point'].is_linked:
new_points = curve.evaluate_array(np.array(new_t)).tolist()
points_out.append(new_points)
t_out.append(new_t)
self.outputs['Point'].sv_set(points_out)
self.outputs['T'].sv_set(t_out)
def register():
if scipy is not None:
bpy.utils.register_class(SvExNearestPointOnCurveNode)
def unregister():
if scipy is not None:
bpy.utils.unregister_class(SvExNearestPointOnCurveNode)
| gpl-3.0 | -8,756,187,733,861,882,000 | 39.758824 | 141 | 0.499928 | false | 4.066315 | false | false | false |
lituan/tools | pisa/ccp4_pisa.py | 1 | 2552 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
run pisa to parse interfaces in PDBs
first install CCP4 and change the following variables
"""
import os
import sys
import subprocess
import cPickle as pickle
from multiprocessing import Pool
os.environ['CCP4_SCR'] = 'C:\\ccp4temp'
os.environ['CCP4I_TCLTK'] = 'C:\\CCP4-7\\TclTk84\\bin'
os.environ['CBIN'] = 'C:\\CCP4-7\\7.0\\bin'
os.environ['CLIB'] = 'C:\\CCP4-7\\lib'
os.environ['CLIBD'] = 'C:\\CCP4-7\\lib\\data'
os.environ['CEXAM'] = 'C:\\CCP4-7\\examples'
os.environ['CHTML'] = 'C:\\CCP4-7\\html'
os.environ['CINCL'] = 'C:\\CCP4-7\\include'
os.environ['CCP4I_TOP'] = 'C:\\CCP4-7\\share\\ccp4i'
os.environ['CLIBD_MON'] = 'C:\\CCP4-7\\lib\\data\\monomers\\'
os.environ['MMCIFDIC'] = 'C:\\CCP4-7\\lib\\ccp4\\cif_mmdic.lib'
os.environ['CRANK'] = 'C:\\CCP4-7\\share\\ccp4i\\crank'
os.environ['CCP4_OPEN'] = 'unknown'
os.environ['GFORTRAN_UNBUFFERED_PRECONNECTED'] = 'Y'
os.environ['PATH'] = 'C:\\CCP4-7\\7.0\\bin'
os.environ['PISA_CONF_FILE'] = 'C:\\CCP4-7\\7.0\\share\\pisa\\pisa.cfg'
def pisa(f):
if not os.path.exists('detail'):
os.makedirs('detail')
if not os.path.exists('interface_xml'):
os.makedirs('interface_xml')
if not os.path.exists('assemble_xml'):
os.makedirs('assemble_xml')
pdbid = f[-8:-4].lower()
subprocess.call(['pisa',pdbid,'-analyse',f])
    interface_xml_fname = os.path.join('interface_xml', pdbid + '_interface.xml')
assemble_xml_fname = os.path.join('assemble_xml',pdbid+'_assemble.xml')
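    # with shell=True on Windows the argument list is joined into a single
    # cmd.exe command line, which is what makes the '>' redirection work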
subprocess.call(['pisa',pdbid,'-xml','interfaces','>',interface_xml_fname],shell=True)
subprocess.call(['pisa',pdbid,'-xml','assemblies','>',assemble_xml_fname],shell=True)
# output = subprocess.check_output(['pisa',pdbid,'-detail','interfaces',str(interface_num)],shell=True)
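    # probe a range of interface ids: pisa exits non-zero for ids that do not
    # exist, check_output then raises CalledProcessError and the id is skipped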
for interface_num in range(100,200):
try:
output = subprocess.check_output(['pisa',pdbid,'-detail','interfaces',str(interface_num)],shell=True)
detail_fname = os.path.join('detail',pdbid+'_'+str(interface_num)+'_detail.txt')
subprocess.call(['pisa',pdbid,'-detail','interfaces',str(interface_num),'>',detail_fname],shell=True)
except:
continue
def main():
parameters = []
for root,dirs,files in os.walk(sys.argv[-1]):
for f in files:
if f[-4:] == '.pdb' and len(f) == 8:
f = os.path.join(root,f)
parameters.append(f)
p = Pool(8)
p.map(pisa,parameters)
p.close()
if __name__ == "__main__":
main()
| cc0-1.0 | 6,509,973,869,992,154,000 | 34.943662 | 113 | 0.617947 | false | 2.804396 | false | false | false |
scowcron/ImagesOfNetwork | images_of/entrypoints/audit_mods.py | 1 | 1450 | import click
from images_of import command, settings, Reddit
@command
@click.option('--print-mods', is_flag=True, help='List the non-default moderators for all subreddits')
def main(print_mods):
"""Find subs without mods and disenfranchised mods"""
mods = settings.DEFAULT_MODS
r = Reddit('Moderator Auditor v0.1')
r.oauth()
subs = sorted([sub['name'] for sub in settings.CHILD_SUBS])
empty_subs = list()
orphan_mods = dict()
s = r.get_subreddit(settings.PARENT_SUB)
main_sub_mods = [u.name for u in s.get_moderators()]
for sub in subs:
s = r.get_subreddit(sub)
cur_mods = [u.name for u in s.get_moderators()]
real_mods = [m for m in cur_mods if m not in mods]
if not real_mods:
empty_subs.append(sub)
else:
if print_mods:
print('{} : {}'.format(sub, real_mods))
for m in [i for i in real_mods if i not in main_sub_mods]:
orphan_mods[m] = orphan_mods.get(m, []) + [sub]
print()
print('Unmoderated Subreddits: {}'.format(len(empty_subs)))
print('-----------------------')
for sub in sorted(empty_subs):
print(sub)
print()
print('Orphaned Moderators: {}'.format(len(orphan_mods)))
print('-------------------------')
for m, s in orphan_mods.items():
print('{} : {}'.format(m, s))
if __name__ == '__main__':
main()
| mit | 2,044,205,861,254,028,000 | 26.884615 | 102 | 0.551034 | false | 3.460621 | false | false | false |
msmbuilder/mdentropy | mdentropy/cli/main.py | 1 | 1372 | from __future__ import print_function, absolute_import, division
import sys
import argparse
from .. import __version__
from . import dmutinf
from . import dtent
def main():
help = ('MDEntropy is a python library that allows users to perform '
'information-theoretic analyses on molecular dynamics (MD) '
'trajectories.')
p = argparse.ArgumentParser(description=help)
p.add_argument(
'-V', '--version',
action='version',
version='mdentropy %s' % __version__,
)
sub_parsers = p.add_subparsers(
metavar='command',
dest='cmd',
)
dmutinf.configure_parser(sub_parsers)
dtent.configure_parser(sub_parsers)
if len(sys.argv) == 1:
sys.argv.append('-h')
args = p.parse_args()
args_func(args, p)
def args_func(args, p):
try:
args.func(args, p)
except RuntimeError as e:
sys.exit("Error: %s" % e)
except Exception as e:
if e.__class__.__name__ not in ('ScannerError', 'ParserError'):
message = """\
An unexpected error has occurred with mdentropy (version %s), please
consider sending the following traceback to the mdentropy GitHub issue tracker at:
https://github.com/msmbuilder/mdentropy/issues
"""
print(message % __version__, file=sys.stderr)
raise # as if we did not catch it
| mit | 5,335,409,560,070,120,000 | 27 | 82 | 0.620991 | false | 3.728261 | false | false | false |