repo_name
stringlengths 6
100
| path
stringlengths 4
294
| copies
stringlengths 1
5
| size
stringlengths 4
6
| content
stringlengths 606
896k
| license
stringclasses 15
values | var_hash
int64 -9,223,186,179,200,150,000
9,223,291,175B
| doc_hash
int64 -9,223,304,365,658,930,000
9,223,309,051B
| line_mean
float64 3.5
99.8
| line_max
int64 13
999
| alpha_frac
float64 0.25
0.97
| autogenerated
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|
nevercast/home-assistant | homeassistant/components/sensor/sabnzbd.py | 3 | 4100 | """
homeassistant.components.sensor.sabnzbd
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Monitors SABnzbd NZB client API.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.sabnzbd/
"""
from datetime import timedelta
import logging
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
REQUIREMENTS = ['https://github.com/jamespcole/home-assistant-nzb-clients/'
'archive/616cad59154092599278661af17e2a9f2cf5e2a9.zip'
'#python-sabnzbd==0.1']
SENSOR_TYPES = {
'current_status': ['Status', ''],
'speed': ['Speed', 'MB/s'],
'queue_size': ['Queue', 'MB'],
'queue_remaining': ['Left', 'MB'],
'disk_size': ['Disk', 'GB'],
'disk_free': ['Disk Free', 'GB'],
}
_LOGGER = logging.getLogger(__name__)
_THROTTLED_REFRESH = None
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
""" Sets up the SABnzbd sensors. """
from pysabnzbd import SabnzbdApi, SabnzbdApiException
api_key = config.get("api_key")
base_url = config.get("base_url")
name = config.get("name", "SABnzbd")
if not base_url:
_LOGGER.error('Missing config variable base_url')
return False
if not api_key:
_LOGGER.error('Missing config variable api_key')
return False
sab_api = SabnzbdApi(base_url, api_key)
try:
sab_api.check_available()
except SabnzbdApiException:
_LOGGER.exception("Connection to SABnzbd API failed.")
return False
# pylint: disable=global-statement
global _THROTTLED_REFRESH
_THROTTLED_REFRESH = Throttle(timedelta(seconds=1))(sab_api.refresh_queue)
dev = []
for variable in config['monitored_variables']:
if variable['type'] not in SENSOR_TYPES:
_LOGGER.error('Sensor type: "%s" does not exist', variable['type'])
else:
dev.append(SabnzbdSensor(variable['type'], sab_api, name))
add_devices(dev)
class SabnzbdSensor(Entity):
""" Represents an SABnzbd sensor. """
def __init__(self, sensor_type, sabnzb_client, client_name):
self._name = SENSOR_TYPES[sensor_type][0]
self.sabnzb_client = sabnzb_client
self.type = sensor_type
self.client_name = client_name
self._state = None
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
@property
def name(self):
return self.client_name + ' ' + self._name
@property
def state(self):
""" Returns the state of the device. """
return self._state
@property
def unit_of_measurement(self):
""" Unit of measurement of this entity, if any. """
return self._unit_of_measurement
def refresh_sabnzbd_data(self):
""" Calls the throttled SABnzbd refresh method. """
if _THROTTLED_REFRESH is not None:
from pysabnzbd import SabnzbdApiException
try:
_THROTTLED_REFRESH()
except SabnzbdApiException:
_LOGGER.exception(
self.name + " Connection to SABnzbd API failed."
)
def update(self):
self.refresh_sabnzbd_data()
if self.sabnzb_client.queue:
if self.type == 'current_status':
self._state = self.sabnzb_client.queue.get('status')
elif self.type == 'speed':
mb_spd = float(self.sabnzb_client.queue.get('kbpersec')) / 1024
self._state = round(mb_spd, 1)
elif self.type == 'queue_size':
self._state = self.sabnzb_client.queue.get('mb')
elif self.type == 'queue_remaining':
self._state = self.sabnzb_client.queue.get('mbleft')
elif self.type == 'disk_size':
self._state = self.sabnzb_client.queue.get('diskspacetotal1')
elif self.type == 'disk_free':
self._state = self.sabnzb_client.queue.get('diskspace1')
else:
self._state = 'Unknown'
| mit | -2,726,056,907,172,945,400 | -3,029,040,925,710,498,300 | 32.333333 | 79 | 0.602439 | false |
creativcoder/servo | tests/wpt/css-tests/tools/wptserve/wptserve/constants.py | 141 | 4619 | from . import utils
content_types = utils.invert_dict({"text/html": ["htm", "html"],
"application/json": ["json"],
"application/xhtml+xml": ["xht", "xhtm", "xhtml"],
"application/xml": ["xml"],
"application/x-xpinstall": ["xpi"],
"text/javascript": ["js"],
"text/css": ["css"],
"text/plain": ["txt", "md"],
"image/svg+xml": ["svg"],
"image/gif": ["gif"],
"image/jpeg": ["jpg", "jpeg"],
"image/png": ["png"],
"image/bmp": ["bmp"],
"text/event-stream": ["event_stream"],
"text/cache-manifest": ["manifest"],
"video/mp4": ["mp4", "m4v"],
"audio/mp4": ["m4a"],
"audio/mpeg": ["mp3"],
"video/webm": ["webm"],
"audio/webm": ["weba"],
"video/ogg": ["ogg", "ogv"],
"audio/ogg": ["oga"],
"audio/x-wav": ["wav"],
"text/vtt": ["vtt"],})
response_codes = {
100: ('Continue', 'Request received, please continue'),
101: ('Switching Protocols',
'Switching to new protocol; obey Upgrade header'),
200: ('OK', 'Request fulfilled, document follows'),
201: ('Created', 'Document created, URL follows'),
202: ('Accepted',
'Request accepted, processing continues off-line'),
203: ('Non-Authoritative Information', 'Request fulfilled from cache'),
204: ('No Content', 'Request fulfilled, nothing follows'),
205: ('Reset Content', 'Clear input form for further input.'),
206: ('Partial Content', 'Partial content follows.'),
300: ('Multiple Choices',
'Object has several resources -- see URI list'),
301: ('Moved Permanently', 'Object moved permanently -- see URI list'),
302: ('Found', 'Object moved temporarily -- see URI list'),
303: ('See Other', 'Object moved -- see Method and URL list'),
304: ('Not Modified',
'Document has not changed since given time'),
305: ('Use Proxy',
'You must use proxy specified in Location to access this '
'resource.'),
307: ('Temporary Redirect',
'Object moved temporarily -- see URI list'),
400: ('Bad Request',
'Bad request syntax or unsupported method'),
401: ('Unauthorized',
'No permission -- see authorization schemes'),
402: ('Payment Required',
'No payment -- see charging schemes'),
403: ('Forbidden',
'Request forbidden -- authorization will not help'),
404: ('Not Found', 'Nothing matches the given URI'),
405: ('Method Not Allowed',
'Specified method is invalid for this resource.'),
406: ('Not Acceptable', 'URI not available in preferred format.'),
407: ('Proxy Authentication Required', 'You must authenticate with '
'this proxy before proceeding.'),
408: ('Request Timeout', 'Request timed out; try again later.'),
409: ('Conflict', 'Request conflict.'),
410: ('Gone',
'URI no longer exists and has been permanently removed.'),
411: ('Length Required', 'Client must specify Content-Length.'),
412: ('Precondition Failed', 'Precondition in headers is false.'),
413: ('Request Entity Too Large', 'Entity is too large.'),
414: ('Request-URI Too Long', 'URI is too long.'),
415: ('Unsupported Media Type', 'Entity body in unsupported format.'),
416: ('Requested Range Not Satisfiable',
'Cannot satisfy request range.'),
417: ('Expectation Failed',
'Expect condition could not be satisfied.'),
500: ('Internal Server Error', 'Server got itself in trouble'),
501: ('Not Implemented',
'Server does not support this operation'),
502: ('Bad Gateway', 'Invalid responses from another server/proxy.'),
503: ('Service Unavailable',
'The server cannot process the request due to a high load'),
504: ('Gateway Timeout',
'The gateway server did not receive a timely response'),
505: ('HTTP Version Not Supported', 'Cannot fulfill request.'),
}
| mpl-2.0 | -8,286,468,505,545,221,000 | -6,661,118,824,177,308,000 | 49.206522 | 85 | 0.512449 | false |
jGaboardi/LP_MIP | Gurobi_Dual_Standard.py | 1 | 3689 | '''
GNU LESSER GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
'''
# Building a Standard Dual Linear Programming Problem
# in Python/Gurobi[gurobipy]
'''
Adapted from:
Daskin, M. S.
1995
Network and Discrete Location: Models, Algorithms, and Applications
Hoboken, NJ, USA: John Wiley & Sons, Inc.
'''
# Imports
import numpy as np
import gurobipy as gbp
import datetime as dt
def GbpDualStd():
# Constants
Aij = np.random.randint(5, 50, 25)
Aij = Aij.reshape(5,5)
AijSum = np.sum(Aij)
Cj = np.random.randint(10, 20, 5)
CjSum = np.sum(Cj)
Bi = np.random.randint(10, 20, 5)
BiSum = np.sum(Bi)
# Matrix Shape
rows = range(len(Aij))
cols = range(len(Aij[0]))
# Instantiate Model
mDual_Standard_GUROBI = gbp.Model(' -- Standard Dual Linear Programming Problem -- ')
# Set Focus to Optimality
gbp.setParam('MIPFocus', 2)
# Decision Variables
desc_var = []
for orig in rows:
desc_var.append([])
desc_var[orig].append(mDual_Standard_GUROBI.addVar(vtype=gbp.GRB.CONTINUOUS,
name='u'+str(orig+1)))
# Slack Variables
slack_var = []
for dest in cols:
slack_var.append([])
slack_var[dest].append(mDual_Standard_GUROBI.addVar(vtype=gbp.GRB.CONTINUOUS,
name='t'+str(dest+1)))
# Update Model
mDual_Standard_GUROBI.update()
#Objective Function
mDual_Standard_GUROBI.setObjective(gbp.quicksum(Bi[orig]*desc_var[orig][0]
for orig in rows),
gbp.GRB.MAXIMIZE)
# Constraints
for dest in cols:
mDual_Standard_GUROBI.addConstr(gbp.quicksum(Aij[orig][dest]*desc_var[orig][0]
for orig in rows) +
slack_var[dest][0] -
Cj[dest] == 0)
# Optimize
try:
mDual_Standard_GUROBI.optimize()
except Exception as e:
print ' ################################################################'
print ' < ISSUE : ', e, ' >'
print ' ################################################################'
# Write LP file
mDual_Standard_GUROBI.write('LP.lp')
print '\n*************************************************************************'
print ' | Decision Variables'
for v in mDual_Standard_GUROBI.getVars():
print ' | ', v.VarName, '=', v.x
print '*************************************************************************'
val = mDual_Standard_GUROBI.objVal
print ' | Objective Value ------------------ ', val
print ' | Aij Sum -------------------------- ', AijSum
print ' | Cj Sum --------------------------- ', CjSum
print ' | Bi Sum --------------------------- ', BiSum
print ' | Matrix Dimensions ---------------- ', Aij.shape
print ' | Date/Time ------------------------ ', dt.datetime.now()
print '*************************************************************************'
print '-- Gurobi Standard Dual Linear Programming Problem --'
try:
GbpDualCStd()
print '\nJames Gaboardi, 2015'
except Exception as e:
print ' ################################################################'
print ' < ISSUE : ', e, ' >'
print ' ################################################################' | lgpl-3.0 | 548,154,984,693,880,000 | 2,068,976,986,259,175,400 | 35.9 | 89 | 0.465709 | false |
OpenMined/PySyft | packages/syft/src/syft/lib/pandas/categorical_dtype.py | 1 | 1173 | # third party
import pandas as pd
# syft relative
from ...generate_wrapper import GenerateWrapper
from ...lib.python.list import List
from ...lib.python.primitive_factory import PrimitiveFactory
from ...proto.lib.pandas.categorical_pb2 import (
PandasCategoricalDtype as PandasCategoricalDtype_PB,
)
def object2proto(obj: pd.CategoricalDtype) -> PandasCategoricalDtype_PB:
# since pd.Index type is not integrated converted obj.categories to List
pd_cat_list = PrimitiveFactory.generate_primitive(value=obj.categories.tolist())
cat_list_proto = pd_cat_list._object2proto()
return PandasCategoricalDtype_PB(
id=cat_list_proto.id, categories=cat_list_proto, ordered=obj.ordered
)
def proto2object(proto: PandasCategoricalDtype_PB) -> pd.CategoricalDtype:
categories = List._proto2object(proto.categories).upcast()
ordered = proto.ordered
return pd.CategoricalDtype(categories=categories, ordered=ordered)
GenerateWrapper(
wrapped_type=pd.CategoricalDtype,
import_path="pandas.CategoricalDtype",
protobuf_scheme=PandasCategoricalDtype_PB,
type_object2proto=object2proto,
type_proto2object=proto2object,
)
| apache-2.0 | 4,604,379,653,059,531,300 | -1,353,565,426,342,675,200 | 32.514286 | 84 | 0.768116 | false |
boltnev/iktomi | iktomi/forms/fields.py | 3 | 14203 | # -*- coding: utf-8 -*-
import logging
import six
import cgi
import re
from . import convs, widgets
from ..utils import cached_property
from collections import OrderedDict
from .perms import FieldPerm
logger = logging.getLogger(__name__)
__all__ = ['BaseField', 'Field', 'FieldBlock', 'FieldSet', 'FieldList', 'FileField']
class BaseField(object):
'''
Simple container class which ancestors represents various parts of Form.
Encapsulates converter, various fields attributes, methods for data
access control
'''
# obsolete parameters from previous versions
_obsolete = frozenset(['default', 'get_default', 'template', 'media',
'render_type', 'render', 'required'])
#: :class:`FieldPerm` instance determining field's access permissions.
#: Can be set by field inheritance or throught constructor.
perm_getter = FieldPerm()
# defaults
#: :class:`Converter` instance determining field's convertation method
conv = convs.Char()
#: :class:`Widget` instance determining field's render method
widget = widgets.TextInput
#: Unicode label of the field
label = None
#: Short description of the field
hint = None
help = ''
def __init__(self, name, conv=None, parent=None, permissions=None, **kwargs):
if self._obsolete & set(kwargs):
raise TypeError(
'Obsolete parameters are used: {}'.format(
list(self._obsolete & set(kwargs))))
kwargs.update(
parent=parent,
name=name,
conv=(conv or self.conv)(field=self),
widget=(kwargs.get('widget') or self.widget)(field=self),
)
if permissions is not None:
kwargs['perm_getter'] = FieldPerm(permissions)
self._init_kwargs = kwargs
self.__dict__.update(kwargs)
def __call__(self, **kwargs):
'''
Creates current object's copy with extra constructor arguments passed.
'''
params = dict(self._init_kwargs, **kwargs)
return self.__class__(**params)
@property
def multiple(self):
return self.conv.multiple
@property
def env(self):
return self.parent.env
@property
def form(self):
return self.parent.form
@property
def input_name(self):
'''
Name of field's input element generated in account to possible
nesting of fields. The input name is to be used in templates as value
of Input (Select, etc) element's Name attribute and Label element's For
attribute.
'''
return self.parent.prefix + self.name
@property
def error(self):
'''
String description of validation error in this field during last accept.
`None` if there is no error.
'''
return self.form.errors.get(self.input_name)
@property
def help_message(self):
return self.help or self.form.get_help(self.input_name)
@cached_property
def clean_value(self):
'''
Current field's converted value from form's python_data.
'''
# XXX cached_property is used only for set initial state
# this property should be set every time field data
# has been changed, for instance, in accept method
python_data = self.parent.python_data
if self.name in python_data:
return python_data[self.name]
return self.get_initial()
@property
def id(self):
if self.form.id:
# We use template names in list to replace, so we must use it here to
# insure unique IDs.
return '{}-{}'.format(self.form.id, self.input_name)
return self.input_name
def from_python(self, value):
return self.conv.from_python(value)
@cached_property
def permissions(self):
'''
Field's access permissions. By default, is filled from perm_getter.
'''
return self.perm_getter.get_perms(self)
@cached_property
def writable(self):
return 'w' in self.permissions
@cached_property
def readable(self):
return 'r' in self.permissions
@cached_property
def field_names(self):
return [self.name]
def load_initial(self, initial, raw_data):
value = initial.get(self.name, self.get_initial())
self.set_raw_value(raw_data,
self.from_python(value))
return {self.name: value}
def __repr__(self):
args = ', '.join([k+'='+repr(v)
for k, v in self._init_kwargs.items()
if k not in ['widget', 'conv', 'parent']])
return '{}({})'.format(self.__class__.__name__, args)
class Field(BaseField):
'''
Atomic field
'''
conv = convs.Char()
_null_value = ''
def get_initial(self):
if hasattr(self, 'initial'):
return self.initial
if self.multiple:
return []
return None
@property
def raw_value(self):
if self.multiple:
return self.form.raw_data.getall(self.input_name)
else:
return self.form.raw_data.get(self.input_name, '')
def set_raw_value(self, raw_data, value):
if self.multiple:
try:
del raw_data[self.input_name]
except KeyError:
pass
for v in value:
raw_data.add(self.input_name, v)
else:
raw_data[self.input_name] = value
def _check_value_type(self, values):
if not self.multiple:
values = [values]
for value in values:
if not isinstance(value, six.string_types):
self.form.errors[self.input_name] = 'Given value has incompatible type'
return False
return True
def accept(self):
'''Extracts raw value from form's raw data and passes it to converter'''
value = self.raw_value
if not self._check_value_type(value):
# XXX should this be silent or TypeError?
value = [] if self.multiple else self._null_value
self.clean_value = self.conv.accept(value)
return {self.name: self.clean_value}
class AggregateField(BaseField):
@property
def python_data(self):
'''Representation of aggregate value as dictionary.'''
try:
value = self.clean_value
except LookupError:
# XXX is this necessary?
value = self.get_initial()
return self.from_python(value)
class FieldSet(AggregateField):
'''
Container field aggregating a couple of other different fields
'''
conv = convs.Converter()
widget = widgets.FieldSetWidget()
fields = []
def __init__(self, name, conv=None, fields=None, **kwargs):
fields = fields if fields is not None else self.fields
if kwargs.get('parent'):
conv = (conv or self.conv)(field=self)
fields = [field(parent=self) for field in fields]
kwargs.update(
name=name,
conv=conv,
fields=fields,
)
BaseField.__init__(self, **kwargs)
@property
def prefix(self):
return self.input_name+'.'
def get_field(self, name):
names = name.split('.', 1)
for field in self.fields:
if isinstance(field, FieldBlock):
result = field.get_field(name)
if result is not None:
return result
if field.name == names[0]:
if len(names) > 1:
return field.get_field(names[1])
return field
return None
def get_initial(self):
field_names = sum([x.field_names for x in self.fields], [])
result = dict((name, self.get_field(name).get_initial())
for name in field_names)
return self.conv.accept(result, silent=True)
def set_raw_value(self, raw_data, value):
# fills in raw_data multidict, resulting keys are field's absolute names
assert isinstance(value, dict), \
'To set raw value on {!r} need dict, got {!r}'\
.format(self.input_name, value)
if not value:
# Field set can be optional
return
field_names = sum([x.field_names for x in self.fields], [])
for field_name in field_names:
subvalue = value[field_name]
field = self.get_field(field_name)
field.set_raw_value(raw_data, field.from_python(subvalue))
def accept(self):
'''
Accepts all children fields, collects resulting values into dict and
passes that dict to converter.
Returns result of converter as separate value in parent `python_data`
'''
result = dict(self.python_data)
for field in self.fields:
if field.writable:
result.update(field.accept())
else:
# readonly field
field.set_raw_value(self.form.raw_data,
field.from_python(result[field.name]))
self.clean_value = self.conv.accept(result)
return {self.name: self.clean_value}
class FieldBlock(FieldSet):
'''
Anonymous FieldSet, values of one are accepted as they are children
of FieldBlock's parent.
FieldBlock is used to logically organize fields and do validation
of group of fields without naming that group and without dedicating
result of accept to separate object.
'''
conv = convs.FieldBlockConv()
widget = widgets.FieldBlockWidget()
prefix = ''
def __init__(self, title, fields=[], **kwargs):
kwargs.update(
title=title,
fields=fields,
)
kwargs.setdefault('name', '') # XXX generate unique name
FieldSet.__init__(self, **kwargs)
@cached_property
def prefix(self):
return self.parent.prefix
def accept(self):
'''
Acts as `Field.accepts` but returns result of every child field
as value in parent `python_data`.
'''
result = FieldSet.accept(self)
self.clean_value = result[self.name]
return self.clean_value
def load_initial(self, initial, raw_data):
result = {}
for field in self.fields:
result.update(field.load_initial(initial, raw_data))
return result
@cached_property
def field_names(self):
result = []
for field in self.fields:
result += field.field_names
return result
@property
def python_data(self):
# we need only subfield values in python data
result = {}
for field_name in self.field_names:
if field_name in self.parent.python_data:
result[field_name] = self.parent.python_data[field_name]
return result
class FieldList(AggregateField):
'''
Container aggregating an ordered set of similar fields
'''
order = True
conv = convs.List()
widget = widgets.FieldListWidget()
_digit_re = re.compile('\d+$')
def __init__(self, name, conv=None, field=Field(None),
parent=None, **kwargs):
if parent:
conv = (conv or self.conv)(field=self)
field = field(parent=self)
kwargs.update(
parent=parent,
name=name,
conv=conv,
field=field,
)
BaseField.__init__(self, **kwargs)
@property
def prefix(self):
# NOTE: There was '-' instead of '.' and get_field('list-1') was broken
return self.input_name+'.'
def get_initial(self):
return []
def get_field(self, name):
names = name.split('.', 1)
if not self._digit_re.match(names[0]):
# XXX is this needed?
return None
field = self.field(name=names[0])
if len(names) > 1:
return field.get_field(names[1])
return field
@property
def indices_input_name(self):
return self.input_name+'-indices'
def accept(self):
old = self.python_data
result = OrderedDict()
for index in self.form.raw_data.getall(self.indices_input_name):
try:
#XXX: we do not convert index to int, just check it.
# is it good idea?
int(index)
except ValueError:
logger.warning('Got incorrect index from form: %r', index)
continue
#TODO: describe this
field = self.field(name=str(index))
if not field.writable:
# readonly field
if index in old:
result[field.name] = old[field.name]
else:
result.update(field.accept())
self.clean_value = self.conv.accept(result)
return {self.name: self.clean_value}
def set_raw_value(self, raw_data, value):
indices = []
for index in range(1, len(value)+1):
index = str(index)
subvalue = value[index]
subfield = self.field(name=index)
subfield.set_raw_value(raw_data, subfield.from_python(subvalue))
indices.append(index)
try:
del raw_data[self.indices_input_name]
except KeyError:
pass
for index in indices:
raw_data.add(self.indices_input_name, index)
class FileField(Field):
'''
The simpliest file field
'''
_null_value = None
conv = convs.SimpleFile()
def set_raw_value(self, raw_data, value):
pass
def _check_value_type(self, values):
if not self.multiple:
values = [values]
for value in values:
if not isinstance(value, cgi.FieldStorage) and \
value and \
not hasattr(value, 'read'): # XXX is this right?
self.form.errors[self.input_name] = 'Given value is not file'
return False
return True
| mit | 1,354,166,745,902,632,700 | 4,818,443,566,504,797,000 | 29.676026 | 87 | 0.569809 | false |
catkin/xylem | xylem/load_url.py | 1 | 2512 | # Copyright 2014 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper to download content from url."""
from __future__ import unicode_literals
import socket
import time
from six.moves.urllib.error import HTTPError
from six.moves.urllib.error import URLError
from six.moves.urllib.request import urlopen
import cgi
from xylem.exception import raise_from
from xylem.exception import XylemError
from xylem.text_utils import to_str
class DownloadFailure(XylemError):
"""Failure downloading data for I/O or other reasons."""
def load_url(url, retry=2, retry_period=1, timeout=10):
"""Load a given url with retries, retry_periods, and timeouts.
:param str url: URL to load and return contents of
:param int retry: number of times to retry the url on 503 or timeout
:param float retry_period: time to wait between retries in seconds
:param float timeout: timeout for opening the URL in seconds
:retunrs: loaded data as string
:rtype: str
:raises DownloadFailure: if loading fails even after retries
"""
retry = max(retry, 0) # negative retry count causes infinite loop
while True:
try:
req = urlopen(url, timeout=timeout)
except HTTPError as e:
if e.code == 503 and retry:
retry -= 1
time.sleep(retry_period)
else:
raise_from(DownloadFailure, "Failed to load url '{0}'.".
format(url), e)
except URLError as e:
if isinstance(e.reason, socket.timeout) and retry:
retry -= 1
time.sleep(retry_period)
else:
raise_from(DownloadFailure, "Failed to load url '{0}'.".
format(url), e)
else:
break
_, params = cgi.parse_header(req.headers.get('Content-Type', ''))
encoding = params.get('charset', 'utf-8')
data = req.read()
return to_str(data, encoding=encoding)
| apache-2.0 | 886,434,306,750,957,400 | 8,005,134,366,786,097,000 | 34.885714 | 74 | 0.662022 | false |
firebitsbr/infernal-twin | build/pillow/Scripts/painter.py | 11 | 2037 | #!/usr/bin/env python
#
# The Python Imaging Library
# $Id$
#
# this demo script illustrates pasting into an already displayed
# photoimage. note that the current version of Tk updates the whole
# image everytime we paste, so to get decent performance, we split
# the image into a set of tiles.
#
try:
from tkinter import Tk, Canvas, NW
except ImportError:
from Tkinter import Tk, Canvas, NW
from PIL import Image, ImageTk
import sys
#
# painter widget
class PaintCanvas(Canvas):
def __init__(self, master, image):
Canvas.__init__(self, master, width=image.size[0], height=image.size[1])
# fill the canvas
self.tile = {}
self.tilesize = tilesize = 32
xsize, ysize = image.size
for x in range(0, xsize, tilesize):
for y in range(0, ysize, tilesize):
box = x, y, min(xsize, x+tilesize), min(ysize, y+tilesize)
tile = ImageTk.PhotoImage(image.crop(box))
self.create_image(x, y, image=tile, anchor=NW)
self.tile[(x, y)] = box, tile
self.image = image
self.bind("<B1-Motion>", self.paint)
def paint(self, event):
xy = event.x - 10, event.y - 10, event.x + 10, event.y + 10
im = self.image.crop(xy)
# process the image in some fashion
im = im.convert("L")
self.image.paste(im, xy)
self.repair(xy)
def repair(self, box):
# update canvas
dx = box[0] % self.tilesize
dy = box[1] % self.tilesize
for x in range(box[0]-dx, box[2]+1, self.tilesize):
for y in range(box[1]-dy, box[3]+1, self.tilesize):
try:
xy, tile = self.tile[(x, y)]
tile.paste(self.image.crop(xy))
except KeyError:
pass # outside the image
self.update_idletasks()
#
# main
root = Tk()
im = Image.open(sys.argv[1])
if im.mode != "RGB":
im = im.convert("RGB")
PaintCanvas(root, im).pack()
root.mainloop()
| gpl-3.0 | 6,530,281,266,218,700,000 | 5,872,656,525,906,099,000 | 25.115385 | 80 | 0.571919 | false |
Neamar/django | django/core/files/storage.py | 281 | 13339 | import errno
import os
import warnings
from datetime import datetime
from django.conf import settings
from django.core.exceptions import SuspiciousFileOperation
from django.core.files import File, locks
from django.core.files.move import file_move_safe
from django.utils._os import abspathu, safe_join
from django.utils.crypto import get_random_string
from django.utils.deconstruct import deconstructible
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import filepath_to_uri, force_text
from django.utils.functional import LazyObject
from django.utils.inspect import func_supports_parameter
from django.utils.module_loading import import_string
from django.utils.six.moves.urllib.parse import urljoin
from django.utils.text import get_valid_filename
__all__ = ('Storage', 'FileSystemStorage', 'DefaultStorage', 'default_storage')
class Storage(object):
"""
A base storage class, providing some default behaviors that all other
storage systems can inherit or override, as necessary.
"""
# The following methods represent a public interface to private methods.
# These shouldn't be overridden by subclasses unless absolutely necessary.
def open(self, name, mode='rb'):
"""
Retrieves the specified file from storage.
"""
return self._open(name, mode)
def save(self, name, content, max_length=None):
"""
Saves new content to the file specified by name. The content should be
a proper File object or any python file-like object, ready to be read
from the beginning.
"""
# Get the proper name for the file, as it will actually be saved.
if name is None:
name = content.name
if not hasattr(content, 'chunks'):
content = File(content)
if func_supports_parameter(self.get_available_name, 'max_length'):
name = self.get_available_name(name, max_length=max_length)
else:
warnings.warn(
'Backwards compatibility for storage backends without '
'support for the `max_length` argument in '
'Storage.get_available_name() will be removed in Django 1.10.',
RemovedInDjango110Warning, stacklevel=2
)
name = self.get_available_name(name)
name = self._save(name, content)
# Store filenames with forward slashes, even on Windows
return force_text(name.replace('\\', '/'))
# These methods are part of the public API, with default implementations.
def get_valid_name(self, name):
"""
Returns a filename, based on the provided filename, that's suitable for
use in the target storage system.
"""
return get_valid_filename(name)
def get_available_name(self, name, max_length=None):
"""
Returns a filename that's free on the target storage system, and
available for new content to be written to.
"""
dir_name, file_name = os.path.split(name)
file_root, file_ext = os.path.splitext(file_name)
# If the filename already exists, add an underscore and a random 7
# character alphanumeric string (before the file extension, if one
# exists) to the filename until the generated filename doesn't exist.
# Truncate original name if required, so the new filename does not
# exceed the max_length.
while self.exists(name) or (max_length and len(name) > max_length):
# file_ext includes the dot.
name = os.path.join(dir_name, "%s_%s%s" % (file_root, get_random_string(7), file_ext))
if max_length is None:
continue
# Truncate file_root if max_length exceeded.
truncation = len(name) - max_length
if truncation > 0:
file_root = file_root[:-truncation]
# Entire file_root was truncated in attempt to find an available filename.
if not file_root:
raise SuspiciousFileOperation(
'Storage can not find an available filename for "%s". '
'Please make sure that the corresponding file field '
'allows sufficient "max_length".' % name
)
name = os.path.join(dir_name, "%s_%s%s" % (file_root, get_random_string(7), file_ext))
return name
def path(self, name):
"""
Returns a local filesystem path where the file can be retrieved using
Python's built-in open() function. Storage systems that can't be
accessed using open() should *not* implement this method.
"""
raise NotImplementedError("This backend doesn't support absolute paths.")
# The following methods form the public API for storage systems, but with
# no default implementations. Subclasses must implement *all* of these.
def delete(self, name):
"""
Deletes the specified file from the storage system.
"""
raise NotImplementedError('subclasses of Storage must provide a delete() method')
def exists(self, name):
"""
Returns True if a file referenced by the given name already exists in the
storage system, or False if the name is available for a new file.
"""
raise NotImplementedError('subclasses of Storage must provide an exists() method')
def listdir(self, path):
"""
Lists the contents of the specified path, returning a 2-tuple of lists;
the first item being directories, the second item being files.
"""
raise NotImplementedError('subclasses of Storage must provide a listdir() method')
def size(self, name):
"""
Returns the total size, in bytes, of the file specified by name.
"""
raise NotImplementedError('subclasses of Storage must provide a size() method')
def url(self, name):
"""
Returns an absolute URL where the file's contents can be accessed
directly by a Web browser.
"""
raise NotImplementedError('subclasses of Storage must provide a url() method')
def accessed_time(self, name):
"""
Returns the last accessed time (as datetime object) of the file
specified by name.
"""
raise NotImplementedError('subclasses of Storage must provide an accessed_time() method')
def created_time(self, name):
"""
Returns the creation time (as datetime object) of the file
specified by name.
"""
raise NotImplementedError('subclasses of Storage must provide a created_time() method')
def modified_time(self, name):
"""
Returns the last modified time (as datetime object) of the file
specified by name.
"""
raise NotImplementedError('subclasses of Storage must provide a modified_time() method')
@deconstructible
class FileSystemStorage(Storage):
"""
Standard filesystem storage
"""
def __init__(self, location=None, base_url=None, file_permissions_mode=None,
directory_permissions_mode=None):
if location is None:
location = settings.MEDIA_ROOT
self.base_location = location
self.location = abspathu(self.base_location)
if base_url is None:
base_url = settings.MEDIA_URL
elif not base_url.endswith('/'):
base_url += '/'
self.base_url = base_url
self.file_permissions_mode = (
file_permissions_mode if file_permissions_mode is not None
else settings.FILE_UPLOAD_PERMISSIONS
)
self.directory_permissions_mode = (
directory_permissions_mode if directory_permissions_mode is not None
else settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS
)
def _open(self, name, mode='rb'):
return File(open(self.path(name), mode))
def _save(self, name, content):
full_path = self.path(name)
# Create any intermediate directories that do not exist.
# Note that there is a race between os.path.exists and os.makedirs:
# if os.makedirs fails with EEXIST, the directory was created
# concurrently, and we can continue normally. Refs #16082.
directory = os.path.dirname(full_path)
if not os.path.exists(directory):
try:
if self.directory_permissions_mode is not None:
# os.makedirs applies the global umask, so we reset it,
# for consistency with file_permissions_mode behavior.
old_umask = os.umask(0)
try:
os.makedirs(directory, self.directory_permissions_mode)
finally:
os.umask(old_umask)
else:
os.makedirs(directory)
except OSError as e:
if e.errno != errno.EEXIST:
raise
if not os.path.isdir(directory):
raise IOError("%s exists and is not a directory." % directory)
# There's a potential race condition between get_available_name and
# saving the file; it's possible that two threads might return the
# same name, at which point all sorts of fun happens. So we need to
# try to create the file, but if it already exists we have to go back
# to get_available_name() and try again.
while True:
try:
# This file has a file path that we can move.
if hasattr(content, 'temporary_file_path'):
file_move_safe(content.temporary_file_path(), full_path)
# This is a normal uploadedfile that we can stream.
else:
# This fun binary flag incantation makes os.open throw an
# OSError if the file already exists before we open it.
flags = (os.O_WRONLY | os.O_CREAT | os.O_EXCL |
getattr(os, 'O_BINARY', 0))
# The current umask value is masked out by os.open!
fd = os.open(full_path, flags, 0o666)
_file = None
try:
locks.lock(fd, locks.LOCK_EX)
for chunk in content.chunks():
if _file is None:
mode = 'wb' if isinstance(chunk, bytes) else 'wt'
_file = os.fdopen(fd, mode)
_file.write(chunk)
finally:
locks.unlock(fd)
if _file is not None:
_file.close()
else:
os.close(fd)
except OSError as e:
if e.errno == errno.EEXIST:
# Ooops, the file exists. We need a new file name.
name = self.get_available_name(name)
full_path = self.path(name)
else:
raise
else:
# OK, the file save worked. Break out of the loop.
break
if self.file_permissions_mode is not None:
os.chmod(full_path, self.file_permissions_mode)
return name
def delete(self, name):
assert name, "The name argument is not allowed to be empty."
name = self.path(name)
# If the file exists, delete it from the filesystem.
# Note that there is a race between os.path.exists and os.remove:
# if os.remove fails with ENOENT, the file was removed
# concurrently, and we can continue normally.
if os.path.exists(name):
try:
os.remove(name)
except OSError as e:
if e.errno != errno.ENOENT:
raise
def exists(self, name):
return os.path.exists(self.path(name))
def listdir(self, path):
path = self.path(path)
directories, files = [], []
for entry in os.listdir(path):
if os.path.isdir(os.path.join(path, entry)):
directories.append(entry)
else:
files.append(entry)
return directories, files
def path(self, name):
return safe_join(self.location, name)
def size(self, name):
return os.path.getsize(self.path(name))
def url(self, name):
if self.base_url is None:
raise ValueError("This file is not accessible via a URL.")
return urljoin(self.base_url, filepath_to_uri(name))
def accessed_time(self, name):
return datetime.fromtimestamp(os.path.getatime(self.path(name)))
def created_time(self, name):
return datetime.fromtimestamp(os.path.getctime(self.path(name)))
def modified_time(self, name):
return datetime.fromtimestamp(os.path.getmtime(self.path(name)))
def get_storage_class(import_path=None):
return import_string(import_path or settings.DEFAULT_FILE_STORAGE)
class DefaultStorage(LazyObject):
def _setup(self):
self._wrapped = get_storage_class()()
default_storage = DefaultStorage()
| bsd-3-clause | 8,884,785,607,021,297,000 | 83,586,703,464,820,500 | 38.81791 | 102 | 0.598021 | false |
Mirantis/swift-encrypt | swift/common/db_replicator.py | 3 | 29348 | # Copyright (c) 2010-2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import with_statement
import os
import random
import math
import time
import shutil
import uuid
import errno
import re
from eventlet import GreenPool, sleep, Timeout
from eventlet.green import subprocess
import simplejson
import swift.common.db
from swift.common.utils import get_logger, whataremyips, storage_directory, \
renamer, mkdirs, lock_parent_directory, config_true_value, \
unlink_older_than, dump_recon_cache, rsync_ip
from swift.common import ring
from swift.common.http import HTTP_NOT_FOUND, HTTP_INSUFFICIENT_STORAGE
from swift.common.bufferedhttp import BufferedHTTPConnection
from swift.common.exceptions import DriveNotMounted, ConnectionTimeout
from swift.common.daemon import Daemon
from swift.common.swob import Response, HTTPNotFound, HTTPNoContent, \
HTTPAccepted, HTTPBadRequest
DEBUG_TIMINGS_THRESHOLD = 10
def quarantine_db(object_file, server_type):
"""
In the case that a corrupt file is found, move it to a quarantined area to
allow replication to fix it.
:param object_file: path to corrupt file
:param server_type: type of file that is corrupt
('container' or 'account')
"""
object_dir = os.path.dirname(object_file)
quarantine_dir = os.path.abspath(
os.path.join(object_dir, '..', '..', '..', '..', 'quarantined',
server_type + 's', os.path.basename(object_dir)))
try:
renamer(object_dir, quarantine_dir)
except OSError, e:
if e.errno not in (errno.EEXIST, errno.ENOTEMPTY):
raise
quarantine_dir = "%s-%s" % (quarantine_dir, uuid.uuid4().hex)
renamer(object_dir, quarantine_dir)
def roundrobin_datadirs(datadirs):
"""
Generator to walk the data dirs in a round robin manner, evenly
hitting each device on the system, and yielding any .db files
found (in their proper places). The partitions within each data
dir are walked randomly, however.
:param datadirs: a list of (path, node_id) to walk
:returns: A generator of (partition, path_to_db_file, node_id)
"""
def walk_datadir(datadir, node_id):
partitions = os.listdir(datadir)
random.shuffle(partitions)
for partition in partitions:
part_dir = os.path.join(datadir, partition)
if not os.path.isdir(part_dir):
continue
suffixes = os.listdir(part_dir)
for suffix in suffixes:
suff_dir = os.path.join(part_dir, suffix)
if not os.path.isdir(suff_dir):
continue
hashes = os.listdir(suff_dir)
for hsh in hashes:
hash_dir = os.path.join(suff_dir, hsh)
if not os.path.isdir(hash_dir):
continue
object_file = os.path.join(hash_dir, hsh + '.db')
if os.path.exists(object_file):
yield (partition, object_file, node_id)
its = [walk_datadir(datadir, node_id) for datadir, node_id in datadirs]
while its:
for it in its:
try:
yield it.next()
except StopIteration:
its.remove(it)
class ReplConnection(BufferedHTTPConnection):
"""
Helper to simplify REPLICATEing to a remote server.
"""
def __init__(self, node, partition, hash_, logger):
""
self.logger = logger
self.node = node
BufferedHTTPConnection.__init__(self, '%(ip)s:%(port)s' % node)
self.path = '/%s/%s/%s' % (node['device'], partition, hash_)
def replicate(self, *args):
"""
Make an HTTP REPLICATE request
:param args: list of json-encodable objects
:returns: httplib response object
"""
try:
body = simplejson.dumps(args)
self.request('REPLICATE', self.path, body,
{'Content-Type': 'application/json'})
response = self.getresponse()
response.data = response.read()
return response
except (Exception, Timeout):
self.logger.exception(
_('ERROR reading HTTP response from %s'), self.node)
return None
class Replicator(Daemon):
"""
Implements the logic for directing db replication.
"""
def __init__(self, conf):
self.conf = conf
self.logger = get_logger(conf, log_route='replicator')
self.root = conf.get('devices', '/srv/node')
self.mount_check = config_true_value(conf.get('mount_check', 'true'))
self.port = int(conf.get('bind_port', self.default_port))
concurrency = int(conf.get('concurrency', 8))
self.cpool = GreenPool(size=concurrency)
swift_dir = conf.get('swift_dir', '/etc/swift')
self.ring = ring.Ring(swift_dir, ring_name=self.server_type)
self.per_diff = int(conf.get('per_diff', 1000))
self.max_diffs = int(conf.get('max_diffs') or 100)
self.interval = int(conf.get('interval') or
conf.get('run_pause') or 30)
self.vm_test_mode = config_true_value(conf.get('vm_test_mode', 'no'))
self.node_timeout = int(conf.get('node_timeout', 10))
self.conn_timeout = float(conf.get('conn_timeout', 0.5))
self.reclaim_age = float(conf.get('reclaim_age', 86400 * 7))
swift.common.db.DB_PREALLOCATION = \
config_true_value(conf.get('db_preallocation', 'f'))
self._zero_stats()
self.recon_cache_path = conf.get('recon_cache_path',
'/var/cache/swift')
self.recon_replicator = '%s.recon' % self.server_type
self.rcache = os.path.join(self.recon_cache_path,
self.recon_replicator)
self.extract_device_re = re.compile('%s%s([^%s]+)' % (
self.root, os.path.sep, os.path.sep))
def _zero_stats(self):
"""Zero out the stats."""
self.stats = {'attempted': 0, 'success': 0, 'failure': 0, 'ts_repl': 0,
'no_change': 0, 'hashmatch': 0, 'rsync': 0, 'diff': 0,
'remove': 0, 'empty': 0, 'remote_merge': 0,
'start': time.time(), 'diff_capped': 0}
def _report_stats(self):
"""Report the current stats to the logs."""
self.logger.info(
_('Attempted to replicate %(count)d dbs in %(time).5f seconds '
'(%(rate).5f/s)'),
{'count': self.stats['attempted'],
'time': time.time() - self.stats['start'],
'rate': self.stats['attempted'] /
(time.time() - self.stats['start'] + 0.0000001)})
self.logger.info(_('Removed %(remove)d dbs') % self.stats)
self.logger.info(_('%(success)s successes, %(failure)s failures')
% self.stats)
dump_recon_cache(
{'replication_stats': self.stats,
'replication_time': time.time() - self.stats['start'],
'replication_last': time.time()},
self.rcache, self.logger)
self.logger.info(' '.join(['%s:%s' % item for item in
self.stats.items() if item[0] in
('no_change', 'hashmatch', 'rsync', 'diff', 'ts_repl',
'empty', 'diff_capped')]))
def _rsync_file(self, db_file, remote_file, whole_file=True):
"""
Sync a single file using rsync. Used by _rsync_db to handle syncing.
:param db_file: file to be synced
:param remote_file: remote location to sync the DB file to
:param whole-file: if True, uses rsync's --whole-file flag
:returns: True if the sync was successful, False otherwise
"""
popen_args = ['rsync', '--quiet', '--no-motd',
'--timeout=%s' % int(math.ceil(self.node_timeout)),
'--contimeout=%s' % int(math.ceil(self.conn_timeout))]
if whole_file:
popen_args.append('--whole-file')
popen_args.extend([db_file, remote_file])
proc = subprocess.Popen(popen_args)
proc.communicate()
if proc.returncode != 0:
self.logger.error(_('ERROR rsync failed with %(code)s: %(args)s'),
{'code': proc.returncode, 'args': popen_args})
return proc.returncode == 0
def _rsync_db(self, broker, device, http, local_id,
replicate_method='complete_rsync', replicate_timeout=None):
"""
Sync a whole db using rsync.
:param broker: DB broker object of DB to be synced
:param device: device to sync to
:param http: ReplConnection object
:param local_id: unique ID of the local database replica
:param replicate_method: remote operation to perform after rsync
:param replicate_timeout: timeout to wait in seconds
"""
device_ip = rsync_ip(device['ip'])
if self.vm_test_mode:
remote_file = '%s::%s%s/%s/tmp/%s' % (
device_ip, self.server_type, device['port'], device['device'],
local_id)
else:
remote_file = '%s::%s/%s/tmp/%s' % (
device_ip, self.server_type, device['device'], local_id)
mtime = os.path.getmtime(broker.db_file)
if not self._rsync_file(broker.db_file, remote_file):
return False
# perform block-level sync if the db was modified during the first sync
if os.path.exists(broker.db_file + '-journal') or \
os.path.getmtime(broker.db_file) > mtime:
# grab a lock so nobody else can modify it
with broker.lock():
if not self._rsync_file(broker.db_file, remote_file, False):
return False
with Timeout(replicate_timeout or self.node_timeout):
response = http.replicate(replicate_method, local_id)
return response and response.status >= 200 and response.status < 300
def _usync_db(self, point, broker, http, remote_id, local_id):
"""
Sync a db by sending all records since the last sync.
:param point: synchronization high water mark between the replicas
:param broker: database broker object
:param http: ReplConnection object for the remote server
:param remote_id: database id for the remote replica
:param local_id: database id for the local replica
:returns: boolean indicating completion and success
"""
self.stats['diff'] += 1
self.logger.increment('diffs')
self.logger.debug(_('Syncing chunks with %s'), http.host)
sync_table = broker.get_syncs()
objects = broker.get_items_since(point, self.per_diff)
diffs = 0
while len(objects) and diffs < self.max_diffs:
diffs += 1
with Timeout(self.node_timeout):
response = http.replicate('merge_items', objects, local_id)
if not response or response.status >= 300 or response.status < 200:
if response:
self.logger.error(_('ERROR Bad response %(status)s from '
'%(host)s'),
{'status': response.status,
'host': http.host})
return False
point = objects[-1]['ROWID']
objects = broker.get_items_since(point, self.per_diff)
if objects:
self.logger.debug(_(
'Synchronization for %s has fallen more than '
'%s rows behind; moving on and will try again next pass.') %
(broker.db_file, self.max_diffs * self.per_diff))
self.stats['diff_capped'] += 1
self.logger.increment('diff_caps')
else:
with Timeout(self.node_timeout):
response = http.replicate('merge_syncs', sync_table)
if response and response.status >= 200 and response.status < 300:
broker.merge_syncs([{'remote_id': remote_id,
'sync_point': point}],
incoming=False)
return True
return False
def _in_sync(self, rinfo, info, broker, local_sync):
"""
Determine whether or not two replicas of a databases are considered
to be in sync.
:param rinfo: remote database info
:param info: local database info
:param broker: database broker object
:param local_sync: cached last sync point between replicas
:returns: boolean indicating whether or not the replicas are in sync
"""
if max(rinfo['point'], local_sync) >= info['max_row']:
self.stats['no_change'] += 1
self.logger.increment('no_changes')
return True
if rinfo['hash'] == info['hash']:
self.stats['hashmatch'] += 1
self.logger.increment('hashmatches')
broker.merge_syncs([{'remote_id': rinfo['id'],
'sync_point': rinfo['point']}],
incoming=False)
return True
def _http_connect(self, node, partition, db_file):
"""
Make an http_connection using ReplConnection
:param node: node dictionary from the ring
:param partition: partition partition to send in the url
:param db_file: DB file
:returns: ReplConnection object
"""
return ReplConnection(node, partition,
os.path.basename(db_file).split('.', 1)[0],
self.logger)
def _repl_to_node(self, node, broker, partition, info):
"""
Replicate a database to a node.
:param node: node dictionary from the ring to be replicated to
:param broker: DB broker for the DB to be replication
:param partition: partition on the node to replicate to
:param info: DB info as a dictionary of {'max_row', 'hash', 'id',
'created_at', 'put_timestamp', 'delete_timestamp',
'metadata'}
:returns: True if successful, False otherwise
"""
with ConnectionTimeout(self.conn_timeout):
http = self._http_connect(node, partition, broker.db_file)
if not http:
self.logger.error(
_('ERROR Unable to connect to remote server: %s'), node)
return False
with Timeout(self.node_timeout):
response = http.replicate(
'sync', info['max_row'], info['hash'], info['id'],
info['created_at'], info['put_timestamp'],
info['delete_timestamp'], info['metadata'])
if not response:
return False
elif response.status == HTTP_NOT_FOUND: # completely missing, rsync
self.stats['rsync'] += 1
self.logger.increment('rsyncs')
return self._rsync_db(broker, node, http, info['id'])
elif response.status == HTTP_INSUFFICIENT_STORAGE:
raise DriveNotMounted()
elif response.status >= 200 and response.status < 300:
rinfo = simplejson.loads(response.data)
local_sync = broker.get_sync(rinfo['id'], incoming=False)
if self._in_sync(rinfo, info, broker, local_sync):
return True
# if the difference in rowids between the two differs by
# more than 50%, rsync then do a remote merge.
if rinfo['max_row'] / float(info['max_row']) < 0.5:
self.stats['remote_merge'] += 1
self.logger.increment('remote_merges')
return self._rsync_db(broker, node, http, info['id'],
replicate_method='rsync_then_merge',
replicate_timeout=(info['count'] / 2000))
# else send diffs over to the remote server
return self._usync_db(max(rinfo['point'], local_sync),
broker, http, rinfo['id'], info['id'])
def _replicate_object(self, partition, object_file, node_id):
"""
Replicate the db, choosing method based on whether or not it
already exists on peers.
:param partition: partition to be replicated to
:param object_file: DB file name to be replicated
:param node_id: node id of the node to be replicated to
"""
start_time = time.time()
self.logger.debug(_('Replicating db %s'), object_file)
self.stats['attempted'] += 1
self.logger.increment('attempts')
try:
broker = self.brokerclass(object_file, pending_timeout=30)
broker.reclaim(time.time() - self.reclaim_age,
time.time() - (self.reclaim_age * 2))
info = broker.get_replication_info()
full_info = broker.get_info()
except (Exception, Timeout), e:
if 'no such table' in str(e):
self.logger.error(_('Quarantining DB %s'), object_file)
quarantine_db(broker.db_file, broker.db_type)
else:
self.logger.exception(_('ERROR reading db %s'), object_file)
self.stats['failure'] += 1
self.logger.increment('failures')
return
# The db is considered deleted if the delete_timestamp value is greater
# than the put_timestamp, and there are no objects.
delete_timestamp = 0
try:
delete_timestamp = float(info['delete_timestamp'])
except ValueError:
pass
put_timestamp = 0
try:
put_timestamp = float(info['put_timestamp'])
except ValueError:
pass
if delete_timestamp < (time.time() - self.reclaim_age) and \
delete_timestamp > put_timestamp and \
info['count'] in (None, '', 0, '0'):
if self.report_up_to_date(full_info):
self.delete_db(object_file)
self.logger.timing_since('timing', start_time)
return
responses = []
nodes = self.ring.get_part_nodes(int(partition))
shouldbehere = bool([n for n in nodes if n['id'] == node_id])
# See Footnote [1] for an explanation of the repl_nodes assignment.
i = 0
while i < len(nodes) and nodes[i]['id'] != node_id:
i += 1
repl_nodes = nodes[i + 1:] + nodes[:i]
more_nodes = self.ring.get_more_nodes(int(partition))
for node in repl_nodes:
success = False
try:
success = self._repl_to_node(node, broker, partition, info)
except DriveNotMounted:
repl_nodes.append(more_nodes.next())
self.logger.error(_('ERROR Remote drive not mounted %s'), node)
except (Exception, Timeout):
self.logger.exception(_('ERROR syncing %(file)s with node'
' %(node)s'),
{'file': object_file, 'node': node})
self.stats['success' if success else 'failure'] += 1
self.logger.increment('successes' if success else 'failures')
responses.append(success)
if not shouldbehere and all(responses):
# If the db shouldn't be on this node and has been successfully
# synced to all of its peers, it can be removed.
self.delete_db(object_file)
self.logger.timing_since('timing', start_time)
def delete_db(self, object_file):
hash_dir = os.path.dirname(object_file)
suf_dir = os.path.dirname(hash_dir)
with lock_parent_directory(object_file):
shutil.rmtree(hash_dir, True)
try:
os.rmdir(suf_dir)
except OSError, err:
if err.errno not in (errno.ENOENT, errno.ENOTEMPTY):
self.logger.exception(
_('ERROR while trying to clean up %s') % suf_dir)
self.stats['remove'] += 1
device_name = self.extract_device(object_file)
self.logger.increment('removes.' + device_name)
def extract_device(self, object_file):
"""
Extract the device name from an object path. Returns "UNKNOWN" if the
path could not be extracted successfully for some reason.
:param object_file: the path to a database file.
"""
match = self.extract_device_re.match(object_file)
if match:
return match.groups()[0]
return "UNKNOWN"
def report_up_to_date(self, full_info):
return True
def run_once(self, *args, **kwargs):
"""Run a replication pass once."""
self._zero_stats()
dirs = []
ips = whataremyips()
if not ips:
self.logger.error(_('ERROR Failed to get my own IPs?'))
return
for node in self.ring.devs:
if node and node['ip'] in ips and node['port'] == self.port:
if self.mount_check and not os.path.ismount(
os.path.join(self.root, node['device'])):
self.logger.warn(
_('Skipping %(device)s as it is not mounted') % node)
continue
unlink_older_than(
os.path.join(self.root, node['device'], 'tmp'),
time.time() - self.reclaim_age)
datadir = os.path.join(self.root, node['device'], self.datadir)
if os.path.isdir(datadir):
dirs.append((datadir, node['id']))
self.logger.info(_('Beginning replication run'))
for part, object_file, node_id in roundrobin_datadirs(dirs):
self.cpool.spawn_n(
self._replicate_object, part, object_file, node_id)
self.cpool.waitall()
self.logger.info(_('Replication run OVER'))
self._report_stats()
def run_forever(self, *args, **kwargs):
"""
Replicate dbs under the given root in an infinite loop.
"""
sleep(random.random() * self.interval)
while True:
begin = time.time()
try:
self.run_once()
except (Exception, Timeout):
self.logger.exception(_('ERROR trying to replicate'))
elapsed = time.time() - begin
if elapsed < self.interval:
sleep(self.interval - elapsed)
class ReplicatorRpc(object):
"""Handle Replication RPC calls. TODO(redbo): document please :)"""
def __init__(self, root, datadir, broker_class, mount_check=True,
logger=None):
self.root = root
self.datadir = datadir
self.broker_class = broker_class
self.mount_check = mount_check
self.logger = logger or get_logger({}, log_route='replicator-rpc')
def dispatch(self, replicate_args, args):
if not hasattr(args, 'pop'):
return HTTPBadRequest(body='Invalid object type')
op = args.pop(0)
drive, partition, hsh = replicate_args
if self.mount_check and \
not os.path.ismount(os.path.join(self.root, drive)):
return Response(status='507 %s is not mounted' % drive)
db_file = os.path.join(self.root, drive,
storage_directory(self.datadir, partition, hsh),
hsh + '.db')
if op == 'rsync_then_merge':
return self.rsync_then_merge(drive, db_file, args)
if op == 'complete_rsync':
return self.complete_rsync(drive, db_file, args)
else:
# someone might be about to rsync a db to us,
# make sure there's a tmp dir to receive it.
mkdirs(os.path.join(self.root, drive, 'tmp'))
if not os.path.exists(db_file):
return HTTPNotFound()
return getattr(self, op)(self.broker_class(db_file), args)
def sync(self, broker, args):
(remote_sync, hash_, id_, created_at, put_timestamp,
delete_timestamp, metadata) = args
timemark = time.time()
try:
info = broker.get_replication_info()
except (Exception, Timeout), e:
if 'no such table' in str(e):
self.logger.error(_("Quarantining DB %s") % broker.db_file)
quarantine_db(broker.db_file, broker.db_type)
return HTTPNotFound()
raise
timespan = time.time() - timemark
if timespan > DEBUG_TIMINGS_THRESHOLD:
self.logger.debug(_('replicator-rpc-sync time for info: %.02fs') %
timespan)
if metadata:
timemark = time.time()
broker.update_metadata(simplejson.loads(metadata))
timespan = time.time() - timemark
if timespan > DEBUG_TIMINGS_THRESHOLD:
self.logger.debug(_('replicator-rpc-sync time for '
'update_metadata: %.02fs') % timespan)
if info['put_timestamp'] != put_timestamp or \
info['created_at'] != created_at or \
info['delete_timestamp'] != delete_timestamp:
timemark = time.time()
broker.merge_timestamps(
created_at, put_timestamp, delete_timestamp)
timespan = time.time() - timemark
if timespan > DEBUG_TIMINGS_THRESHOLD:
self.logger.debug(_('replicator-rpc-sync time for '
'merge_timestamps: %.02fs') % timespan)
timemark = time.time()
info['point'] = broker.get_sync(id_)
timespan = time.time() - timemark
if timespan > DEBUG_TIMINGS_THRESHOLD:
self.logger.debug(_('replicator-rpc-sync time for get_sync: '
'%.02fs') % timespan)
if hash_ == info['hash'] and info['point'] < remote_sync:
timemark = time.time()
broker.merge_syncs([{'remote_id': id_,
'sync_point': remote_sync}])
info['point'] = remote_sync
timespan = time.time() - timemark
if timespan > DEBUG_TIMINGS_THRESHOLD:
self.logger.debug(_('replicator-rpc-sync time for '
'merge_syncs: %.02fs') % timespan)
return Response(simplejson.dumps(info))
def merge_syncs(self, broker, args):
broker.merge_syncs(args[0])
return HTTPAccepted()
def merge_items(self, broker, args):
broker.merge_items(args[0], args[1])
return HTTPAccepted()
def complete_rsync(self, drive, db_file, args):
old_filename = os.path.join(self.root, drive, 'tmp', args[0])
if os.path.exists(db_file):
return HTTPNotFound()
if not os.path.exists(old_filename):
return HTTPNotFound()
broker = self.broker_class(old_filename)
broker.newid(args[0])
renamer(old_filename, db_file)
return HTTPNoContent()
def rsync_then_merge(self, drive, db_file, args):
old_filename = os.path.join(self.root, drive, 'tmp', args[0])
if not os.path.exists(db_file) or not os.path.exists(old_filename):
return HTTPNotFound()
new_broker = self.broker_class(old_filename)
existing_broker = self.broker_class(db_file)
point = -1
objects = existing_broker.get_items_since(point, 1000)
while len(objects):
new_broker.merge_items(objects)
point = objects[-1]['ROWID']
objects = existing_broker.get_items_since(point, 1000)
sleep()
new_broker.newid(args[0])
renamer(old_filename, db_file)
return HTTPNoContent()
# Footnote [1]:
# This orders the nodes so that, given nodes a b c, a will contact b then c,
# b will contact c then a, and c will contact a then b -- in other words, each
# node will always contact the next node in the list first.
# This helps in the case where databases are all way out of sync, so each
# node is likely to be sending to a different node than it's receiving from,
# rather than two nodes talking to each other, starving out the third.
# If the third didn't even have a copy and the first two nodes were way out
# of sync, such starvation would mean the third node wouldn't get any copy
# until the first two nodes finally got in sync, which could take a while.
# This new ordering ensures such starvation doesn't occur, making the data
# more durable.
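# Illustrative sketch (not part of Swift): the rotated ordering described above
# can be produced by slicing the peer list at this node's own position, so the
# node that follows this one in the list is always contacted first.
def _example_rotated_order(nodes, self_index):
    """Hypothetical helper, for illustration only: order peers starting with
    the node that follows position `self_index` in the list."""
    return nodes[self_index + 1:] + nodes[:self_index]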
| apache-2.0 | -5,551,986,106,652,244,000 | 3,025,560,672,703,539,000 | 42.222386 | 79 | 0.56566 | false |
acshan/odoo | openerp/addons/base/res/__init__.py | 384 | 1261 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import res_country
import res_lang
import res_partner
import res_bank
import res_config
import res_currency
import res_font
import res_company
import res_users
import res_request
import res_lang
import ir_property
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 5,819,930,846,090,427,000 | -6,290,796,814,277,922,000 | 34.027778 | 78 | 0.647898 | false |
ruschelp/cortex-vfx | python/IECoreMaya/TemporaryAttributeValues.py | 12 | 4259 | ##########################################################################
#
# Copyright (c) 2009-2012, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import maya.cmds
import maya.OpenMaya
import IECore
import StringUtil
## A context manager for controlling attribute values in with statements. It
# sets attributes to requested values on entering the block and resets them to
# their previous values on exiting the block.
class TemporaryAttributeValues :
def __init__( self, attributeAndValues = {}, **kw ) :
self.__attributesAndValues = attributeAndValues
self.__attributesAndValues.update( kw )
def __enter__( self ) :
handlers = {
"enum" : self.__simpleAttrHandler,
"bool" : self.__simpleAttrHandler,
"float" : self.__simpleAttrHandler,
"long" : self.__simpleAttrHandler,
"short" : self.__simpleAttrHandler,
"float2" : IECore.curry( self.__numeric2AttrHandler, attributeType="float2" ),
"long2" : IECore.curry( self.__numeric2AttrHandler, attributeType="long2" ),
"short2" : IECore.curry( self.__numeric2AttrHandler, attributeType="short2" ),
"float3" : IECore.curry( self.__numeric3AttrHandler, attributeType="float3" ),
"long3" : IECore.curry( self.__numeric3AttrHandler, attributeType="long3" ),
"short3" : IECore.curry( self.__numeric3AttrHandler, attributeType="short3" ),
"string" : self.__stringAttrHandler,
}
self.__restoreCommands = []
for attr, value in self.__attributesAndValues.items() :
# check we can handle this type
attrType = maya.cmds.getAttr( attr, type=True )
handler = handlers.get( attrType, None )
if not handler :
raise TypeError( "Attribute \"%s\" has unsupported type \"%s\"." % ( attr, attrType ) )
# store a command to restore the attribute value later
origValue = maya.cmds.getAttr( attr )
if isinstance( origValue, list ) and isinstance( origValue[0], tuple ) :
origValue = origValue[0]
self.__restoreCommands.append( IECore.curry( handler, attr, origValue ) )
# and change the attribute value
handler( attr, value )
def __exit__( self, type, value, traceBack ) :
for cmd in self.__restoreCommands :
cmd()
def __simpleAttrHandler( self, attr, value ) :
maya.cmds.setAttr( attr, value )
def __numeric2AttrHandler( self, attr, value, attributeType ) :
maya.cmds.setAttr( attr, value[0], value[1], type=attributeType )
def __numeric3AttrHandler( self, attr, value, attributeType ) :
maya.cmds.setAttr( attr, value[0], value[1], value[2], type=attributeType )
def __stringAttrHandler( self, attr, value ) :
maya.cmds.setAttr( attr, value, type="string" )
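# Hedged usage sketch, not part of the original module : the attribute names and
# values below are illustrative placeholders only ("persp" is assumed to exist).
def __exampleUsage() :
	with TemporaryAttributeValues( { "persp.focalLength" : 50.0, "persp.visibility" : True } ) :
		# while inside the block the attributes hold the temporary values
		maya.cmds.refresh()
	# on exiting the block the previous values are restored automatically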
| bsd-3-clause | 5,890,106,068,330,042,000 | 1,649,959,582,262,130,700 | 38.435185 | 91 | 0.694294 | false |
jeremiahmarks/sl4a | python/src/Lib/test/test_netrc.py | 99 | 1116 |
import netrc, os, unittest, sys
from test import test_support
TEST_NETRC = """
machine foo login log1 password pass1 account acct1
macdef macro1
line1
line2
macdef macro2
line3
line4
default login log2 password pass2
"""
temp_filename = test_support.TESTFN
class NetrcTestCase(unittest.TestCase):
def setUp (self):
mode = 'w'
if sys.platform not in ['cygwin']:
mode += 't'
fp = open(temp_filename, mode)
fp.write(TEST_NETRC)
fp.close()
self.netrc = netrc.netrc(temp_filename)
def tearDown (self):
del self.netrc
os.unlink(temp_filename)
def test_case_1(self):
self.assert_(self.netrc.macros == {'macro1':['line1\n', 'line2\n'],
'macro2':['line3\n', 'line4\n']}
)
self.assert_(self.netrc.hosts['foo'] == ('log1', 'acct1', 'pass1'))
self.assert_(self.netrc.hosts['default'] == ('log2', None, 'pass2'))
def test_main():
test_support.run_unittest(NetrcTestCase)
if __name__ == "__main__":
test_main()
| apache-2.0 | -5,232,915,685,064,787,000 | -3,676,935,421,023,922,700 | 22.25 | 76 | 0.567204 | false |
elmerdpadilla/iv | addons/hw_escpos/escpos/exceptions.py | 151 | 1974 | """ ESC/POS Exceptions classes """
import os
class Error(Exception):
""" Base class for ESC/POS errors """
def __init__(self, msg, status=None):
Exception.__init__(self)
self.msg = msg
self.resultcode = 1
if status is not None:
self.resultcode = status
def __str__(self):
return self.msg
# Result/Exit codes
# 0 = success
# 10 = No Barcode type defined
# 20 = Barcode size values are out of range
# 30 = Barcode text not supplied
# 40 = Image height is too large
# 50 = No string supplied to be printed
# 60 = Invalid pin to send Cash Drawer pulse
class BarcodeTypeError(Error):
def __init__(self, msg=""):
Error.__init__(self, msg)
self.msg = msg
self.resultcode = 10
def __str__(self):
return "No Barcode type is defined"
class BarcodeSizeError(Error):
def __init__(self, msg=""):
Error.__init__(self, msg)
self.msg = msg
self.resultcode = 20
def __str__(self):
return "Barcode size is out of range"
class BarcodeCodeError(Error):
def __init__(self, msg=""):
Error.__init__(self, msg)
self.msg = msg
self.resultcode = 30
def __str__(self):
return "Code was not supplied"
class ImageSizeError(Error):
def __init__(self, msg=""):
Error.__init__(self, msg)
self.msg = msg
self.resultcode = 40
def __str__(self):
return "Image height is longer than 255px and can't be printed"
class TextError(Error):
def __init__(self, msg=""):
Error.__init__(self, msg)
self.msg = msg
self.resultcode = 50
def __str__(self):
return "Text string must be supplied to the text() method"
class CashDrawerError(Error):
def __init__(self, msg=""):
Error.__init__(self, msg)
self.msg = msg
self.resultcode = 60
def __str__(self):
return "Valid pin must be set to send pulse"
| agpl-3.0 | 2,882,984,028,966,692,400 | -2,139,606,206,138,731,500 | 23.675 | 71 | 0.576494 | false |
nuagenetworks/vspk-python | vspk/v5_0/nuvirtualfirewallrule.py | 1 | 40053 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from bambou import NURESTObject
class NUVirtualFirewallRule(NURESTObject):
""" Represents a VirtualFirewallRule in the VSD
Notes:
            Virtual firewall rules define intent-based security policy entries to control traffic between sources/destinations in the network. Virtual firewall rules are inherently stateful and are enforced as Ingress/Egress stateful ACLs in Nuage policy enforcement points.
"""
__rest_name__ = "virtualfirewallrule"
__resource_name__ = "virtualfirewallrules"
## Constants
CONST_NETWORK_TYPE_NETWORK_MACRO_GROUP = "NETWORK_MACRO_GROUP"
CONST_NETWORK_TYPE_ENTERPRISE_NETWORK = "ENTERPRISE_NETWORK"
CONST_LOCATION_TYPE_ZONE = "ZONE"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
CONST_ACTION_FORWARD = "FORWARD"
CONST_NETWORK_TYPE_POLICYGROUP = "POLICYGROUP"
CONST_LOCATION_TYPE_UNDERLAY_INTERNET_POLICYGROUP = "UNDERLAY_INTERNET_POLICYGROUP"
CONST_LOCATION_TYPE_ANY = "ANY"
CONST_LOCATION_TYPE_PGEXPRESSION = "PGEXPRESSION"
CONST_ACTION_DROP = "DROP"
CONST_LOCATION_TYPE_ENTERPRISE_NETWORK = "ENTERPRISE_NETWORK"
CONST_NETWORK_TYPE_ANY = "ANY"
CONST_LOCATION_TYPE_POLICYGROUP = "POLICYGROUP"
CONST_NETWORK_TYPE_SUBNET = "SUBNET"
CONST_LOCATION_TYPE_NETWORK_MACRO_GROUP = "NETWORK_MACRO_GROUP"
CONST_NETWORK_TYPE_ZONE = "ZONE"
CONST_ASSOCIATED_TRAFFIC_TYPE_L4_SERVICE_GROUP = "L4_SERVICE_GROUP"
CONST_LOCATION_TYPE_SUBNET = "SUBNET"
CONST_POLICY_STATE_DRAFT = "DRAFT"
CONST_ASSOCIATED_TRAFFIC_TYPE_L4_SERVICE = "L4_SERVICE"
CONST_WEB_FILTER_TYPE_WEB_DOMAIN_NAME = "WEB_DOMAIN_NAME"
CONST_POLICY_STATE_LIVE = "LIVE"
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_WEB_FILTER_TYPE_WEB_CATEGORY = "WEB_CATEGORY"
CONST_NETWORK_TYPE_PGEXPRESSION = "PGEXPRESSION"
CONST_NETWORK_TYPE_INTERNET_POLICYGROUP = "INTERNET_POLICYGROUP"
def __init__(self, **kwargs):
""" Initializes a VirtualFirewallRule instance
Notes:
You can specify all parameters while calling this methods.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> virtualfirewallrule = NUVirtualFirewallRule(id=u'xxxx-xxx-xxx-xxx', name=u'VirtualFirewallRule')
>>> virtualfirewallrule = NUVirtualFirewallRule(data=my_dict)
"""
super(NUVirtualFirewallRule, self).__init__()
# Read/Write Attributes
self._acl_template_name = None
self._icmp_code = None
self._icmp_type = None
self._ipv6_address_override = None
self._dscp = None
self._last_updated_by = None
self._action = None
self._address_override = None
self._web_filter_id = None
self._web_filter_type = None
self._description = None
self._destination_port = None
self._network_id = None
self._network_type = None
self._mirror_destination_id = None
self._flow_logging_enabled = None
self._enterprise_name = None
self._entity_scope = None
self._location_id = None
self._location_type = None
self._policy_state = None
self._domain_name = None
self._source_port = None
self._priority = None
self._protocol = None
self._associated_egress_entry_id = None
self._associated_ingress_entry_id = None
self._associated_l7_application_signature_id = None
self._associated_live_entity_id = None
self._associated_live_template_id = None
self._associated_traffic_type = None
self._associated_traffic_type_id = None
self._stateful = None
self._stats_id = None
self._stats_logging_enabled = None
self._ether_type = None
self._overlay_mirror_destination_id = None
self._external_id = None
self.expose_attribute(local_name="acl_template_name", remote_name="ACLTemplateName", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="icmp_code", remote_name="ICMPCode", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="icmp_type", remote_name="ICMPType", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="ipv6_address_override", remote_name="IPv6AddressOverride", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="dscp", remote_name="DSCP", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="action", remote_name="action", attribute_type=str, is_required=True, is_unique=False, choices=[u'DROP', u'FORWARD'])
self.expose_attribute(local_name="address_override", remote_name="addressOverride", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="web_filter_id", remote_name="webFilterID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="web_filter_type", remote_name="webFilterType", attribute_type=str, is_required=False, is_unique=False, choices=[u'WEB_CATEGORY', u'WEB_DOMAIN_NAME'])
self.expose_attribute(local_name="description", remote_name="description", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="destination_port", remote_name="destinationPort", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="network_id", remote_name="networkID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="network_type", remote_name="networkType", attribute_type=str, is_required=False, is_unique=False, choices=[u'ANY', u'ENTERPRISE_NETWORK', u'INTERNET_POLICYGROUP', u'NETWORK_MACRO_GROUP', u'PGEXPRESSION', u'POLICYGROUP', u'SUBNET', u'ZONE'])
self.expose_attribute(local_name="mirror_destination_id", remote_name="mirrorDestinationID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="flow_logging_enabled", remote_name="flowLoggingEnabled", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="enterprise_name", remote_name="enterpriseName", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="location_id", remote_name="locationID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="location_type", remote_name="locationType", attribute_type=str, is_required=True, is_unique=False, choices=[u'ANY', u'ENTERPRISE_NETWORK', u'NETWORK_MACRO_GROUP', u'PGEXPRESSION', u'POLICYGROUP', u'SUBNET', u'UNDERLAY_INTERNET_POLICYGROUP', u'ZONE'])
self.expose_attribute(local_name="policy_state", remote_name="policyState", attribute_type=str, is_required=False, is_unique=False, choices=[u'DRAFT', u'LIVE'])
self.expose_attribute(local_name="domain_name", remote_name="domainName", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="source_port", remote_name="sourcePort", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="priority", remote_name="priority", attribute_type=int, is_required=False, is_unique=False)
self.expose_attribute(local_name="protocol", remote_name="protocol", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="associated_egress_entry_id", remote_name="associatedEgressEntryID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="associated_ingress_entry_id", remote_name="associatedIngressEntryID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="associated_l7_application_signature_id", remote_name="associatedL7ApplicationSignatureID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="associated_live_entity_id", remote_name="associatedLiveEntityID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="associated_live_template_id", remote_name="associatedLiveTemplateID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="associated_traffic_type", remote_name="associatedTrafficType", attribute_type=str, is_required=False, is_unique=False, choices=[u'L4_SERVICE', u'L4_SERVICE_GROUP'])
self.expose_attribute(local_name="associated_traffic_type_id", remote_name="associatedTrafficTypeID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="stateful", remote_name="stateful", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="stats_id", remote_name="statsID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="stats_logging_enabled", remote_name="statsLoggingEnabled", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="ether_type", remote_name="etherType", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="overlay_mirror_destination_id", remote_name="overlayMirrorDestinationID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
# Fetchers
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def acl_template_name(self):
""" Get acl_template_name value.
Notes:
The name of the parent template for this rule entry
This attribute is named `ACLTemplateName` in VSD API.
"""
return self._acl_template_name
@acl_template_name.setter
def acl_template_name(self, value):
""" Set acl_template_name value.
Notes:
The name of the parent template for this rule entry
This attribute is named `ACLTemplateName` in VSD API.
"""
self._acl_template_name = value
@property
def icmp_code(self):
""" Get icmp_code value.
Notes:
The ICMP Code when protocol selected is ICMP.
This attribute is named `ICMPCode` in VSD API.
"""
return self._icmp_code
@icmp_code.setter
def icmp_code(self, value):
""" Set icmp_code value.
Notes:
The ICMP Code when protocol selected is ICMP.
This attribute is named `ICMPCode` in VSD API.
"""
self._icmp_code = value
@property
def icmp_type(self):
""" Get icmp_type value.
Notes:
The ICMP Type when protocol selected is ICMP.
This attribute is named `ICMPType` in VSD API.
"""
return self._icmp_type
@icmp_type.setter
def icmp_type(self, value):
""" Set icmp_type value.
Notes:
The ICMP Type when protocol selected is ICMP.
This attribute is named `ICMPType` in VSD API.
"""
self._icmp_type = value
@property
def ipv6_address_override(self):
""" Get ipv6_address_override value.
Notes:
                Overrides the source IPv6 for Ingress and the destination IPv6 for Egress; MAC entries will use this address as the match criteria.
This attribute is named `IPv6AddressOverride` in VSD API.
"""
return self._ipv6_address_override
@ipv6_address_override.setter
def ipv6_address_override(self, value):
""" Set ipv6_address_override value.
Notes:
                Overrides the source IPv6 for Ingress and the destination IPv6 for Egress; MAC entries will use this address as the match criteria.
This attribute is named `IPv6AddressOverride` in VSD API.
"""
self._ipv6_address_override = value
@property
def dscp(self):
""" Get dscp value.
Notes:
DSCP match condition to be set in the rule. It is either * or from 0-63
This attribute is named `DSCP` in VSD API.
"""
return self._dscp
@dscp.setter
def dscp(self, value):
""" Set dscp value.
Notes:
DSCP match condition to be set in the rule. It is either * or from 0-63
This attribute is named `DSCP` in VSD API.
"""
self._dscp = value
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def action(self):
""" Get action value.
Notes:
The action of the rule, DROP or FORWARD. Possible values are DROP, FORWARD.
"""
return self._action
@action.setter
def action(self, value):
""" Set action value.
Notes:
The action of the rule, DROP or FORWARD. Possible values are DROP, FORWARD.
"""
self._action = value
@property
def address_override(self):
""" Get address_override value.
Notes:
                Overrides the source IP for Ingress and the destination IP for Egress; MAC entries will use this address as the match criteria.
This attribute is named `addressOverride` in VSD API.
"""
return self._address_override
@address_override.setter
def address_override(self, value):
""" Set address_override value.
Notes:
                Overrides the source IP for Ingress and the destination IP for Egress; MAC entries will use this address as the match criteria.
This attribute is named `addressOverride` in VSD API.
"""
self._address_override = value
@property
def web_filter_id(self):
""" Get web_filter_id value.
Notes:
ID of web filter
This attribute is named `webFilterID` in VSD API.
"""
return self._web_filter_id
@web_filter_id.setter
def web_filter_id(self, value):
""" Set web_filter_id value.
Notes:
ID of web filter
This attribute is named `webFilterID` in VSD API.
"""
self._web_filter_id = value
@property
def web_filter_type(self):
""" Get web_filter_type value.
Notes:
Indicates type of web filter being set
This attribute is named `webFilterType` in VSD API.
"""
return self._web_filter_type
@web_filter_type.setter
def web_filter_type(self, value):
""" Set web_filter_type value.
Notes:
Indicates type of web filter being set
This attribute is named `webFilterType` in VSD API.
"""
self._web_filter_type = value
@property
def description(self):
""" Get description value.
Notes:
Description of the rule entry
"""
return self._description
@description.setter
def description(self, value):
""" Set description value.
Notes:
Description of the rule entry
"""
self._description = value
@property
def destination_port(self):
""" Get destination_port value.
Notes:
The destination port to be matched if protocol is UDP or TCP. Value should be either * or a single port number or a port range like 1,2.. or 1 - 10
This attribute is named `destinationPort` in VSD API.
"""
return self._destination_port
@destination_port.setter
def destination_port(self, value):
""" Set destination_port value.
Notes:
The destination port to be matched if protocol is UDP or TCP. Value should be either * or a single port number or a port range like 1,2.. or 1 - 10
This attribute is named `destinationPort` in VSD API.
"""
self._destination_port = value
@property
def network_id(self):
""" Get network_id value.
Notes:
The ID of the destination endpoint (Subnet/Zone/PortGroup/PolicyGroupExpression/NetworkMacro/Internet Policy Group/Enterprise Network)
This attribute is named `networkID` in VSD API.
"""
return self._network_id
@network_id.setter
def network_id(self, value):
""" Set network_id value.
Notes:
The ID of the destination endpoint (Subnet/Zone/PortGroup/PolicyGroupExpression/NetworkMacro/Internet Policy Group/Enterprise Network)
This attribute is named `networkID` in VSD API.
"""
self._network_id = value
@property
def network_type(self):
""" Get network_type value.
Notes:
Type of the destination endpoint (Subnet/Zone/PortGroup/PolicyGroupExpression/NetworkMacro/Internet Policy Group/Enterprise Network)
This attribute is named `networkType` in VSD API.
"""
return self._network_type
@network_type.setter
def network_type(self, value):
""" Set network_type value.
Notes:
Type of the destination endpoint (Subnet/Zone/PortGroup/PolicyGroupExpression/NetworkMacro/Internet Policy Group/Enterprise Network)
This attribute is named `networkType` in VSD API.
"""
self._network_type = value
@property
def mirror_destination_id(self):
""" Get mirror_destination_id value.
Notes:
Destination ID of the mirror destination object.
This attribute is named `mirrorDestinationID` in VSD API.
"""
return self._mirror_destination_id
@mirror_destination_id.setter
def mirror_destination_id(self, value):
""" Set mirror_destination_id value.
Notes:
Destination ID of the mirror destination object.
This attribute is named `mirrorDestinationID` in VSD API.
"""
self._mirror_destination_id = value
@property
def flow_logging_enabled(self):
""" Get flow_logging_enabled value.
Notes:
Is flow logging enabled for this particular template
This attribute is named `flowLoggingEnabled` in VSD API.
"""
return self._flow_logging_enabled
@flow_logging_enabled.setter
def flow_logging_enabled(self, value):
""" Set flow_logging_enabled value.
Notes:
Is flow logging enabled for this particular template
This attribute is named `flowLoggingEnabled` in VSD API.
"""
self._flow_logging_enabled = value
@property
def enterprise_name(self):
""" Get enterprise_name value.
Notes:
The name of the enterprise for the domain's parent
This attribute is named `enterpriseName` in VSD API.
"""
return self._enterprise_name
@enterprise_name.setter
def enterprise_name(self, value):
""" Set enterprise_name value.
Notes:
The name of the enterprise for the domain's parent
This attribute is named `enterpriseName` in VSD API.
"""
self._enterprise_name = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def location_id(self):
""" Get location_id value.
Notes:
The ID of the source endpoint (Subnet/Zone/PortGroup/PolicyGroupExpression/NetworkMacro/Internet Policy Group/Enterprise Network)
This attribute is named `locationID` in VSD API.
"""
return self._location_id
@location_id.setter
def location_id(self, value):
""" Set location_id value.
Notes:
The ID of the source endpoint (Subnet/Zone/PortGroup/PolicyGroupExpression/NetworkMacro/Internet Policy Group/Enterprise Network)
This attribute is named `locationID` in VSD API.
"""
self._location_id = value
@property
def location_type(self):
""" Get location_type value.
Notes:
Type of the source endpoint (Subnet/Zone/PortGroup/PolicyGroupExpression/NetworkMacro/Internet Policy Group/Enterprise Network)
This attribute is named `locationType` in VSD API.
"""
return self._location_type
@location_type.setter
def location_type(self, value):
""" Set location_type value.
Notes:
Type of the source endpoint (Subnet/Zone/PortGroup/PolicyGroupExpression/NetworkMacro/Internet Policy Group/Enterprise Network)
This attribute is named `locationType` in VSD API.
"""
self._location_type = value
@property
def policy_state(self):
""" Get policy_state value.
Notes:
State of the policy.
This attribute is named `policyState` in VSD API.
"""
return self._policy_state
@policy_state.setter
def policy_state(self, value):
""" Set policy_state value.
Notes:
State of the policy.
This attribute is named `policyState` in VSD API.
"""
self._policy_state = value
@property
def domain_name(self):
""" Get domain_name value.
Notes:
The name of the domain/domain template for the Rule TemplateName.
This attribute is named `domainName` in VSD API.
"""
return self._domain_name
@domain_name.setter
def domain_name(self, value):
""" Set domain_name value.
Notes:
The name of the domain/domain template for the Rule TemplateName.
This attribute is named `domainName` in VSD API.
"""
self._domain_name = value
@property
def source_port(self):
""" Get source_port value.
Notes:
Source port to be matched if protocol is UDP or TCP. Value should be either * or a single port number or a port range like 1,2.. or 1 - 10
This attribute is named `sourcePort` in VSD API.
"""
return self._source_port
@source_port.setter
def source_port(self, value):
""" Set source_port value.
Notes:
Source port to be matched if protocol is UDP or TCP. Value should be either * or a single port number or a port range like 1,2.. or 1 - 10
This attribute is named `sourcePort` in VSD API.
"""
self._source_port = value
@property
def priority(self):
""" Get priority value.
Notes:
The priority of the rule entry that determines the order of entries
"""
return self._priority
@priority.setter
def priority(self, value):
""" Set priority value.
Notes:
The priority of the rule entry that determines the order of entries
"""
self._priority = value
@property
def protocol(self):
""" Get protocol value.
Notes:
Protocol number that must be matched
"""
return self._protocol
@protocol.setter
def protocol(self, value):
""" Set protocol value.
Notes:
Protocol number that must be matched
"""
self._protocol = value
@property
def associated_egress_entry_id(self):
""" Get associated_egress_entry_id value.
Notes:
In the draft mode, the ACL entry refers to this LiveEntity. In non-drafted mode, this is null.
This attribute is named `associatedEgressEntryID` in VSD API.
"""
return self._associated_egress_entry_id
@associated_egress_entry_id.setter
def associated_egress_entry_id(self, value):
""" Set associated_egress_entry_id value.
Notes:
In the draft mode, the ACL entry refers to this LiveEntity. In non-drafted mode, this is null.
This attribute is named `associatedEgressEntryID` in VSD API.
"""
self._associated_egress_entry_id = value
@property
def associated_ingress_entry_id(self):
""" Get associated_ingress_entry_id value.
Notes:
In the draft mode, the ACL entry refers to this LiveEntity. In non-drafted mode, this is null.
This attribute is named `associatedIngressEntryID` in VSD API.
"""
return self._associated_ingress_entry_id
@associated_ingress_entry_id.setter
def associated_ingress_entry_id(self, value):
""" Set associated_ingress_entry_id value.
Notes:
In the draft mode, the ACL entry refers to this LiveEntity. In non-drafted mode, this is null.
This attribute is named `associatedIngressEntryID` in VSD API.
"""
self._associated_ingress_entry_id = value
@property
def associated_l7_application_signature_id(self):
""" Get associated_l7_application_signature_id value.
Notes:
The UUID of the associated L7 Application Signature
This attribute is named `associatedL7ApplicationSignatureID` in VSD API.
"""
return self._associated_l7_application_signature_id
@associated_l7_application_signature_id.setter
def associated_l7_application_signature_id(self, value):
""" Set associated_l7_application_signature_id value.
Notes:
The UUID of the associated L7 Application Signature
This attribute is named `associatedL7ApplicationSignatureID` in VSD API.
"""
self._associated_l7_application_signature_id = value
@property
def associated_live_entity_id(self):
""" Get associated_live_entity_id value.
Notes:
In the draft mode, the rule entry refers to this LiveEntity. In live mode, this is null.
This attribute is named `associatedLiveEntityID` in VSD API.
"""
return self._associated_live_entity_id
@associated_live_entity_id.setter
def associated_live_entity_id(self, value):
""" Set associated_live_entity_id value.
Notes:
In the draft mode, the rule entry refers to this LiveEntity. In live mode, this is null.
This attribute is named `associatedLiveEntityID` in VSD API.
"""
self._associated_live_entity_id = value
@property
def associated_live_template_id(self):
""" Get associated_live_template_id value.
Notes:
In the draft mode, the ACL entity refers to this live entity parent. In non-drafted mode, this is null
This attribute is named `associatedLiveTemplateID` in VSD API.
"""
return self._associated_live_template_id
@associated_live_template_id.setter
def associated_live_template_id(self, value):
""" Set associated_live_template_id value.
Notes:
In the draft mode, the ACL entity refers to this live entity parent. In non-drafted mode, this is null
This attribute is named `associatedLiveTemplateID` in VSD API.
"""
self._associated_live_template_id = value
@property
def associated_traffic_type(self):
""" Get associated_traffic_type value.
Notes:
                This property reflects the type of traffic in case a rule entry is created using a Service or Service Group. In case a protocol and port are specified for the ACL entry, this property has to be empty (null). Supported values are L4_SERVICE, L4_SERVICE_GROUP and empty.
This attribute is named `associatedTrafficType` in VSD API.
"""
return self._associated_traffic_type
@associated_traffic_type.setter
def associated_traffic_type(self, value):
""" Set associated_traffic_type value.
Notes:
                This property reflects the type of traffic in case a rule entry is created using a Service or Service Group. In case a protocol and port are specified for the ACL entry, this property has to be empty (null). Supported values are L4_SERVICE, L4_SERVICE_GROUP and empty.
This attribute is named `associatedTrafficType` in VSD API.
"""
self._associated_traffic_type = value
@property
def associated_traffic_type_id(self):
""" Get associated_traffic_type_id value.
Notes:
                If a traffic type is specified as Service or Service Group, then the associated ID of the Service / Service Group should be specified here.
This attribute is named `associatedTrafficTypeID` in VSD API.
"""
return self._associated_traffic_type_id
@associated_traffic_type_id.setter
def associated_traffic_type_id(self, value):
""" Set associated_traffic_type_id value.
Notes:
                If a traffic type is specified as Service or Service Group, then the associated ID of the Service / Service Group should be specified here.
This attribute is named `associatedTrafficTypeID` in VSD API.
"""
self._associated_traffic_type_id = value
@property
def stateful(self):
""" Get stateful value.
Notes:
True means that this ACL entry is stateful, so there will be a corresponding rule that will be created by OVS in the network. False means that there is no corresponding rule created by OVS in the network.
"""
return self._stateful
@stateful.setter
def stateful(self, value):
""" Set stateful value.
Notes:
True means that this ACL entry is stateful, so there will be a corresponding rule that will be created by OVS in the network. False means that there is no corresponding rule created by OVS in the network.
"""
self._stateful = value
@property
def stats_id(self):
""" Get stats_id value.
Notes:
The statsID that is created in the VSD and identifies this Rule Template Entry. This is auto-generated by VSD
This attribute is named `statsID` in VSD API.
"""
return self._stats_id
@stats_id.setter
def stats_id(self, value):
""" Set stats_id value.
Notes:
The statsID that is created in the VSD and identifies this Rule Template Entry. This is auto-generated by VSD
This attribute is named `statsID` in VSD API.
"""
self._stats_id = value
@property
def stats_logging_enabled(self):
""" Get stats_logging_enabled value.
Notes:
Is stats logging enabled for this particular template
This attribute is named `statsLoggingEnabled` in VSD API.
"""
return self._stats_logging_enabled
@stats_logging_enabled.setter
def stats_logging_enabled(self, value):
""" Set stats_logging_enabled value.
Notes:
Is stats logging enabled for this particular template
This attribute is named `statsLoggingEnabled` in VSD API.
"""
self._stats_logging_enabled = value
@property
def ether_type(self):
""" Get ether_type value.
Notes:
Ether type of the packet to be matched. etherType can be * or a valid hexadecimal value
This attribute is named `etherType` in VSD API.
"""
return self._ether_type
@ether_type.setter
def ether_type(self, value):
""" Set ether_type value.
Notes:
Ether type of the packet to be matched. etherType can be * or a valid hexadecimal value
This attribute is named `etherType` in VSD API.
"""
self._ether_type = value
@property
def overlay_mirror_destination_id(self):
""" Get overlay_mirror_destination_id value.
Notes:
ID of the overlay mirror destination
This attribute is named `overlayMirrorDestinationID` in VSD API.
"""
return self._overlay_mirror_destination_id
@overlay_mirror_destination_id.setter
def overlay_mirror_destination_id(self, value):
""" Set overlay_mirror_destination_id value.
Notes:
ID of the overlay mirror destination
This attribute is named `overlayMirrorDestinationID` in VSD API.
"""
self._overlay_mirror_destination_id = value
@property
def external_id(self):
""" Get external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
return self._external_id
@external_id.setter
def external_id(self, value):
""" Set external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
self._external_id = value
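# Hedged usage sketch (not part of the generated class): the values below are
# illustrative placeholders built only from constants defined above.
def _example_rule():
    rule = NUVirtualFirewallRule()
    rule.action = NUVirtualFirewallRule.CONST_ACTION_FORWARD
    rule.location_type = NUVirtualFirewallRule.CONST_LOCATION_TYPE_ANY
    rule.network_type = NUVirtualFirewallRule.CONST_NETWORK_TYPE_ANY
    rule.description = "Forward any traffic to any destination"
    return rule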
| bsd-3-clause | 6,000,105,335,635,882,000 | -3,402,118,865,713,481,700 | 31.777414 | 292 | 0.587896 | false |
jomolinare/kobocat | onadata/apps/stats/views.py | 3 | 1326 | from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from django.shortcuts import render
from onadata.apps.logger.models import XForm
from onadata.apps.stats.utils import get_form_submissions_per_day
@login_required
def stats(request, username=None, id_string=None):
if id_string:
xform = get_object_or_404(
XForm, user=request.user, id_string__exact=id_string)
data = {
'xform': xform,
'context.submission_stats': get_form_submissions_per_day(xform)
}
else:
data = {'xforms': XForm.objects.filter(user=request.user)}
return render(request, 'form-stats.html', data)
@staff_member_required
def submissions(request):
stats = {}
stats['submission_count'] = {}
stats['submission_count']['total_submission_count'] = 0
users = User.objects.all()
for user in users:
stats['submission_count'][user.username] = 0
stats['submission_count'][user.username] += user.instances.count()
stats['submission_count'][
'total_submission_count'] += user.instances.count()
return render(request, "submissions.html", {'stats': stats})
| bsd-2-clause | -4,155,063,468,887,802,400 | -151,250,551,642,409,540 | 33.894737 | 75 | 0.683258 | false |
rangma/p2pool | p2pool/test/test_p2p.py | 269 | 2724 | import random
from twisted.internet import defer, endpoints, protocol, reactor
from twisted.trial import unittest
from p2pool import networks, p2p
from p2pool.bitcoin import data as bitcoin_data
from p2pool.util import deferral
class Test(unittest.TestCase):
@defer.inlineCallbacks
def test_sharereq(self):
class MyNode(p2p.Node):
def __init__(self, df):
p2p.Node.__init__(self, lambda: None, 29333, networks.nets['bitcoin'], {}, set([('127.0.0.1', 9333)]), 0, 0, 0, 0)
self.df = df
def handle_share_hashes(self, hashes, peer):
peer.get_shares(
hashes=[hashes[0]],
parents=5,
stops=[],
).chainDeferred(self.df)
df = defer.Deferred()
n = MyNode(df)
n.start()
try:
yield df
finally:
yield n.stop()
@defer.inlineCallbacks
def test_tx_limit(self):
class MyNode(p2p.Node):
def __init__(self, df):
p2p.Node.__init__(self, lambda: None, 29333, networks.nets['bitcoin'], {}, set([('127.0.0.1', 9333)]), 0, 0, 0, 0)
self.df = df
self.sent_time = 0
@defer.inlineCallbacks
def got_conn(self, conn):
p2p.Node.got_conn(self, conn)
yield deferral.sleep(.5)
new_mining_txs = dict(self.mining_txs_var.value)
for i in xrange(3):
huge_tx = dict(
version=0,
tx_ins=[],
tx_outs=[dict(
value=0,
script='x'*900000,
)],
lock_time=i,
)
new_mining_txs[bitcoin_data.hash256(bitcoin_data.tx_type.pack(huge_tx))] = huge_tx
self.mining_txs_var.set(new_mining_txs)
self.sent_time = reactor.seconds()
def lost_conn(self, conn, reason):
self.df.callback(None)
try:
p2p.Protocol.max_remembered_txs_size *= 10
df = defer.Deferred()
n = MyNode(df)
n.start()
yield df
if not (n.sent_time <= reactor.seconds() <= n.sent_time + 1):
                raise ValueError('node did not disconnect within 1 second of receiving too much tx data')
yield n.stop()
finally:
p2p.Protocol.max_remembered_txs_size //= 10
| gpl-3.0 | 110,584,510,606,613,060 | 2,265,448,900,036,900,000 | 33.481013 | 130 | 0.459618 | false |
lshain-android-source/external-chromium_org | third_party/tlslite/tlslite/utils/keyfactory.py | 361 | 8791 | """Factory functions for asymmetric cryptography.
@sort: generateRSAKey, parseXMLKey, parsePEMKey, parseAsPublicKey,
parseAsPrivateKey
"""
from compat import *
from RSAKey import RSAKey
from Python_RSAKey import Python_RSAKey
import cryptomath
if cryptomath.m2cryptoLoaded:
from OpenSSL_RSAKey import OpenSSL_RSAKey
if cryptomath.pycryptoLoaded:
from PyCrypto_RSAKey import PyCrypto_RSAKey
# **************************************************************************
# Factory Functions for RSA Keys
# **************************************************************************
def generateRSAKey(bits, implementations=["openssl", "python"]):
"""Generate an RSA key with the specified bit length.
@type bits: int
@param bits: Desired bit length of the new key's modulus.
@rtype: L{tlslite.utils.RSAKey.RSAKey}
@return: A new RSA private key.
"""
for implementation in implementations:
if implementation == "openssl" and cryptomath.m2cryptoLoaded:
return OpenSSL_RSAKey.generate(bits)
elif implementation == "python":
return Python_RSAKey.generate(bits)
raise ValueError("No acceptable implementations")
def parseXMLKey(s, private=False, public=False, implementations=["python"]):
"""Parse an XML-format key.
The XML format used here is specific to tlslite and cryptoIDlib. The
format can store the public component of a key, or the public and
private components. For example::
<publicKey xmlns="http://trevp.net/rsa">
<n>4a5yzB8oGNlHo866CAspAC47M4Fvx58zwK8pou...
<e>Aw==</e>
</publicKey>
<privateKey xmlns="http://trevp.net/rsa">
<n>4a5yzB8oGNlHo866CAspAC47M4Fvx58zwK8pou...
<e>Aw==</e>
<d>JZ0TIgUxWXmL8KJ0VqyG1V0J3ern9pqIoB0xmy...
<p>5PreIj6z6ldIGL1V4+1C36dQFHNCQHJvW52GXc...
<q>/E/wDit8YXPCxx126zTq2ilQ3IcW54NJYyNjiZ...
<dP>mKc+wX8inDowEH45Qp4slRo1YveBgExKPROu6...
<dQ>qDVKtBz9lk0shL5PR3ickXDgkwS576zbl2ztB...
<qInv>j6E8EA7dNsTImaXexAmLA1DoeArsYeFAInr...
</privateKey>
@type s: str
@param s: A string containing an XML public or private key.
@type private: bool
@param private: If True, a L{SyntaxError} will be raised if the private
key component is not present.
@type public: bool
@param public: If True, the private key component (if present) will be
discarded, so this function will always return a public key.
@rtype: L{tlslite.utils.RSAKey.RSAKey}
@return: An RSA key.
@raise SyntaxError: If the key is not properly formatted.
"""
for implementation in implementations:
if implementation == "python":
key = Python_RSAKey.parseXML(s)
break
else:
raise ValueError("No acceptable implementations")
return _parseKeyHelper(key, private, public)
#Parse as an OpenSSL or Python key
def parsePEMKey(s, private=False, public=False, passwordCallback=None,
implementations=["openssl", "python"]):
"""Parse a PEM-format key.
The PEM format is used by OpenSSL and other tools. The
format is typically used to store both the public and private
components of a key. For example::
-----BEGIN RSA PRIVATE KEY-----
MIICXQIBAAKBgQDYscuoMzsGmW0pAYsmyHltxB2TdwHS0dImfjCMfaSDkfLdZY5+
dOWORVns9etWnr194mSGA1F0Pls/VJW8+cX9+3vtJV8zSdANPYUoQf0TP7VlJxkH
dSRkUbEoz5bAAs/+970uos7n7iXQIni+3erUTdYEk2iWnMBjTljfgbK/dQIDAQAB
AoGAJHoJZk75aKr7DSQNYIHuruOMdv5ZeDuJvKERWxTrVJqE32/xBKh42/IgqRrc
esBN9ZregRCd7YtxoL+EVUNWaJNVx2mNmezEznrc9zhcYUrgeaVdFO2yBF1889zO
gCOVwrO8uDgeyj6IKa25H6c1N13ih/o7ZzEgWbGG+ylU1yECQQDv4ZSJ4EjSh/Fl
aHdz3wbBa/HKGTjC8iRy476Cyg2Fm8MZUe9Yy3udOrb5ZnS2MTpIXt5AF3h2TfYV
VoFXIorjAkEA50FcJmzT8sNMrPaV8vn+9W2Lu4U7C+K/O2g1iXMaZms5PC5zV5aV
CKXZWUX1fq2RaOzlbQrpgiolhXpeh8FjxwJBAOFHzSQfSsTNfttp3KUpU0LbiVvv
i+spVSnA0O4rq79KpVNmK44Mq67hsW1P11QzrzTAQ6GVaUBRv0YS061td1kCQHnP
wtN2tboFR6lABkJDjxoGRvlSt4SOPr7zKGgrWjeiuTZLHXSAnCY+/hr5L9Q3ZwXG
6x6iBdgLjVIe4BZQNtcCQQDXGv/gWinCNTN3MPWfTW/RGzuMYVmyBFais0/VrgdH
h1dLpztmpQqfyH/zrBXQ9qL/zR4ojS6XYneO/U18WpEe
-----END RSA PRIVATE KEY-----
To generate a key like this with OpenSSL, run::
openssl genrsa 2048 > key.pem
This format also supports password-encrypted private keys. TLS
Lite can only handle password-encrypted private keys when OpenSSL
and M2Crypto are installed. In this case, passwordCallback will be
invoked to query the user for the password.
@type s: str
@param s: A string containing a PEM-encoded public or private key.
@type private: bool
@param private: If True, a L{SyntaxError} will be raised if the
private key component is not present.
@type public: bool
@param public: If True, the private key component (if present) will
be discarded, so this function will always return a public key.
@type passwordCallback: callable
@param passwordCallback: This function will be called, with no
arguments, if the PEM-encoded private key is password-encrypted.
The callback should return the password string. If the password is
incorrect, SyntaxError will be raised. If no callback is passed
and the key is password-encrypted, a prompt will be displayed at
the console.
@rtype: L{tlslite.utils.RSAKey.RSAKey}
@return: An RSA key.
@raise SyntaxError: If the key is not properly formatted.
"""
for implementation in implementations:
if implementation == "openssl" and cryptomath.m2cryptoLoaded:
key = OpenSSL_RSAKey.parse(s, passwordCallback)
break
elif implementation == "python":
key = Python_RSAKey.parsePEM(s)
break
else:
raise ValueError("No acceptable implementations")
return _parseKeyHelper(key, private, public)
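# Hedged usage sketch (not part of tlslite): "key.pem" is a placeholder path to a
# PEM file such as the one produced by the openssl command shown above.
def _examplePEMUsage():
    s = open("key.pem").read()
    privateKey = parsePEMKey(s, private=True)
    publicKey = parsePEMKey(s, public=True)
    return privateKey, publicKey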
def _parseKeyHelper(key, private, public):
if private:
if not key.hasPrivateKey():
raise SyntaxError("Not a private key!")
if public:
return _createPublicKey(key)
if private:
if hasattr(key, "d"):
return _createPrivateKey(key)
else:
return key
return key
def parseAsPublicKey(s):
"""Parse an XML or PEM-formatted public key.
@type s: str
@param s: A string containing an XML or PEM-encoded public or private key.
@rtype: L{tlslite.utils.RSAKey.RSAKey}
@return: An RSA public key.
@raise SyntaxError: If the key is not properly formatted.
"""
try:
return parsePEMKey(s, public=True)
except:
return parseXMLKey(s, public=True)
def parsePrivateKey(s):
"""Parse an XML or PEM-formatted private key.
@type s: str
@param s: A string containing an XML or PEM-encoded private key.
@rtype: L{tlslite.utils.RSAKey.RSAKey}
@return: An RSA private key.
@raise SyntaxError: If the key is not properly formatted.
"""
try:
return parsePEMKey(s, private=True)
except:
return parseXMLKey(s, private=True)
def _createPublicKey(key):
"""
Create a new public key. Discard any private component,
and return the most efficient key possible.
"""
if not isinstance(key, RSAKey):
raise AssertionError()
return _createPublicRSAKey(key.n, key.e)
def _createPrivateKey(key):
"""
Create a new private key. Return the most efficient key possible.
"""
if not isinstance(key, RSAKey):
raise AssertionError()
if not key.hasPrivateKey():
raise AssertionError()
return _createPrivateRSAKey(key.n, key.e, key.d, key.p, key.q, key.dP,
key.dQ, key.qInv)
def _createPublicRSAKey(n, e, implementations = ["openssl", "pycrypto",
"python"]):
for implementation in implementations:
if implementation == "openssl" and cryptomath.m2cryptoLoaded:
return OpenSSL_RSAKey(n, e)
elif implementation == "pycrypto" and cryptomath.pycryptoLoaded:
return PyCrypto_RSAKey(n, e)
elif implementation == "python":
return Python_RSAKey(n, e)
raise ValueError("No acceptable implementations")
def _createPrivateRSAKey(n, e, d, p, q, dP, dQ, qInv,
implementations = ["pycrypto", "python"]):
for implementation in implementations:
if implementation == "pycrypto" and cryptomath.pycryptoLoaded:
return PyCrypto_RSAKey(n, e, d, p, q, dP, dQ, qInv)
elif implementation == "python":
return Python_RSAKey(n, e, d, p, q, dP, dQ, qInv)
raise ValueError("No acceptable implementations")
| bsd-3-clause | 5,882,701,807,916,199,000 | -4,241,647,922,413,371,000 | 35.176955 | 78 | 0.670458 | false |
abergeron/pylearn2 | pylearn2/training_algorithms/learning_rule.py | 32 | 17648 | """
A module containing different learning rules for use with the SGD training
algorithm.
"""
import numpy as np
import warnings
from theano.compat import six
from theano import config
from theano import tensor as T
from pylearn2.compat import OrderedDict
from pylearn2.space import NullSpace
from pylearn2.train_extensions import TrainExtension
from pylearn2.utils import sharedX
from pylearn2.utils import wraps
from pylearn2.monitor import Monitor
class LearningRule():
"""
    A pylearn2 learning rule is an object which computes new parameter values
    given (1) a learning rate, (2) current parameter values and (3) the current
    estimated gradient.
"""
def add_channels_to_monitor(self, monitor, monitoring_dataset):
"""
Method called by the training algorithm, which allows LearningRules to
add monitoring channels.
Parameters
----------
monitor : pylearn2.monitor.Monitor
Monitor object, to which the rule should register additional
monitoring channels.
monitoring_dataset : pylearn2.datasets.dataset.Dataset or dict
Dataset instance or dictionary whose values are Dataset objects.
"""
pass
def get_updates(self, learning_rate, grads, lr_scalers=None):
"""
Provides the symbolic (theano) description of the updates needed to
perform this learning rule.
Parameters
----------
learning_rate : float
Learning rate coefficient.
grads : dict
A dictionary mapping from the model's parameters to their
gradients.
lr_scalers : dict
A dictionary mapping from the model's parameters to a learning
rate multiplier.
Returns
-------
        updates : OrderedDict
A dictionary mapping from the old model parameters, to their new
values after a single iteration of the learning rule.
Notes
-----
e.g. for standard SGD, one would return `sgd_rule_updates` defined
below. Note that such a `LearningRule` object is not implemented, as
these updates are implemented by default when the `learning_rule`
parameter of sgd.SGD.__init__ is None.
.. code-block:: python
sgd_rule_updates = OrderedDict()
for (param, grad) in grads.iteritems():
sgd_rule_updates[k] = (param - learning_rate *
lr_scalers.get(param, 1.) * grad)
"""
raise NotImplementedError(str(type(self)) + " does not implement "
"get_updates.")
class Momentum(LearningRule):
"""
Implements momentum as described in Section 9 of
"A Practical Guide to Training Restricted Boltzmann Machines",
Geoffrey Hinton.
Parameters are updated by the formula:
inc := momentum * inc - learning_rate * d cost / d param
param := param + inc
Parameters
----------
init_momentum : float
Initial value for the momentum coefficient. It remains fixed during
training unless used with a `MomentumAdjustor`
extension.
nesterov_momentum: bool
Use the accelerated momentum technique described in:
"Advances in Optimizing Recurrent Networks", Yoshua Bengio, et al.
"""
def __init__(self, init_momentum, nesterov_momentum=False):
assert init_momentum >= 0.
assert init_momentum < 1.
self.momentum = sharedX(init_momentum, 'momentum')
self.nesterov_momentum = nesterov_momentum
def add_channels_to_monitor(self, monitor, monitoring_dataset):
"""
Activates monitoring of the momentum.
Parameters
----------
monitor : pylearn2.monitor.Monitor
Monitor object, to which the rule should register additional
monitoring channels.
monitoring_dataset : pylearn2.datasets.dataset.Dataset or dict
Dataset instance or dictionary whose values are Dataset objects.
"""
monitor.add_channel(
name='momentum',
ipt=None,
val=self.momentum,
data_specs=(NullSpace(), ''),
dataset=monitoring_dataset)
def get_updates(self, learning_rate, grads, lr_scalers=None):
"""
Provides the updates for learning with gradient descent + momentum.
Parameters
----------
learning_rate : float
Learning rate coefficient.
grads : dict
A dictionary mapping from the model's parameters to their
gradients.
lr_scalers : dict
A dictionary mapping from the model's parameters to a learning
rate multiplier.
"""
updates = OrderedDict()
for (param, grad) in six.iteritems(grads):
vel = sharedX(param.get_value() * 0.)
assert param.dtype == vel.dtype
assert grad.dtype == param.dtype
if param.name is not None:
vel.name = 'vel_' + param.name
scaled_lr = learning_rate * lr_scalers.get(param, 1.)
updates[vel] = self.momentum * vel - scaled_lr * grad
inc = updates[vel]
if self.nesterov_momentum:
inc = self.momentum * inc - scaled_lr * grad
assert inc.dtype == vel.dtype
updates[param] = param + inc
return updates
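# Hedged usage sketch (added, not part of the original module): a Momentum
# instance is normally handed to the SGD training algorithm through its
# `learning_rule` argument; the exact SGD constructor arguments shown here
# are illustrative and may differ between pylearn2 versions.
#
#     from pylearn2.training_algorithms.sgd import SGD
#     algorithm = SGD(learning_rate=0.01,
#                     learning_rule=Momentum(init_momentum=0.5,
#                                            nesterov_momentum=True))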
class MomentumAdjustor(TrainExtension):
"""
A TrainExtension that implements a linear momentum schedule.
Parameters
----------
final_momentum : float
The momentum coefficient to use at the end of learning.
start : int
The epoch on which to start growing the momentum coefficient.
saturate : int
        The epoch on which the momentum should reach its final value.
"""
def __init__(self, final_momentum, start, saturate):
if saturate < start:
raise TypeError("Momentum can't saturate at its maximum value " +
"before it starts increasing.")
self.__dict__.update(locals())
del self.self
self._initialized = False
self._count = 0
def setup(self, model, dataset, algorithm):
"""
Initializes the momentum schedule based on epochs_seen.
Parameters
----------
model : pylearn2.models.Model
The model to which the training algorithm is applied.
dataset : pylearn2.datasets.Dataset
The dataset to which the model is applied.
algorithm : pylearn2.training_algorithms.TrainingAlgorithm
Describes how gradients should be updated.
"""
monitor = Monitor.get_monitor(model)
self._count = monitor.get_epochs_seen()
self._apply_momentum(algorithm)
def on_monitor(self, model, dataset, algorithm):
"""
Updates the momentum according to the linear schedule.
Parameters
----------
model : pylearn2.models.Model
The model to which the training algorithm is applied.
dataset : pylearn2.datasets.Dataset
The dataset to which the model is applied.
algorithm : pylearn2.training_algorithms.TrainingAlgorithm
Describes how gradients should be updated.
"""
self._count += 1
self._apply_momentum(algorithm)
def _apply_momentum(self, algorithm):
"""Updates the momentum on algorithm based on the epochs elapsed."""
if not hasattr(algorithm, 'learning_rule'):
raise ValueError(
'For MomentumAdjustor to work, you need to use a '
'TrainingAlgorithm that supports learning rules '
'(for instance, SGD), and specify a learning_rule '
'(for instance, Momentum) for that training algorithm.')
momentum = algorithm.learning_rule.momentum
if not self._initialized:
self._init_momentum = momentum.get_value()
self._initialized = True
momentum.set_value(np.cast[config.floatX](self.current_momentum()))
def current_momentum(self):
"""Returns the momentum currently desired by the schedule."""
w = self.saturate - self.start
if w == 0:
# saturate=start, so just jump straight to final momentum
if self._count >= self.start:
return self.final_momentum
return self._init_momentum
alpha = float(self._count - self.start) / float(w)
if alpha < 0.:
alpha = 0.
if alpha > 1.:
alpha = 1.
return self._init_momentum * (1 - alpha) + alpha * self.final_momentum
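# Worked example of the schedule above (added for illustration; the numbers
# are assumptions): with start=2, saturate=6, an initial momentum of 0.5 and
# final_momentum=0.9, the coefficient interpolates linearly over epochs 2..6:
#     epoch 2 -> alpha=0.0 -> momentum 0.5
#     epoch 4 -> alpha=0.5 -> momentum 0.7
#     epoch 6 -> alpha=1.0 -> momentum 0.9 (and stays there afterwards)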
class AdaDelta(LearningRule):
"""
Implements the AdaDelta learning rule as described in:
"AdaDelta: An Adaptive Learning Rate Method", Matthew D. Zeiler.
Parameters
----------
decay : float, optional
Decay rate :math:`\\rho` in Algorithm 1 of the aforementioned
paper.
"""
def __init__(self, decay=0.95):
assert decay >= 0.
assert decay < 1.
self.decay = decay
def get_updates(self, learning_rate, grads, lr_scalers=None):
"""
Compute the AdaDelta updates
Parameters
----------
learning_rate : float
Learning rate coefficient.
grads : dict
A dictionary mapping from the model's parameters to their
gradients.
lr_scalers : dict
A dictionary mapping from the model's parameters to a learning
rate multiplier.
"""
updates = OrderedDict()
for param in grads.keys():
# mean_squared_grad := E[g^2]_{t-1}
mean_square_grad = sharedX(param.get_value() * 0.)
# mean_square_dx := E[(\Delta x)^2]_{t-1}
mean_square_dx = sharedX(param.get_value() * 0.)
if param.name is not None:
mean_square_grad.name = 'mean_square_grad_' + param.name
mean_square_dx.name = 'mean_square_dx_' + param.name
# Accumulate gradient
new_mean_squared_grad = (
self.decay * mean_square_grad +
(1 - self.decay) * T.sqr(grads[param])
)
# Compute update
epsilon = lr_scalers.get(param, 1.) * learning_rate
rms_dx_tm1 = T.sqrt(mean_square_dx + epsilon)
rms_grad_t = T.sqrt(new_mean_squared_grad + epsilon)
delta_x_t = - rms_dx_tm1 / rms_grad_t * grads[param]
# Accumulate updates
new_mean_square_dx = (
self.decay * mean_square_dx +
(1 - self.decay) * T.sqr(delta_x_t)
)
# Apply update
updates[mean_square_grad] = new_mean_squared_grad
updates[mean_square_dx] = new_mean_square_dx
updates[param] = param + delta_x_t
return updates
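# Hedged scalar sketch (added, not part of the original module) of the
# AdaDelta recurrences implemented above; here `eps` plays the role that
# `learning_rate * lr_scalers` plays inside get_updates:
#
#     import numpy as np
#     def adadelta_step(g, E_g2, E_dx2, rho=0.95, eps=1e-6):
#         E_g2 = rho * E_g2 + (1 - rho) * g ** 2            # E[g^2]_t
#         dx = -np.sqrt(E_dx2 + eps) / np.sqrt(E_g2 + eps) * g
#         E_dx2 = rho * E_dx2 + (1 - rho) * dx ** 2         # E[(dx)^2]_t
#         return dx, E_g2, E_dx2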
class AdaGrad(LearningRule):
"""
Implements the AdaGrad learning rule as described in:
"Adaptive subgradient methods for online learning and
stochastic optimization", Duchi J, Hazan E, Singer Y.
Parameters
----------
max_scaling: float, optional
Restrict the gradient scaling coefficient to values
        below `max_scaling`. This prevents corner cases (such as all-zero
        weights) from generating NaNs (see #1496).
"""
def __init__(self, max_scaling=1e5):
assert max_scaling > 0
self.eps = 1. / max_scaling
def get_updates(self, learning_rate, grads, lr_scalers=None):
"""
Compute the AdaGrad updates
Parameters
----------
learning_rate : float
Learning rate coefficient.
grads : dict
A dictionary mapping from the model's parameters to their
gradients.
lr_scalers : dict
A dictionary mapping from the model's parameters to a learning
rate multiplier.
"""
updates = OrderedDict()
for param in grads.keys():
# sum_square_grad := \sum g^2
sum_square_grad = sharedX(param.get_value() * 0.)
if param.name is not None:
sum_square_grad.name = 'sum_square_grad_' + param.name
# Accumulate gradient
new_sum_squared_grad = (
sum_square_grad + T.sqr(grads[param])
)
# Compute update
epsilon = lr_scalers.get(param, 1.) * learning_rate
scale = T.maximum(self.eps, T.sqrt(new_sum_squared_grad))
delta_x_t = (-epsilon / scale * grads[param])
# Apply update
updates[sum_square_grad] = new_sum_squared_grad
updates[param] = param + delta_x_t
return updates
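# Illustrative arithmetic (added; the numbers are assumptions): for a single
# weight receiving a constant gradient g=0.5 with learning_rate=0.1 and a
# negligible eps, the accumulated sum of squared gradients grows as
# 0.25, 0.5, 0.75, ..., so the step -learning_rate * g / sqrt(sum) shrinks as
# -0.100, -0.071, -0.058, ... over successive updates.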
class RMSProp(LearningRule):
"""
Implements the RMSProp learning rule.
The RMSProp learning rule is described by Hinton in `lecture 6
    <http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf>`_
of the Coursera Neural Networks for Machine Learning course.
In short, Hinton suggests "[the] magnitude of the gradient can be very
different for different weights and can change during learning. This
makes it hard to choose a global learning rate." RMSProp solves this
problem by "[dividing] the learning rate for a weight by a running
average of the magnitudes of recent gradients for that weight."
Parameters
----------
decay : float, optional
Decay constant similar to that used in AdaDelta and Momentum methods.
max_scaling: float, optional
Restrict the RMSProp gradient scaling coefficient to values
below `max_scaling`.
Notes
-----
An instance of this LearningRule should only be used with one
TrainingAlgorithm, and its get_updates method should be called
only once. This is required in order to make the monitoring
channels correctly report the moving averages.
"""
def __init__(self, decay=0.9, max_scaling=1e5):
assert 0. <= decay < 1.
assert max_scaling > 0
self.decay = sharedX(decay, 'decay')
self.epsilon = 1. / max_scaling
self.mean_square_grads = OrderedDict()
@wraps(LearningRule.add_channels_to_monitor)
def add_channels_to_monitor(self, monitor, monitoring_dataset):
"""
The channels added are the min, mean, and max of the
mean_square_grad of each parameter.
"""
channel_mapping = {
'_min': T.min,
'_max': T.max,
'_mean': T.mean
}
for mean_square_grad in self.mean_square_grads.values():
for suffix, op in channel_mapping.items():
monitor.add_channel(
name=(mean_square_grad.name + suffix),
ipt=None,
val=op(mean_square_grad),
data_specs=(NullSpace(), ''),
dataset=monitoring_dataset)
return
def get_updates(self, learning_rate, grads, lr_scalers=None):
"""
Provides the symbolic (theano) description of the updates needed to
perform this learning rule. See Notes for side-effects.
Parameters
----------
learning_rate : float
Learning rate coefficient.
grads : dict
A dictionary mapping from the model's parameters to their
gradients.
lr_scalers : dict
A dictionary mapping from the model's parameters to a learning
rate multiplier.
Returns
-------
        updates : OrderedDict
A dictionary mapping from the old model parameters, to their new
values after a single iteration of the learning rule.
Notes
-----
This method has the side effect of storing the moving average
of the square gradient in `self.mean_square_grads`. This is
necessary in order for the monitoring channels to be able
to track the value of these moving averages.
Therefore, this method should only get called once for each
instance of RMSProp.
"""
updates = OrderedDict()
for param in grads:
# mean_squared_grad := E[g^2]_{t-1}
mean_square_grad = sharedX(param.get_value() * 0.)
if param.name is None:
raise ValueError("Model parameters must be named.")
mean_square_grad.name = 'mean_square_grad_' + param.name
if param.name in self.mean_square_grads:
warnings.warn("Calling get_updates more than once on the "
"gradients of `%s` may make monitored values "
"incorrect." % param.name)
# Store variable in self.mean_square_grads for monitoring.
self.mean_square_grads[param.name] = mean_square_grad
# Accumulate gradient
new_mean_squared_grad = (self.decay * mean_square_grad +
(1 - self.decay) * T.sqr(grads[param]))
# Compute update
scaled_lr = lr_scalers.get(param, 1.) * learning_rate
rms_grad_t = T.sqrt(new_mean_squared_grad)
rms_grad_t = T.maximum(rms_grad_t, self.epsilon)
delta_x_t = - scaled_lr * grads[param] / rms_grad_t
# Apply update
updates[mean_square_grad] = new_mean_squared_grad
updates[param] = param + delta_x_t
return updates
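# Hedged usage sketch (added, not part of the original module): like the
# other rules, RMSProp is passed to SGD through `learning_rule`; calling
# get_updates only once per instance keeps the moving averages registered by
# add_channels_to_monitor consistent, as noted in the docstring above.
#
#     from pylearn2.training_algorithms.sgd import SGD
#     algorithm = SGD(learning_rate=1e-3,
#                     learning_rule=RMSProp(decay=0.9, max_scaling=1e5))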
| bsd-3-clause | -3,358,071,911,234,114,600 | -5,493,923,930,795,003,000 | 33.603922 | 79 | 0.590662 | false |
optima-ict/odoo | openerp/addons/base/module/module.py | 11 | 38925 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from docutils import nodes
from docutils.core import publish_string
from docutils.transforms import Transform, writer_aux
from docutils.writers.html4css1 import Writer
import importlib
import logging
from operator import attrgetter
import os
import re
import shutil
import tempfile
import urllib
import urllib2
import urlparse
import zipfile
import zipimport
import lxml.html
from openerp.exceptions import UserError
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO # NOQA
import openerp
import openerp.exceptions
from openerp import modules, tools
from openerp.modules.db import create_categories
from openerp.modules import get_module_resource
from openerp.tools import ormcache
from openerp.tools.parse_version import parse_version
from openerp.tools.translate import _
from openerp.tools import html_sanitize
from openerp.osv import osv, orm, fields
from openerp import api, fields as fields2
_logger = logging.getLogger(__name__)
ACTION_DICT = {
'view_type': 'form',
'view_mode': 'form',
'res_model': 'base.module.upgrade',
'target': 'new',
'type': 'ir.actions.act_window',
}
def backup(path, raise_exception=True):
path = os.path.normpath(path)
if not os.path.exists(path):
if not raise_exception:
return None
raise OSError('path does not exists')
cnt = 1
while True:
bck = '%s~%d' % (path, cnt)
if not os.path.exists(bck):
shutil.move(path, bck)
return bck
cnt += 1
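# Illustrative behaviour (added comment; the path is hypothetical): calling
# backup('/opt/odoo/addons/web') moves that directory to
# '/opt/odoo/addons/web~1' (or '~2', '~3', ... if earlier backups exist) and
# returns the new path; with raise_exception=False a missing path returns None.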
class module_category(osv.osv):
_name = "ir.module.category"
_description = "Application"
def _module_nbr(self, cr, uid, ids, prop, unknow_none, context):
cr.execute('SELECT category_id, COUNT(*) \
FROM ir_module_module \
WHERE category_id IN %(ids)s \
OR category_id IN (SELECT id \
FROM ir_module_category \
WHERE parent_id IN %(ids)s) \
GROUP BY category_id', {'ids': tuple(ids)}
)
result = dict(cr.fetchall())
for id in ids:
cr.execute('select id from ir_module_category where parent_id=%s', (id,))
result[id] = sum([result.get(c, 0) for (c,) in cr.fetchall()],
result.get(id, 0))
return result
_columns = {
'name': fields.char("Name", required=True, translate=True, select=True),
'parent_id': fields.many2one('ir.module.category', 'Parent Application', select=True),
'child_ids': fields.one2many('ir.module.category', 'parent_id', 'Child Applications'),
'module_nr': fields.function(_module_nbr, string='Number of Apps', type='integer'),
'module_ids': fields.one2many('ir.module.module', 'category_id', 'Modules'),
'description': fields.text("Description", translate=True),
'sequence': fields.integer('Sequence'),
'visible': fields.boolean('Visible'),
'xml_id': fields.function(osv.osv.get_external_id, type='char', string="External ID"),
}
_order = 'name'
_defaults = {
'visible': 1,
}
class MyFilterMessages(Transform):
"""
Custom docutils transform to remove `system message` for a document and
generate warnings.
(The standard filter removes them based on some `report_level` passed in
the `settings_override` dictionary, but if we use it, we can't see them
and generate warnings.)
"""
default_priority = 870
def apply(self):
for node in self.document.traverse(nodes.system_message):
_logger.warning("docutils' system message present: %s", str(node))
node.parent.remove(node)
class MyWriter(Writer):
"""
Custom docutils html4ccs1 writer that doesn't add the warnings to the
output document.
"""
def get_transforms(self):
return [MyFilterMessages, writer_aux.Admonitions]
class module(osv.osv):
_name = "ir.module.module"
_rec_name = "shortdesc"
_description = "Module"
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
res = super(module, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=False)
result = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'action_server_module_immediate_install')[1]
if view_type == 'form':
if res.get('toolbar',False):
list = [rec for rec in res['toolbar']['action'] if rec.get('id', False) != result]
res['toolbar'] = {'action': list}
return res
@classmethod
def get_module_info(cls, name):
info = {}
try:
info = modules.load_information_from_description_file(name)
except Exception:
            _logger.debug('Error when trying to fetch information for '
'module %s', name, exc_info=True)
return info
def _get_desc(self, cr, uid, ids, field_name=None, arg=None, context=None):
res = dict.fromkeys(ids, '')
for module in self.browse(cr, uid, ids, context=context):
path = get_module_resource(module.name, 'static/description/index.html')
if path:
with tools.file_open(path, 'rb') as desc_file:
doc = desc_file.read()
html = lxml.html.document_fromstring(doc)
for element, attribute, link, pos in html.iterlinks():
if element.get('src') and not '//' in element.get('src') and not 'static/' in element.get('src'):
element.set('src', "/%s/static/description/%s" % (module.name, element.get('src')))
res[module.id] = html_sanitize(lxml.html.tostring(html))
else:
overrides = {
'embed_stylesheet': False,
'doctitle_xform': False,
'output_encoding': 'unicode',
'xml_declaration': False,
}
output = publish_string(source=module.description or '', settings_overrides=overrides, writer=MyWriter())
res[module.id] = html_sanitize(output)
return res
def _get_latest_version(self, cr, uid, ids, field_name=None, arg=None, context=None):
default_version = modules.adapt_version('1.0')
res = dict.fromkeys(ids, default_version)
for m in self.browse(cr, uid, ids):
res[m.id] = self.get_module_info(m.name).get('version', default_version)
return res
def _get_views(self, cr, uid, ids, field_name=None, arg=None, context=None):
res = {}
model_data_obj = self.pool.get('ir.model.data')
dmodels = []
if field_name is None or 'views_by_module' in field_name:
dmodels.append('ir.ui.view')
if field_name is None or 'reports_by_module' in field_name:
dmodels.append('ir.actions.report.xml')
if field_name is None or 'menus_by_module' in field_name:
dmodels.append('ir.ui.menu')
assert dmodels, "no models for %s" % field_name
for module_rec in self.browse(cr, uid, ids, context=context):
res_mod_dic = res[module_rec.id] = {
'menus_by_module': [],
'reports_by_module': [],
'views_by_module': []
}
# Skip uninstalled modules below, no data to find anyway.
if module_rec.state not in ('installed', 'to upgrade', 'to remove'):
continue
# then, search and group ir.model.data records
imd_models = dict([(m, []) for m in dmodels])
imd_ids = model_data_obj.search(cr, uid, [
('module', '=', module_rec.name),
('model', 'in', tuple(dmodels))
])
for imd_res in model_data_obj.read(cr, uid, imd_ids, ['model', 'res_id'], context=context):
imd_models[imd_res['model']].append(imd_res['res_id'])
def browse(model):
M = self.pool[model]
# as this method is called before the module update, some xmlid may be invalid at this stage
# explictly filter records before reading them
ids = M.exists(cr, uid, imd_models.get(model, []), context)
return M.browse(cr, uid, ids, context)
def format_view(v):
aa = v.inherit_id and '* INHERIT ' or ''
return '%s%s (%s)' % (aa, v.name, v.type)
res_mod_dic['views_by_module'] = map(format_view, browse('ir.ui.view'))
res_mod_dic['reports_by_module'] = map(attrgetter('name'), browse('ir.actions.report.xml'))
res_mod_dic['menus_by_module'] = map(attrgetter('complete_name'), browse('ir.ui.menu'))
for key in res.iterkeys():
for k, v in res[key].iteritems():
res[key][k] = "\n".join(sorted(v))
return res
def _get_icon_image(self, cr, uid, ids, field_name=None, arg=None, context=None):
res = dict.fromkeys(ids, '')
for module in self.browse(cr, uid, ids, context=context):
path = get_module_resource(module.name, 'static', 'description', 'icon.png')
if path:
image_file = tools.file_open(path, 'rb')
try:
res[module.id] = image_file.read().encode('base64')
finally:
image_file.close()
return res
_columns = {
'name': fields.char("Technical Name", readonly=True, required=True, select=True),
'category_id': fields.many2one('ir.module.category', 'Category', readonly=True, select=True),
'shortdesc': fields.char('Module Name', readonly=True, translate=True),
'summary': fields.char('Summary', readonly=True, translate=True),
'description': fields.text("Description", readonly=True, translate=True),
'description_html': fields.function(_get_desc, string='Description HTML', type='html', method=True, readonly=True),
'author': fields.char("Author", readonly=True),
'maintainer': fields.char('Maintainer', readonly=True),
'contributors': fields.text('Contributors', readonly=True),
'website': fields.char("Website", readonly=True),
# attention: Incorrect field names !!
# installed_version refers the latest version (the one on disk)
# latest_version refers the installed version (the one in database)
# published_version refers the version available on the repository
'installed_version': fields.function(_get_latest_version, string='Latest Version', type='char'),
'latest_version': fields.char('Installed Version', readonly=True),
'published_version': fields.char('Published Version', readonly=True),
'url': fields.char('URL', readonly=True),
'sequence': fields.integer('Sequence'),
'dependencies_id': fields.one2many('ir.module.module.dependency', 'module_id', 'Dependencies', readonly=True),
'auto_install': fields.boolean('Automatic Installation',
help='An auto-installable module is automatically installed by the '
'system when all its dependencies are satisfied. '
'If the module has no dependency, it is always installed.'),
'state': fields.selection([
('uninstallable', 'Not Installable'),
('uninstalled', 'Not Installed'),
('installed', 'Installed'),
('to upgrade', 'To be upgraded'),
('to remove', 'To be removed'),
('to install', 'To be installed')
], string='Status', readonly=True, select=True),
'demo': fields.boolean('Demo Data', readonly=True),
'license': fields.selection([
('GPL-2', 'GPL Version 2'),
('GPL-2 or any later version', 'GPL-2 or later version'),
('GPL-3', 'GPL Version 3'),
('GPL-3 or any later version', 'GPL-3 or later version'),
('AGPL-3', 'Affero GPL-3'),
('LGPL-3', 'LGPL Version 3'),
('Other OSI approved licence', 'Other OSI Approved Licence'),
('OEEL-1', 'Odoo Enterprise Edition License v1.0'),
('Other proprietary', 'Other Proprietary')
], string='License', readonly=True),
'menus_by_module': fields.function(_get_views, string='Menus', type='text', multi="meta", store=True),
'reports_by_module': fields.function(_get_views, string='Reports', type='text', multi="meta", store=True),
'views_by_module': fields.function(_get_views, string='Views', type='text', multi="meta", store=True),
'application': fields.boolean('Application', readonly=True),
'icon': fields.char('Icon URL'),
'icon_image': fields.function(_get_icon_image, string='Icon', type="binary"),
}
_defaults = {
'state': 'uninstalled',
'sequence': 100,
'demo': False,
'license': 'LGPL-3',
}
_order = 'sequence,name'
def _name_uniq_msg(self, cr, uid, ids, context=None):
return _('The name of the module must be unique !')
_sql_constraints = [
('name_uniq', 'UNIQUE (name)', _name_uniq_msg),
]
def unlink(self, cr, uid, ids, context=None):
if not ids:
return True
if isinstance(ids, (int, long)):
ids = [ids]
mod_names = []
for mod in self.read(cr, uid, ids, ['state', 'name'], context):
if mod['state'] in ('installed', 'to upgrade', 'to remove', 'to install'):
raise UserError(_('You try to remove a module that is installed or will be installed'))
mod_names.append(mod['name'])
#Removing the entry from ir_model_data
#ids_meta = self.pool.get('ir.model.data').search(cr, uid, [('name', '=', 'module_meta_information'), ('module', 'in', mod_names)])
#if ids_meta:
# self.pool.get('ir.model.data').unlink(cr, uid, ids_meta, context)
self.clear_caches()
return super(module, self).unlink(cr, uid, ids, context=context)
@staticmethod
def _check_external_dependencies(terp):
depends = terp.get('external_dependencies')
if not depends:
return
for pydep in depends.get('python', []):
try:
importlib.import_module(pydep)
except ImportError:
raise ImportError('No module named %s' % (pydep,))
for binary in depends.get('bin', []):
try:
tools.find_in_path(binary)
except IOError:
raise Exception('Unable to find %r in path' % (binary,))
@classmethod
def check_external_dependencies(cls, module_name, newstate='to install'):
terp = cls.get_module_info(module_name)
try:
cls._check_external_dependencies(terp)
except Exception, e:
if newstate == 'to install':
msg = _('Unable to install module "%s" because an external dependency is not met: %s')
elif newstate == 'to upgrade':
msg = _('Unable to upgrade module "%s" because an external dependency is not met: %s')
else:
msg = _('Unable to process module "%s" because an external dependency is not met: %s')
raise UserError(msg % (module_name, e.args[0]))
@api.multi
def state_update(self, newstate, states_to_update, level=100):
if level < 1:
raise UserError(_('Recursion error in modules dependencies !'))
# whether some modules are installed with demo data
demo = False
for module in self:
# determine dependency modules to update/others
update_mods, ready_mods = self.browse(), self.browse()
for dep in module.dependencies_id:
if dep.state == 'unknown':
raise UserError(_("You try to install module '%s' that depends on module '%s'.\nBut the latter module is not available in your system.") % (module.name, dep.name,))
if dep.depend_id.state == newstate:
ready_mods += dep.depend_id
else:
update_mods += dep.depend_id
# update dependency modules that require it, and determine demo for module
update_demo = update_mods.state_update(newstate, states_to_update, level=level-1)
module_demo = module.demo or update_demo or any(mod.demo for mod in ready_mods)
demo = demo or module_demo
# check dependencies and update module itself
self.check_external_dependencies(module.name, newstate)
if module.state in states_to_update:
module.write({'state': newstate, 'demo': module_demo})
return demo
@api.multi
def button_install(self):
# domain to select auto-installable (but not yet installed) modules
auto_domain = [('state', '=', 'uninstalled'), ('auto_install', '=', True)]
# determine whether an auto-install module must be installed:
# - all its dependencies are installed or to be installed,
# - at least one dependency is 'to install'
install_states = frozenset(('installed', 'to install', 'to upgrade'))
def must_install(module):
states = set(dep.state for dep in module.dependencies_id)
return states <= install_states and 'to install' in states
modules = self
while modules:
# Mark the given modules and their dependencies to be installed.
modules.state_update('to install', ['uninstalled'])
# Determine which auto-installable modules must be installed.
modules = self.search(auto_domain).filtered(must_install)
# retrieve the installed (or to be installed) theme modules
theme_category = self.env.ref('base.module_category_theme')
theme_modules = self.search([
('state', 'in', list(install_states)),
('category_id', 'child_of', [theme_category.id]),
])
# determine all theme modules that mods depends on, including mods
def theme_deps(mods):
deps = mods.mapped('dependencies_id.depend_id')
while deps:
mods |= deps
deps = deps.mapped('dependencies_id.depend_id')
return mods & theme_modules
if any(module.state == 'to install' for module in theme_modules):
# check: the installation is valid if all installed theme modules
# correspond to one theme module and all its theme dependencies
if not any(theme_deps(module) == theme_modules for module in theme_modules):
state_labels = dict(self.fields_get(['state'])['state']['selection'])
themes_list = [
"- %s (%s)" % (module.shortdesc, state_labels[module.state])
for module in theme_modules
]
raise UserError(_(
"You are trying to install incompatible themes:\n%s\n\n" \
"Please uninstall your current theme before installing another one.\n"
"Warning: switching themes may significantly alter the look of your current website pages!"
) % ("\n".join(themes_list)))
return dict(ACTION_DICT, name=_('Install'))
def button_immediate_install(self, cr, uid, ids, context=None):
""" Installs the selected module(s) immediately and fully,
returns the next res.config action to execute
:param ids: identifiers of the modules to install
:returns: next res.config item to execute
:rtype: dict[str, object]
"""
return self._button_immediate_function(cr, uid, ids, self.button_install, context=context)
def button_install_cancel(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'state': 'uninstalled', 'demo': False})
return True
def module_uninstall(self, cr, uid, ids, context=None):
"""Perform the various steps required to uninstall a module completely
including the deletion of all database structures created by the module:
tables, columns, constraints, etc."""
ir_model_data = self.pool.get('ir.model.data')
modules_to_remove = [m.name for m in self.browse(cr, uid, ids, context)]
ir_model_data._module_data_uninstall(cr, uid, modules_to_remove, context)
self.write(cr, uid, ids, {'state': 'uninstalled', 'latest_version': False})
return True
def downstream_dependencies(self, cr, uid, ids, known_dep_ids=None,
exclude_states=['uninstalled', 'uninstallable', 'to remove'],
context=None):
"""Return the ids of all modules that directly or indirectly depend
on the given module `ids`, and that satisfy the `exclude_states`
filter"""
if not ids:
return []
known_dep_ids = set(known_dep_ids or [])
cr.execute('''SELECT DISTINCT m.id
FROM
ir_module_module_dependency d
JOIN
ir_module_module m ON (d.module_id=m.id)
WHERE
d.name IN (SELECT name from ir_module_module where id in %s) AND
m.state NOT IN %s AND
m.id NOT IN %s ''',
(tuple(ids), tuple(exclude_states), tuple(known_dep_ids or ids)))
new_dep_ids = set([m[0] for m in cr.fetchall()])
missing_mod_ids = new_dep_ids - known_dep_ids
known_dep_ids |= new_dep_ids
if missing_mod_ids:
known_dep_ids |= set(self.downstream_dependencies(cr, uid, list(missing_mod_ids),
known_dep_ids, exclude_states, context))
return list(known_dep_ids)
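    # Illustrative example (added comment; the module names are hypothetical):
    # if module 'sale_stock' depends on 'sale', then
    # downstream_dependencies(cr, uid, [sale_id]) returns the ids of
    # 'sale_stock' and of anything that depends on it in turn, so that
    # button_uninstall can mark the whole chain 'to remove'.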
def upstream_dependencies(self, cr, uid, ids, known_dep_ids=None,
exclude_states=['installed', 'uninstallable', 'to remove'],
context=None):
""" Return the dependency tree of modules of the given `ids`, and that
satisfy the `exclude_states` filter """
if not ids:
return []
known_dep_ids = set(known_dep_ids or [])
cr.execute('''SELECT DISTINCT m.id
FROM
ir_module_module_dependency d
JOIN
ir_module_module m ON (d.module_id=m.id)
WHERE
m.name IN (SELECT name from ir_module_module_dependency where module_id in %s) AND
m.state NOT IN %s AND
m.id NOT IN %s ''',
(tuple(ids), tuple(exclude_states), tuple(known_dep_ids or ids)))
new_dep_ids = set([m[0] for m in cr.fetchall()])
missing_mod_ids = new_dep_ids - known_dep_ids
known_dep_ids |= new_dep_ids
if missing_mod_ids:
known_dep_ids |= set(self.upstream_dependencies(cr, uid, list(missing_mod_ids),
known_dep_ids, exclude_states, context))
return list(known_dep_ids)
def _button_immediate_function(self, cr, uid, ids, function, context=None):
function(cr, uid, ids, context=context)
cr.commit()
api.Environment.reset()
registry = openerp.modules.registry.RegistryManager.new(cr.dbname, update_module=True)
config = registry['res.config'].next(cr, uid, [], context=context) or {}
if config.get('type') not in ('ir.actions.act_window_close',):
return config
# reload the client; open the first available root menu
menu_obj = registry['ir.ui.menu']
menu_ids = menu_obj.search(cr, uid, [('parent_id', '=', False)], context=context)
return {
'type': 'ir.actions.client',
'tag': 'reload',
'params': {'menu_id': menu_ids and menu_ids[0] or False}
}
#TODO remove me in master, not called anymore
def button_immediate_uninstall(self, cr, uid, ids, context=None):
"""
Uninstall the selected module(s) immediately and fully,
returns the next res.config action to execute
"""
return self._button_immediate_function(cr, uid, ids, self.button_uninstall, context=context)
def button_uninstall(self, cr, uid, ids, context=None):
if any(m.name == 'base' for m in self.browse(cr, uid, ids, context=context)):
raise UserError(_("The `base` module cannot be uninstalled"))
dep_ids = self.downstream_dependencies(cr, uid, ids, context=context)
self.write(cr, uid, ids + dep_ids, {'state': 'to remove'})
return dict(ACTION_DICT, name=_('Uninstall'))
def button_uninstall_cancel(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'state': 'installed'})
return True
def button_immediate_upgrade(self, cr, uid, ids, context=None):
"""
Upgrade the selected module(s) immediately and fully,
return the next res.config action to execute
"""
return self._button_immediate_function(cr, uid, ids, self.button_upgrade, context=context)
def button_upgrade(self, cr, uid, ids, context=None):
depobj = self.pool.get('ir.module.module.dependency')
todo = list(self.browse(cr, uid, ids, context=context))
self.update_list(cr, uid)
i = 0
while i < len(todo):
mod = todo[i]
i += 1
if mod.state not in ('installed', 'to upgrade'):
raise UserError(_("Can not upgrade module '%s'. It is not installed.") % (mod.name,))
self.check_external_dependencies(mod.name, 'to upgrade')
iids = depobj.search(cr, uid, [('name', '=', mod.name)], context=context)
for dep in depobj.browse(cr, uid, iids, context=context):
if dep.module_id.state == 'installed' and dep.module_id not in todo:
todo.append(dep.module_id)
ids = map(lambda x: x.id, todo)
self.write(cr, uid, ids, {'state': 'to upgrade'}, context=context)
to_install = []
for mod in todo:
for dep in mod.dependencies_id:
if dep.state == 'unknown':
raise UserError(_('You try to upgrade a module that depends on the module: %s.\nBut this module is not available in your system.') % (dep.name,))
if dep.state == 'uninstalled':
ids2 = self.search(cr, uid, [('name', '=', dep.name)])
to_install.extend(ids2)
self.button_install(cr, uid, to_install, context=context)
return dict(ACTION_DICT, name=_('Apply Schedule Upgrade'))
def button_upgrade_cancel(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'state': 'installed'})
return True
@staticmethod
def get_values_from_terp(terp):
return {
'description': terp.get('description', ''),
'shortdesc': terp.get('name', ''),
'author': terp.get('author', 'Unknown'),
'maintainer': terp.get('maintainer', False),
'contributors': ', '.join(terp.get('contributors', [])) or False,
'website': terp.get('website', ''),
'license': terp.get('license', 'LGPL-3'),
'sequence': terp.get('sequence', 100),
'application': terp.get('application', False),
'auto_install': terp.get('auto_install', False),
'icon': terp.get('icon', False),
'summary': terp.get('summary', ''),
}
def create(self, cr, uid, vals, context=None):
new_id = super(module, self).create(cr, uid, vals, context=context)
module_metadata = {
'name': 'module_%s' % vals['name'],
'model': 'ir.module.module',
'module': 'base',
'res_id': new_id,
'noupdate': True,
}
self.pool['ir.model.data'].create(cr, uid, module_metadata)
return new_id
# update the list of available packages
def update_list(self, cr, uid, context=None):
res = [0, 0] # [update, add]
default_version = modules.adapt_version('1.0')
known_mods = self.browse(cr, uid, self.search(cr, uid, []))
known_mods_names = dict([(m.name, m) for m in known_mods])
# iterate through detected modules and update/create them in db
for mod_name in modules.get_modules():
mod = known_mods_names.get(mod_name)
terp = self.get_module_info(mod_name)
values = self.get_values_from_terp(terp)
if mod:
updated_values = {}
for key in values:
old = getattr(mod, key)
updated = isinstance(values[key], basestring) and tools.ustr(values[key]) or values[key]
if (old or updated) and updated != old:
updated_values[key] = values[key]
if terp.get('installable', True) and mod.state == 'uninstallable':
updated_values['state'] = 'uninstalled'
if parse_version(terp.get('version', default_version)) > parse_version(mod.latest_version or default_version):
res[0] += 1
if updated_values:
self.write(cr, uid, mod.id, updated_values)
else:
mod_path = modules.get_module_path(mod_name)
if not mod_path:
continue
if not terp or not terp.get('installable', True):
continue
id = self.create(cr, uid, dict(name=mod_name, state='uninstalled', **values))
mod = self.browse(cr, uid, id)
res[1] += 1
self._update_dependencies(cr, uid, mod, terp.get('depends', []))
self._update_category(cr, uid, mod, terp.get('category', 'Uncategorized'))
return res
def download(self, cr, uid, ids, download=True, context=None):
return []
def install_from_urls(self, cr, uid, urls, context=None):
if not self.pool['res.users'].has_group(cr, uid, 'base.group_system'):
raise openerp.exceptions.AccessDenied()
apps_server = urlparse.urlparse(self.get_apps_server(cr, uid, context=context))
OPENERP = openerp.release.product_name.lower()
tmp = tempfile.mkdtemp()
_logger.debug('Install from url: %r', urls)
try:
# 1. Download & unzip missing modules
for module_name, url in urls.items():
if not url:
continue # nothing to download, local version is already the last one
up = urlparse.urlparse(url)
if up.scheme != apps_server.scheme or up.netloc != apps_server.netloc:
raise openerp.exceptions.AccessDenied()
try:
_logger.info('Downloading module `%s` from OpenERP Apps', module_name)
content = urllib2.urlopen(url).read()
except Exception:
_logger.exception('Failed to fetch module %s', module_name)
raise UserError(_('The `%s` module appears to be unavailable at the moment, please try again later.') % module_name)
else:
zipfile.ZipFile(StringIO(content)).extractall(tmp)
assert os.path.isdir(os.path.join(tmp, module_name))
# 2a. Copy/Replace module source in addons path
for module_name, url in urls.items():
if module_name == OPENERP or not url:
continue # OPENERP is special case, handled below, and no URL means local module
module_path = modules.get_module_path(module_name, downloaded=True, display_warning=False)
bck = backup(module_path, False)
_logger.info('Copy downloaded module `%s` to `%s`', module_name, module_path)
shutil.move(os.path.join(tmp, module_name), module_path)
if bck:
shutil.rmtree(bck)
# 2b. Copy/Replace server+base module source if downloaded
if urls.get(OPENERP, None):
# special case. it contains the server and the base module.
# extract path is not the same
base_path = os.path.dirname(modules.get_module_path('base'))
# copy all modules in the SERVER/openerp/addons directory to the new "openerp" module (except base itself)
for d in os.listdir(base_path):
if d != 'base' and os.path.isdir(os.path.join(base_path, d)):
destdir = os.path.join(tmp, OPENERP, 'addons', d) # XXX 'openerp' subdirectory ?
shutil.copytree(os.path.join(base_path, d), destdir)
# then replace the server by the new "base" module
server_dir = openerp.tools.config['root_path'] # XXX or dirname()
bck = backup(server_dir)
_logger.info('Copy downloaded module `openerp` to `%s`', server_dir)
shutil.move(os.path.join(tmp, OPENERP), server_dir)
#if bck:
# shutil.rmtree(bck)
self.update_list(cr, uid, context=context)
with_urls = [m for m, u in urls.items() if u]
downloaded_ids = self.search(cr, uid, [('name', 'in', with_urls)], context=context)
already_installed = self.search(cr, uid, [('id', 'in', downloaded_ids), ('state', '=', 'installed')], context=context)
to_install_ids = self.search(cr, uid, [('name', 'in', urls.keys()), ('state', '=', 'uninstalled')], context=context)
post_install_action = self.button_immediate_install(cr, uid, to_install_ids, context=context)
if already_installed:
# in this case, force server restart to reload python code...
cr.commit()
openerp.service.server.restart()
return {
'type': 'ir.actions.client',
'tag': 'home',
'params': {'wait': True},
}
return post_install_action
finally:
shutil.rmtree(tmp)
def get_apps_server(self, cr, uid, context=None):
return tools.config.get('apps_server', 'https://apps.openerp.com/apps')
def _update_dependencies(self, cr, uid, mod_browse, depends=None):
if depends is None:
depends = []
existing = set(x.name for x in mod_browse.dependencies_id)
needed = set(depends)
for dep in (needed - existing):
cr.execute('INSERT INTO ir_module_module_dependency (module_id, name) values (%s, %s)', (mod_browse.id, dep))
for dep in (existing - needed):
cr.execute('DELETE FROM ir_module_module_dependency WHERE module_id = %s and name = %s', (mod_browse.id, dep))
self.invalidate_cache(cr, uid, ['dependencies_id'], [mod_browse.id])
def _update_category(self, cr, uid, mod_browse, category='Uncategorized'):
current_category = mod_browse.category_id
current_category_path = []
while current_category:
current_category_path.insert(0, current_category.name)
current_category = current_category.parent_id
categs = category.split('/')
if categs != current_category_path:
cat_id = create_categories(cr, categs)
mod_browse.write({'category_id': cat_id})
def update_translations(self, cr, uid, ids, filter_lang=None, context=None):
if not filter_lang:
res_lang = self.pool.get('res.lang')
lang_ids = res_lang.search(cr, uid, [('translatable', '=', True)])
filter_lang = [lang.code for lang in res_lang.browse(cr, uid, lang_ids)]
elif not isinstance(filter_lang, (list, tuple)):
filter_lang = [filter_lang]
modules = [m.name for m in self.browse(cr, uid, ids) if m.state in ('installed', 'to install', 'to upgrade')]
self.pool.get('ir.translation').load_module_terms(cr, modules, filter_lang, context=context)
def check(self, cr, uid, ids, context=None):
for mod in self.browse(cr, uid, ids, context=context):
if not mod.description:
_logger.warning('module %s: description is empty !', mod.name)
@api.model
@ormcache()
def _installed(self):
""" Return the set of installed modules as a dictionary {name: id} """
return {
module.name: module.id
for module in self.sudo().search([('state', '=', 'installed')])
}
DEP_STATES = [
('uninstallable', 'Uninstallable'),
('uninstalled', 'Not Installed'),
('installed', 'Installed'),
('to upgrade', 'To be upgraded'),
('to remove', 'To be removed'),
('to install', 'To be installed'),
('unknown', 'Unknown'),
]
class module_dependency(osv.Model):
_name = "ir.module.module.dependency"
_description = "Module dependency"
# the dependency name
name = fields2.Char(index=True)
# the module that depends on it
module_id = fields2.Many2one('ir.module.module', 'Module', ondelete='cascade')
# the module corresponding to the dependency, and its status
depend_id = fields2.Many2one('ir.module.module', 'Dependency', compute='_compute_depend')
state = fields2.Selection(DEP_STATES, string='Status', compute='_compute_state')
@api.multi
@api.depends('name')
def _compute_depend(self):
# retrieve all modules corresponding to the dependency names
names = list(set(dep.name for dep in self))
mods = self.env['ir.module.module'].search([('name', 'in', names)])
# index modules by name, and assign dependencies
name_mod = dict((mod.name, mod) for mod in mods)
for dep in self:
dep.depend_id = name_mod.get(dep.name)
@api.one
@api.depends('depend_id.state')
def _compute_state(self):
self.state = self.depend_id.state or 'unknown'
| agpl-3.0 | 1,112,513,970,195,323,400 | -7,866,685,055,210,460,000 | 44.42007 | 184 | 0.57359 | false |
ikoula/cloudstack | test/integration/smoke/test_routers_iptables_default_policy.py | 1 | 24995 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" Test VPC nics after router is destroyed """
from nose.plugins.attrib import attr
from marvin.cloudstackTestCase import cloudstackTestCase
from marvin.lib.base import (stopRouter,
startRouter,
destroyRouter,
Account,
VpcOffering,
VPC,
ServiceOffering,
NATRule,
NetworkACL,
PublicIPAddress,
NetworkOffering,
Network,
VirtualMachine,
LoadBalancerRule)
from marvin.lib.common import (get_domain,
get_zone,
get_template,
list_routers,
list_hosts)
from marvin.lib.utils import (cleanup_resources,
get_process_status)
import socket
import time
import inspect
import logging
class Services:
"""Test VPC network services - Port Forwarding Rules Test Data Class.
"""
def __init__(self):
self.services = {
"configurableData": {
"host": {
"password": "password",
"username": "root",
"port": 22
},
"input": "INPUT",
"forward": "FORWARD"
},
"account": {
"email": "[email protected]",
"firstname": "Test",
"lastname": "User",
"username": "test",
# Random characters are appended for unique
# username
"password": "password",
},
"service_offering": {
"name": "Tiny Instance",
"displaytext": "Tiny Instance",
"cpunumber": 1,
"cpuspeed": 100,
"memory": 128,
},
"shared_network_offering_sg": {
"name": "MySharedOffering-sg",
"displaytext": "MySharedOffering-sg",
"guestiptype": "Shared",
"supportedservices": "Dhcp,Dns,UserData,SecurityGroup",
"specifyVlan": "False",
"specifyIpRanges": "False",
"traffictype": "GUEST",
"serviceProviderList": {
"Dhcp": "VirtualRouter",
"Dns": "VirtualRouter",
"UserData": "VirtualRouter",
"SecurityGroup": "SecurityGroupProvider"
}
},
"network_offering": {
"name": 'Test Network offering',
"displaytext": 'Test Network offering',
"guestiptype": 'Isolated',
"supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding',
"traffictype": 'GUEST',
"availability": 'Optional',
"serviceProviderList": {
"Dhcp": 'VirtualRouter',
"Dns": 'VirtualRouter',
"SourceNat": 'VirtualRouter',
"PortForwarding": 'VirtualRouter',
},
},
"vpc_network_offering": {
"name": 'VPC Network offering',
"displaytext": 'VPC Network off',
"guestiptype": 'Isolated',
"supportedservices": 'Vpn,Dhcp,Dns,SourceNat,PortForwarding,Lb,UserData,StaticNat,NetworkACL',
"traffictype": 'GUEST',
"availability": 'Optional',
"useVpc": 'on',
"serviceProviderList": {
"Vpn": 'VpcVirtualRouter',
"Dhcp": 'VpcVirtualRouter',
"Dns": 'VpcVirtualRouter',
"SourceNat": 'VpcVirtualRouter',
"PortForwarding": 'VpcVirtualRouter',
"Lb": 'VpcVirtualRouter',
"UserData": 'VpcVirtualRouter',
"StaticNat": 'VpcVirtualRouter',
"NetworkACL": 'VpcVirtualRouter'
},
},
"vpc_network_offering_no_lb": {
"name": 'VPC Network offering',
"displaytext": 'VPC Network off',
"guestiptype": 'Isolated',
"supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding,UserData,StaticNat,NetworkACL',
"traffictype": 'GUEST',
"availability": 'Optional',
"useVpc": 'on',
"serviceProviderList": {
"Dhcp": 'VpcVirtualRouter',
"Dns": 'VpcVirtualRouter',
"SourceNat": 'VpcVirtualRouter',
"PortForwarding": 'VpcVirtualRouter',
"UserData": 'VpcVirtualRouter',
"StaticNat": 'VpcVirtualRouter',
"NetworkACL": 'VpcVirtualRouter'
},
},
"vpc_offering": {
"name": 'VPC off',
"displaytext": 'VPC off',
"supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding,Vpn,Lb,UserData,StaticNat',
},
"redundant_vpc_offering": {
"name": 'Redundant VPC off',
"displaytext": 'Redundant VPC off',
"supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding,Vpn,Lb,UserData,StaticNat',
"serviceProviderList": {
"Vpn": 'VpcVirtualRouter',
"Dhcp": 'VpcVirtualRouter',
"Dns": 'VpcVirtualRouter',
"SourceNat": 'VpcVirtualRouter',
"PortForwarding": 'VpcVirtualRouter',
"Lb": 'VpcVirtualRouter',
"UserData": 'VpcVirtualRouter',
"StaticNat": 'VpcVirtualRouter',
"NetworkACL": 'VpcVirtualRouter'
},
"serviceCapabilityList": {
"SourceNat": {
"RedundantRouter": 'true'
}
},
},
"vpc": {
"name": "TestVPC",
"displaytext": "TestVPC",
"cidr": '10.1.1.1/16'
},
"network": {
"name": "Test Network",
"displaytext": "Test Network",
"netmask": '255.255.255.0'
},
"natrule": {
"privateport": 22,
"publicport": 22,
"startport": 22,
"endport": 22,
"protocol": "TCP",
"cidrlist": '0.0.0.0/0',
},
"virtual_machine": {
"displayname": "Test VM",
"username": "root",
"password": "password",
"ssh_port": 22,
"privateport": 22,
"publicport": 22,
"protocol": 'TCP',
},
"ostype": 'CentOS 5.3 (64-bit)',
"timeout": 10,
}
class TestVPCIpTablesPolicies(cloudstackTestCase):
@classmethod
def setUpClass(cls):
# We want to fail quicker if it's failure
socket.setdefaulttimeout(60)
cls.testClient = super(TestVPCIpTablesPolicies, cls).getClsTestClient()
cls.apiclient = cls.testClient.getApiClient()
cls.services = Services().services
# Get Zone, Domain and templates
cls.domain = get_domain(cls.apiclient)
cls.zone = get_zone(cls.apiclient, cls.testClient.getZoneForTests())
cls.template = get_template(
cls.apiclient,
cls.zone.id,
cls.services["ostype"])
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = cls.template.id
cls.account = Account.create(
cls.apiclient,
cls.services["account"],
admin=True,
domainid=cls.domain.id)
cls.service_offering = ServiceOffering.create(
cls.apiclient,
cls.services["service_offering"])
cls.logger = logging.getLogger('TestVPCIpTablesPolicies')
cls.stream_handler = logging.StreamHandler()
cls.logger.setLevel(logging.DEBUG)
cls.logger.addHandler(cls.stream_handler)
cls.entity_manager = EntityManager(cls.apiclient, cls.services, cls.service_offering, cls.account, cls.zone, cls.logger)
cls._cleanup = [cls.service_offering, cls.account]
return
@classmethod
def tearDownClass(cls):
try:
cleanup_resources(cls.apiclient, cls._cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def setUp(self):
self.logger.debug("Creating a VPC offering.")
self.vpc_off = VpcOffering.create(
self.apiclient,
self.services["vpc_offering"])
self.logger.debug("Enabling the VPC offering created")
self.vpc_off.update(self.apiclient, state='Enabled')
self.logger.debug("Creating a VPC network in the account: %s" % self.account.name)
self.vpc = VPC.create(
self.apiclient,
self.services["vpc"],
vpcofferingid=self.vpc_off.id,
zoneid=self.zone.id,
account=self.account.name,
domainid=self.account.domainid)
self.cleanup = [self.vpc, self.vpc_off]
self.entity_manager.set_cleanup(self.cleanup)
return
def tearDown(self):
try:
self.entity_manager.destroy_routers()
cleanup_resources(self.apiclient, self.cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
@attr(tags=["advanced", "intervlan"], required_hardware="true")
def test_01_single_VPC_iptables_policies(self):
""" Test iptables default INPUT/FORWARD policies on VPC router """
self.logger.debug("Starting test_01_single_VPC_iptables_policies")
routers = self.entity_manager.query_routers()
self.assertEqual(
isinstance(routers, list), True,
"Check for list routers response return valid data")
self.entity_manager.create_network(self.services["vpc_network_offering"], self.vpc.id, "10.1.1.1")
self.entity_manager.create_network(self.services["vpc_network_offering_no_lb"], self.vpc.id, "10.1.2.1")
self.entity_manager.add_nat_rules(self.vpc.id)
self.entity_manager.do_vpc_test()
for router in routers:
if not router.isredundantrouter and router.vpcid:
hosts = list_hosts(
self.apiclient,
id=router.hostid)
self.assertEqual(
isinstance(hosts, list),
True,
"Check for list hosts response return valid data")
host = hosts[0]
host.user = self.services["configurableData"]["host"]["username"]
host.passwd = self.services["configurableData"]["host"]["password"]
host.port = self.services["configurableData"]["host"]["port"]
tables = [self.services["configurableData"]["input"], self.services["configurableData"]["forward"]]
for table in tables:
try:
result = get_process_status(
host.ipaddress,
host.port,
host.user,
host.passwd,
router.linklocalip,
'iptables -L %s' % table)
except KeyError:
self.skipTest(
"Provide a marvin config file with host\
credentials to run %s" %
self._testMethodName)
self.logger.debug("iptables -L %s: %s" % (table, result))
res = str(result)
self.assertEqual(
res.count("DROP"),
1,
"%s Default Policy should be DROP" % table)
class TestRouterIpTablesPolicies(cloudstackTestCase):
@classmethod
def setUpClass(cls):
# We want to fail quicker if it's failure
socket.setdefaulttimeout(60)
cls.testClient = super(TestRouterIpTablesPolicies, cls).getClsTestClient()
cls.apiclient = cls.testClient.getApiClient()
cls.services = Services().services
# Get Zone, Domain and templates
cls.domain = get_domain(cls.apiclient)
cls.zone = get_zone(cls.apiclient, cls.testClient.getZoneForTests())
cls.template = get_template(
cls.apiclient,
cls.zone.id,
cls.services["ostype"])
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = cls.template.id
cls.account = Account.create(
cls.apiclient,
cls.services["account"],
admin=True,
domainid=cls.domain.id)
cls.service_offering = ServiceOffering.create(
cls.apiclient,
cls.services["service_offering"])
cls.logger = logging.getLogger('TestRouterIpTablesPolicies')
cls.stream_handler = logging.StreamHandler()
cls.logger.setLevel(logging.DEBUG)
cls.logger.addHandler(cls.stream_handler)
cls.entity_manager = EntityManager(cls.apiclient, cls.services, cls.service_offering, cls.account, cls.zone, cls.logger)
cls._cleanup = [cls.service_offering, cls.account]
return
@classmethod
def tearDownClass(cls):
try:
cleanup_resources(cls.apiclient, cls._cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def setUp(self):
self.cleanup = []
self.entity_manager.set_cleanup(self.cleanup)
return
def tearDown(self):
try:
cleanup_resources(self.apiclient, self.cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
@attr(tags=["advanced", "intervlan"], required_hardware="true")
def test_02_routervm_iptables_policies(self):
""" Test iptables default INPUT/FORWARD policy on RouterVM """
self.logger.debug("Starting test_02_routervm_iptables_policies")
vm1 = self.entity_manager.deployvm()
routers = self.entity_manager.query_routers()
self.assertEqual(
isinstance(routers, list), True,
"Check for list routers response return valid data")
for router in routers:
if not router.isredundantrouter and not router.vpcid:
hosts = list_hosts(
self.apiclient,
id=router.hostid)
self.assertEqual(
isinstance(hosts, list),
True,
"Check for list hosts response return valid data")
host = hosts[0]
host.user = self.services["configurableData"]["host"]["username"]
host.passwd = self.services["configurableData"]["host"]["password"]
host.port = self.services["configurableData"]["host"]["port"]
tables = [self.services["configurableData"]["input"], self.services["configurableData"]["forward"]]
for table in tables:
try:
result = get_process_status(
host.ipaddress,
host.port,
host.user,
host.passwd,
router.linklocalip,
'iptables -L %s' % table)
except KeyError:
self.skipTest(
"Provide a marvin config file with host\
credentials to run %s" %
self._testMethodName)
self.logger.debug("iptables -L %s: %s" % (table, result))
res = str(result)
self.assertEqual(
res.count("DROP"),
1,
"%s Default Policy should be DROP" % table)
class EntityManager(object):
def __init__(self, apiclient, services, service_offering, account, zone, logger):
self.apiclient = apiclient
self.services = services
self.service_offering = service_offering
self.account = account
self.zone = zone
self.logger = logger
self.cleanup = []
self.networks = []
self.routers = []
self.ips = []
def set_cleanup(self, cleanup):
self.cleanup = cleanup
def add_nat_rules(self, vpc_id):
for o in self.networks:
for vm in o.get_vms():
if vm.get_ip() is None:
vm.set_ip(self.acquire_publicip(o.get_net(), vpc_id))
if vm.get_nat() is None:
vm.set_nat(self.create_natrule(vm.get_vm(), vm.get_ip(), o.get_net(), vpc_id))
time.sleep(5)
def do_vpc_test(self):
for o in self.networks:
for vm in o.get_vms():
self.check_ssh_into_vm(vm.get_vm(), vm.get_ip())
def create_natrule(self, vm, public_ip, network, vpc_id):
self.logger.debug("Creating NAT rule in network for vm with public IP")
nat_rule_services = self.services["natrule"]
nat_rule = NATRule.create(
self.apiclient,
vm,
nat_rule_services,
ipaddressid=public_ip.ipaddress.id,
openfirewall=False,
networkid=network.id,
vpcid=vpc_id)
self.logger.debug("Adding NetworkACL rules to make NAT rule accessible")
nwacl_nat = NetworkACL.create(
self.apiclient,
networkid=network.id,
services=nat_rule_services,
traffictype='Ingress'
)
self.logger.debug('nwacl_nat=%s' % nwacl_nat.__dict__)
return nat_rule
def check_ssh_into_vm(self, vm, public_ip):
self.logger.debug("Checking if we can SSH into VM=%s on public_ip=%s" %
(vm.name, public_ip.ipaddress.ipaddress))
vm.ssh_client = None
try:
vm.get_ssh_client(ipaddress=public_ip.ipaddress.ipaddress)
self.logger.debug("SSH into VM=%s on public_ip=%s is successful" %
(vm.name, public_ip.ipaddress.ipaddress))
except:
raise Exception("Failed to SSH into VM - %s" % (public_ip.ipaddress.ipaddress))
def create_network(self, net_offerring, vpc_id, gateway='10.1.1.1'):
try:
self.logger.debug('Create NetworkOffering')
net_offerring["name"] = "NET_OFF-" + str(gateway)
nw_off = NetworkOffering.create(
self.apiclient,
net_offerring,
conservemode=False)
nw_off.update(self.apiclient, state='Enabled')
self.logger.debug('Created and Enabled NetworkOffering')
self.services["network"]["name"] = "NETWORK-" + str(gateway)
self.logger.debug('Adding Network=%s to VPC ID %s' % (self.services["network"], vpc_id))
obj_network = Network.create(
self.apiclient,
self.services["network"],
accountid=self.account.name,
domainid=self.account.domainid,
networkofferingid=nw_off.id,
zoneid=self.zone.id,
gateway=gateway,
vpcid=vpc_id)
self.logger.debug("Created network with ID: %s" % obj_network.id)
except Exception, e:
raise Exception('Unable to create a Network with offering=%s because of %s ' % (net_offerring, e))
o = networkO(obj_network)
vm1 = self.deployvm_in_network(obj_network)
self.cleanup.insert(1, obj_network)
self.cleanup.insert(2, nw_off)
o.add_vm(vm1)
self.networks.append(o)
return o
def deployvm_in_network(self, network):
try:
self.logger.debug('Creating VM in network=%s' % network.name)
vm = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
accountid=self.account.name,
domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
networkids=[str(network.id)])
self.logger.debug('Created VM=%s in network=%s' % (vm.id, network.name))
self.cleanup.insert(0, vm)
return vm
except:
raise Exception('Unable to create VM in a Network=%s' % network.name)
def deployvm(self):
try:
self.logger.debug('Creating VM')
vm = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
accountid=self.account.name,
domainid=self.account.domainid,
serviceofferingid=self.service_offering.id)
self.cleanup.insert(0, vm)
self.logger.debug('Created VM=%s' % vm.id)
return vm
except:
raise Exception('Unable to create VM')
def acquire_publicip(self, network, vpc_id):
self.logger.debug("Associating public IP for network: %s" % network.name)
public_ip = PublicIPAddress.create(
self.apiclient,
accountid=self.account.name,
zoneid=self.zone.id,
domainid=self.account.domainid,
networkid=network.id,
vpcid=vpc_id)
self.logger.debug("Associated %s with network %s" % (
public_ip.ipaddress.ipaddress,
network.id))
self.ips.append(public_ip)
return public_ip
def query_routers(self):
self.routers = list_routers(self.apiclient,
account=self.account.name,
domainid=self.account.domainid)
return self.routers
def stop_router(self, router):
self.logger.debug('Stopping router')
cmd = stopRouter.stopRouterCmd()
cmd.id = router.id
self.apiclient.stopRouter(cmd)
def destroy_routers(self):
self.logger.debug('Destroying routers')
for router in self.routers:
self.stop_router(router)
cmd = destroyRouter.destroyRouterCmd()
cmd.id = router.id
self.apiclient.destroyRouter(cmd)
self.routers = []
def start_routers(self):
self.logger.debug('Starting routers')
for router in self.routers:
cmd = startRouter.startRouterCmd()
cmd.id = router.id
self.apiclient.startRouter(cmd)
class networkO(object):
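    """Pairs a created network with the VMs deployed in it."""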
def __init__(self, net):
self.network = net
self.vms = []
def get_net(self):
return self.network
def add_vm(self, vm):
self.vms.append(vmsO(vm))
def get_vms(self):
return self.vms
class vmsO(object):
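    """Tracks a VM together with its acquired public IP and NAT rule."""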
def __init__(self, vm):
self.vm = vm
self.ip = None
self.nat = None
def get_vm(self):
return self.vm
def get_ip(self):
return self.ip
def get_nat(self):
return self.nat
def set_ip(self, ip):
self.ip = ip
def set_nat(self, nat):
self.nat = nat
| gpl-2.0 | 8,347,276,217,708,920,000 | -7,825,057,223,768,585,000 | 35.865782 | 128 | 0.520424 | false |
aksalj/kernel_rpi | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py | 12980 | 5411 | # SchedGui.py - Python extension for perf script, basic GUI code for
# traces drawing and overview.
#
# Copyright (C) 2010 by Frederic Weisbecker <[email protected]>
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
try:
    import wx
except ImportError:
    raise ImportError, "You need to install the wxpython lib for this script"
class RootFrame(wx.Frame):
    Y_OFFSET = 100
    RECT_HEIGHT = 100
    RECT_SPACE = 50
    EVENT_MARKING_WIDTH = 5
    def __init__(self, sched_tracer, title, parent = None, id = -1):
        wx.Frame.__init__(self, parent, id, title)
        (self.screen_width, self.screen_height) = wx.GetDisplaySize()
        self.screen_width -= 10
        self.screen_height -= 10
        self.zoom = 0.5
        self.scroll_scale = 20
        self.sched_tracer = sched_tracer
        self.sched_tracer.set_root_win(self)
        (self.ts_start, self.ts_end) = sched_tracer.interval()
        self.update_width_virtual()
        self.nr_rects = sched_tracer.nr_rectangles() + 1
        self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
        # whole window panel
        self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height))
        # scrollable container
        self.scroll = wx.ScrolledWindow(self.panel)
        self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale)
        self.scroll.EnableScrolling(True, True)
        self.scroll.SetFocus()
        # scrollable drawing area
        self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2))
        self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint)
        self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
        self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
        self.scroll.Bind(wx.EVT_PAINT, self.on_paint)
        self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
        self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
        self.scroll.Fit()
        self.Fit()
        self.scroll_panel.SetDimensions(-1, -1, self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING)
        self.txt = None
        self.Show(True)
    def us_to_px(self, val):
        return val / (10 ** 3) * self.zoom
    def px_to_us(self, val):
        return (val / self.zoom) * (10 ** 3)
    def scroll_start(self):
        (x, y) = self.scroll.GetViewStart()
        return (x * self.scroll_scale, y * self.scroll_scale)
    def scroll_start_us(self):
        (x, y) = self.scroll_start()
        return self.px_to_us(x)
    def paint_rectangle_zone(self, nr, color, top_color, start, end):
        offset_px = self.us_to_px(start - self.ts_start)
        width_px = self.us_to_px(end - self.ts_start)
        offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
        width_py = RootFrame.RECT_HEIGHT
        dc = self.dc
        if top_color is not None:
            (r, g, b) = top_color
            top_color = wx.Colour(r, g, b)
            brush = wx.Brush(top_color, wx.SOLID)
            dc.SetBrush(brush)
            dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH)
            width_py -= RootFrame.EVENT_MARKING_WIDTH
            offset_py += RootFrame.EVENT_MARKING_WIDTH
        (r ,g, b) = color
        color = wx.Colour(r, g, b)
        brush = wx.Brush(color, wx.SOLID)
        dc.SetBrush(brush)
        dc.DrawRectangle(offset_px, offset_py, width_px, width_py)
    def update_rectangles(self, dc, start, end):
        start += self.ts_start
        end += self.ts_start
        self.sched_tracer.fill_zone(start, end)
    def on_paint(self, event):
        dc = wx.PaintDC(self.scroll_panel)
        self.dc = dc
        width = min(self.width_virtual, self.screen_width)
        (x, y) = self.scroll_start()
        start = self.px_to_us(x)
        end = self.px_to_us(x + width)
        self.update_rectangles(dc, start, end)
    def rect_from_ypixel(self, y):
        y -= RootFrame.Y_OFFSET
        rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
        height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
        if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT:
            return -1
        return rect
    def update_summary(self, txt):
        if self.txt:
            self.txt.Destroy()
        self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50))
    def on_mouse_down(self, event):
        (x, y) = event.GetPositionTuple()
        rect = self.rect_from_ypixel(y)
        if rect == -1:
            return
        t = self.px_to_us(x) + self.ts_start
        self.sched_tracer.mouse_down(rect, t)
    def update_width_virtual(self):
        self.width_virtual = self.us_to_px(self.ts_end - self.ts_start)
    def __zoom(self, x):
        self.update_width_virtual()
        (xpos, ypos) = self.scroll.GetViewStart()
        xpos = self.us_to_px(x) / self.scroll_scale
        self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos)
        self.Refresh()
    def zoom_in(self):
        x = self.scroll_start_us()
        self.zoom *= 2
        self.__zoom(x)
    def zoom_out(self):
        x = self.scroll_start_us()
        self.zoom /= 2
        self.__zoom(x)
    def on_key_press(self, event):
        key = event.GetRawKeyCode()
        if key == ord("+"):
            self.zoom_in()
            return
        if key == ord("-"):
            self.zoom_out()
            return
        key = event.GetKeyCode()
        (x, y) = self.scroll.GetViewStart()
        if key == wx.WXK_RIGHT:
            self.scroll.Scroll(x + 1, y)
        elif key == wx.WXK_LEFT:
            self.scroll.Scroll(x - 1, y)
        elif key == wx.WXK_DOWN:
            self.scroll.Scroll(x, y + 1)
        elif key == wx.WXK_UP:
            self.scroll.Scroll(x, y - 1)
| gpl-2.0 | 9,108,756,283,079,296,000 | -1,459,792,691,396,263,400 | 28.407609 | 158 | 0.679357 | false |
stephen144/odoo | addons/l10n_be_invoice_bba/partner.py | 47 | 1364 | # -*- encoding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
from openerp.osv import fields, osv
import time
from openerp.tools.translate import _
class res_partner(osv.osv):
""" add field to indicate default 'Communication Type' on customer invoices """
_inherit = 'res.partner'
def _get_comm_type(self, cr, uid, context=None):
res = self.pool.get('account.invoice')._get_reference_type(cr, uid,context=context)
return res
_columns = {
'out_inv_comm_type': fields.selection(_get_comm_type, 'Communication Type', change_default=True,
help='Select Default Communication Type for Outgoing Invoices.' ),
'out_inv_comm_algorithm': fields.selection([
('random','Random'),
('date','Date'),
('partner_ref','Customer Reference'),
], 'Communication Algorithm',
help='Select Algorithm to generate the Structured Communication on Outgoing Invoices.' ),
}
def _commercial_fields(self, cr, uid, context=None):
return super(res_partner, self)._commercial_fields(cr, uid, context=context) + \
['out_inv_comm_type', 'out_inv_comm_algorithm']
_default = {
'out_inv_comm_type': 'none',
}
| agpl-3.0 | -8,327,071,061,351,914,000 | -3,211,638,282,837,939,000 | 36.888889 | 104 | 0.633431 | false |
shakamunyi/solum | solum/common/safe_utils.py | 37 | 2103 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utilities and helper functions that won't produce circular imports."""
import inspect
def getcallargs(function, *args, **kwargs):
"""This is a simplified inspect.getcallargs (2.7+).
It should be replaced when python >= 2.7 is standard.
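    Example: getcallargs(lambda a, b=2: None, 1) returns {'a': 1, 'b': 2}.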
"""
keyed_args = {}
argnames, varargs, keywords, defaults = inspect.getargspec(function)
keyed_args.update(kwargs)
#NOTE(alaski) the implicit 'self' or 'cls' argument shows up in
# argnames but not in args or kwargs. Uses 'in' rather than '==' because
# some tests use 'self2'.
if 'self' in argnames[0] or 'cls' == argnames[0]:
# The function may not actually be a method or have im_self.
# Typically seen when it's stubbed with mox.
if inspect.ismethod(function) and hasattr(function, 'im_self'):
keyed_args[argnames[0]] = function.im_self
else:
keyed_args[argnames[0]] = None
remaining_argnames = filter(lambda x: x not in keyed_args, argnames)
keyed_args.update(dict(zip(remaining_argnames, args)))
if defaults:
num_defaults = len(defaults)
for argname, value in zip(argnames[-num_defaults:], defaults):
if argname not in keyed_args:
keyed_args[argname] = value
return keyed_args
| apache-2.0 | -8,762,591,251,227,144,000 | -6,426,883,582,529,949,000 | 37.236364 | 78 | 0.685687 | false |
xiaotdl/ansible | lib/ansible/plugins/callback/minimal.py | 87 | 3499 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.callback import CallbackBase
from ansible import constants as C
class CallbackModule(CallbackBase):
'''
This is the default callback interface, which simply prints messages
to stdout when new callback events are received.
'''
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout'
CALLBACK_NAME = 'minimal'
def _command_generic_msg(self, host, result, caption):
''' output the result of a command run '''
buf = "%s | %s | rc=%s >>\n" % (host, caption, result.get('rc',0))
buf += result.get('stdout','')
buf += result.get('stderr','')
buf += result.get('msg','')
return buf + "\n"
def v2_runner_on_failed(self, result, ignore_errors=False):
if 'exception' in result._result:
if self._display.verbosity < 3:
# extract just the actual error message from the exception text
error = result._result['exception'].strip().split('\n')[-1]
msg = "An exception occurred during task execution. To see the full traceback, use -vvv. The error was: %s" % error
else:
msg = "An exception occurred during task execution. The full traceback is:\n" + result._result['exception']
self._display.display(msg, color='red')
# finally, remove the exception from the result so it's not shown every time
del result._result['exception']
if result._task.action in C.MODULE_NO_JSON:
self._display.display(self._command_generic_msg(result._host.get_name(), result._result,"FAILED"), color='red')
else:
self._display.display("%s | FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result, indent=4)), color='red')
def v2_runner_on_ok(self, result):
if result._task.action in C.MODULE_NO_JSON:
self._display.display(self._command_generic_msg(result._host.get_name(), result._result,"SUCCESS"), color='green')
else:
self._display.display("%s | SUCCESS => %s" % (result._host.get_name(), self._dump_results(result._result, indent=4)), color='green')
self._handle_warnings(result._result)
def v2_runner_on_skipped(self, result):
self._display.display("%s | SKIPPED" % (result._host.get_name()), color='cyan')
def v2_runner_on_unreachable(self, result):
self._display.display("%s | UNREACHABLE!" % result._host.get_name(), color='yellow')
def v2_on_file_diff(self, result):
if 'diff' in result._result and result._result['diff']:
self._display.display(self._get_diff(result._result['diff']))
| gpl-3.0 | 7,036,927,923,139,479,000 | 1,182,536,111,214,564,000 | 42.197531 | 144 | 0.647328 | false |
GDGLima/contentbox | third_party/django/contrib/messages/storage/fallback.py | 627 | 2171 | from django.contrib.messages.storage.base import BaseStorage
from django.contrib.messages.storage.cookie import CookieStorage
from django.contrib.messages.storage.session import SessionStorage
class FallbackStorage(BaseStorage):
"""
Tries to store all messages in the first backend, storing any unstored
messages in each subsequent backend backend.
"""
storage_classes = (CookieStorage, SessionStorage)
def __init__(self, *args, **kwargs):
super(FallbackStorage, self).__init__(*args, **kwargs)
self.storages = [storage_class(*args, **kwargs)
for storage_class in self.storage_classes]
self._used_storages = set()
def _get(self, *args, **kwargs):
"""
Gets a single list of messages from all storage backends.
"""
all_messages = []
for storage in self.storages:
messages, all_retrieved = storage._get()
# If the backend hasn't been used, no more retrieval is necessary.
if messages is None:
break
if messages:
self._used_storages.add(storage)
all_messages.extend(messages)
# If this storage class contained all the messages, no further
# retrieval is necessary
if all_retrieved:
break
return all_messages, all_retrieved
def _store(self, messages, response, *args, **kwargs):
"""
Stores the messages, returning any unstored messages after trying all
backends.
For each storage backend, any messages not stored are passed on to the
next backend.
"""
for storage in self.storages:
if messages:
messages = storage._store(messages, response,
remove_oldest=False)
# Even if there are no more messages, continue iterating to ensure
# storages which contained messages are flushed.
elif storage in self._used_storages:
storage._store([], response)
self._used_storages.remove(storage)
return messages
| apache-2.0 | -5,551,794,156,965,518,000 | -2,074,792,868,599,851,800 | 39.203704 | 78 | 0.60433 | false |
thiriel/maps | venv/lib/python2.7/site-packages/django/core/files/temp.py | 536 | 1819 | """
The temp module provides a NamedTemporaryFile that can be re-opened on any
platform. Most platforms use the standard Python tempfile.TemporaryFile class,
but MS Windows users are given a custom class.
This is needed because in Windows NT, the default implementation of
NamedTemporaryFile uses the O_TEMPORARY flag, and thus cannot be reopened [1].
1: http://mail.python.org/pipermail/python-list/2005-December/359474.html
"""
import os
import tempfile
from django.core.files.utils import FileProxyMixin
__all__ = ('NamedTemporaryFile', 'gettempdir',)
if os.name == 'nt':
class TemporaryFile(FileProxyMixin):
"""
Temporary file object constructor that works in Windows and supports
reopening of the temporary file in windows.
"""
def __init__(self, mode='w+b', bufsize=-1, suffix='', prefix='',
dir=None):
fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
dir=dir)
self.name = name
self.file = os.fdopen(fd, mode, bufsize)
self.close_called = False
# Because close can be called during shutdown
# we need to cache os.unlink and access it
# as self.unlink only
unlink = os.unlink
def close(self):
if not self.close_called:
self.close_called = True
try:
self.file.close()
except (OSError, IOError):
pass
try:
self.unlink(self.name)
except (OSError):
pass
def __del__(self):
self.close()
NamedTemporaryFile = TemporaryFile
else:
NamedTemporaryFile = tempfile.NamedTemporaryFile
gettempdir = tempfile.gettempdir
| bsd-3-clause | 6,142,071,922,070,875,000 | -7,328,108,219,788,688,000 | 31.482143 | 78 | 0.598131 | false |
motlib/mqtt-ts | src/sensors/sysinfo.py | 1 | 1526 | '''System info related sensor implementations.'''
from sensors.sbase import SensorBase
from datetime import datetime
class NetTraffic(SensorBase):
'''Measures the average rx and tx throughput of a network interface.'''
def __init__(self, scfg):
super().__init__(scfg)
self.device = scfg['device']
self.lasttime = None
self.old_tx = None
self.old_rx = None
def get_file_value(self, filename):
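        '''Read a single float value from the given file (e.g. a sysfs statistics counter).'''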
with open(filename, 'r') as f:
val = float(f.read())
return val
def sample(self):
patht = '/sys/class/net/{dev}/statistics/{stat}'
rx = self.get_file_value(patht.format(
dev=self.device, stat='rx_bytes'))
tx = self.get_file_value(patht.format(
dev=self.device, stat='tx_bytes'))
t = datetime.now()
evts = []
if self.old_rx is not None:
val = (rx - self.old_rx) / ((t - self.lasttime).total_seconds())
# TODO: I need bytes per second!
evts.append(
self.new_event(val, 'bytes per second', 'rx_throughput'))
if self.old_tx is not None:
val = (tx - self.old_tx) / ((t - self.lasttime).total_seconds())
# TODO: I need bytes per second!
evts.append(
self.new_event(val, 'bytes per second', 'tx_throughput'))
self.old_rx = rx
self.old_tx = tx
self.lasttime = t
return evts
| gpl-3.0 | -5,065,885,394,609,700,000 | 2,665,471,062,778,439,700 | 24.433333 | 76 | 0.532765 | false |
nimbusproject/epumgmt | src/python/epumgmt/sbin/most-recent-log.py | 1 | 1325 | #!/usr/bin/env python
import os
import sys
import time
from epumgmt.sbin import sbin_common
def get_logfiledir(p):
logfiledir = p.get_conf_or_none("logging", "logfiledir")
if not logfiledir:
sys.stderr.write("There is no logfiledir configuration")
return None
return sbin_common.apply_vardir_maybe(p, logfiledir)
if len(sys.argv) != 2:
sys.stderr.write("This program requires 1 argument, the absolute path to the main.conf file")
sys.exit(1)
p = sbin_common.get_parameters(sys.argv[1])
logfiledir = get_logfiledir(p)
if not logfiledir:
sys.exit(1)
# find the newest file in the directory:
sys.stderr.write("Log file dir: %s\n" % logfiledir)
sortme = []
for root, dirs, files in os.walk(logfiledir):
for name in files:
path = os.path.join(logfiledir, name)
if os.path.isfile(path):
astat = os.stat(path)
modtime = time.localtime(astat[8])
sortme.append((modtime, path))
break # only look in the top directory
if len(sortme) == 0:
sys.stderr.write("Could not find any files in: %s" % logfiledir)
sys.exit(1)
sortme.sort()
newest_file = sortme[-1][1]
sys.stderr.write("Newest log file: %s\n" % newest_file)
sys.stderr.flush()
f = open(newest_file)
for line in f:
print line,
f.close()
| apache-2.0 | 3,242,943,026,347,675,600 | -3,042,679,984,679,953,400 | 24.480769 | 97 | 0.655094 | false |
Aperturedimigo/Complete | QT/GUI_Privasee_notYourFace.py | 1 | 1271 | import sys
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtGui import QPainter
from PyQt5.QtGui import QPixmap
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
windowSizeX = 440
windowSizeY = 250
fontMajor = "Arial"
fontMinor = "Dotum"
class Form(QWidget):
    # __init__ : constructor
    # parent : parent object
def __init__(self, parent=None):
super(Form, self).__init__(parent)
nameLabel = QLabel()
nameLabel.setText("You've got wrong face.")
newfont = QtGui.QFont(fontMinor, 16, QtGui.QFont.Bold)
nameLabel.setFont(newfont)
nameLabel.setAlignment(Qt.AlignCenter)
blankLabel = QLabel()
blankLabel.setText(" ")
backButton = QPushButton("Back")
backButton.clicked.connect(self.lockContact)
mainLayout = QGridLayout()
mainLayout.addWidget(nameLabel,0,0)
mainLayout.addWidget(backButton,1,0)
self.setLayout(mainLayout)
self.setWindowTitle("Privasee")
def lockContact(self):
'''call GUI_Privasee_Main.py'''
sys.exit(app.exec_())
if __name__ == '__main__':
app = QApplication(sys.argv)
screen = Form()
screen.resize(windowSizeX,windowSizeY)
screen.show()
sys.exit(app.exec_())
| mit | 5,018,753,684,744,137,000 | -5,872,455,049,777,639,000 | 21.446429 | 62 | 0.640414 | false |
jumpstarter-io/neutron | neutron/plugins/embrane/l2base/ml2/ml2_support.py | 2 | 2160 | # Copyright 2014 Embrane, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Ivar Lazzaro, Embrane, Inc.
# @author: Ignacio Scopetta, Embrane, Inc.
from heleosapi import info as h_info
from neutron.common import constants
from neutron import manager
from neutron.plugins.embrane.l2base import support_base as base
from neutron.plugins.embrane.l2base import support_exceptions as exc
class Ml2Support(base.SupportBase):
"""Modular Layer 2 plugin support.
Obtains the information needed to build the user security zones.
"""
def __init__(self):
super(Ml2Support, self).__init__()
def retrieve_utif_info(self, context, neutron_port):
plugin = manager.NeutronManager.get_plugin()
network = plugin.get_network(
context, neutron_port['network_id'])
is_gw = (neutron_port["device_owner"] ==
constants.DEVICE_OWNER_ROUTER_GW)
network_type = network.get('provider:network_type')
if network_type != 'vlan':
raise exc.UtifInfoError(
err_msg=_("Network type %s not supported. Please be sure "
"that tenant_network_type is vlan") % network_type)
result = h_info.UtifInfo(network.get('provider:segmentation_id'),
network['name'],
network['id'],
is_gw,
network['tenant_id'],
neutron_port['id'],
neutron_port['mac_address'])
return result
| apache-2.0 | -1,308,197,387,644,127,000 | 5,483,496,117,576,129,000 | 38.272727 | 78 | 0.616667 | false |
diderson/couchapp | docs/conf.py | 2 | 9433 | # -*- coding: utf-8 -*-
#
# CouchApp documentation build configuration file, created by
# sphinx-quickstart on Wed Aug 5 15:00:02 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'CouchApp'
copyright = u'2015, <a href="https://github.com/couchapp/couchapp/graphs/contributors">Various CouchApp Contributors</a>'
author = u'Various CouchApp Contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0.1'
# The full version, including alpha/beta/rc tags.
release = '1.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'CouchAppdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'CouchApp.tex', u'CouchApp Documentation',
author, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'CouchApp', u'CouchApp Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'CouchApp', u'CouchApp Documentation',
author, 'CouchApp', 'Utilities to make standalone CouchDB application development simple',
'Development'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| apache-2.0 | 5,667,123,898,632,543,000 | -6,842,079,883,814,770,000 | 31.527586 | 121 | 0.708046 | false |
brandond/ansible | lib/ansible/modules/cloud/opennebula/one_image.py | 52 | 11657 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
"""
(c) 2018, Milan Ilic <[email protected]>
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: one_image
short_description: Manages OpenNebula images
description:
- Manages OpenNebula images
version_added: "2.6"
requirements:
- python-oca
options:
api_url:
description:
- URL of the OpenNebula RPC server.
- It is recommended to use HTTPS so that the username/password are not
- transferred over the network unencrypted.
- If not set then the value of the C(ONE_URL) environment variable is used.
api_username:
description:
- Name of the user to login into the OpenNebula RPC server. If not set
- then the value of the C(ONE_USERNAME) environment variable is used.
api_password:
description:
- Password of the user to login into OpenNebula RPC server. If not set
- then the value of the C(ONE_PASSWORD) environment variable is used.
id:
description:
- A C(id) of the image you would like to manage.
name:
description:
- A C(name) of the image you would like to manage.
state:
description:
- C(present) - state that is used to manage the image
- C(absent) - delete the image
- C(cloned) - clone the image
- C(renamed) - rename the image to the C(new_name)
choices: ["present", "absent", "cloned", "renamed"]
default: present
enabled:
description:
- Whether the image should be enabled or disabled.
type: bool
new_name:
description:
- A name that will be assigned to the existing or new image.
- In the case of cloning, by default C(new_name) will take the name of the origin image with the prefix 'Copy of'.
author:
- "Milan Ilic (@ilicmilan)"
'''
EXAMPLES = '''
# Fetch the IMAGE by id
- one_image:
id: 45
register: result
# Print the IMAGE properties
- debug:
msg: result
# Rename existing IMAGE
- one_image:
id: 34
state: renamed
new_name: bar-image
# Disable the IMAGE by id
- one_image:
id: 37
enabled: no
# Enable the IMAGE by name
- one_image:
name: bar-image
enabled: yes
# Clone the IMAGE by name
- one_image:
name: bar-image
state: cloned
new_name: bar-image-clone
register: result
# Delete the IMAGE by id
- one_image:
id: '{{ result.id }}'
state: absent
'''
RETURN = '''
id:
description: image id
type: int
returned: success
sample: 153
name:
description: image name
type: str
returned: success
sample: app1
group_id:
description: image's group id
type: int
returned: success
sample: 1
group_name:
description: image's group name
type: str
returned: success
sample: one-users
owner_id:
description: image's owner id
type: int
returned: success
sample: 143
owner_name:
description: image's owner name
type: str
returned: success
sample: ansible-test
state:
description: state of image instance
type: str
returned: success
sample: READY
used:
description: is image in use
type: bool
returned: success
sample: true
running_vms:
description: count of running vms that use this image
type: int
returned: success
sample: 7
'''
try:
import oca
HAS_OCA = True
except ImportError:
HAS_OCA = False
from ansible.module_utils.basic import AnsibleModule
import os
def get_image(module, client, predicate):
pool = oca.ImagePool(client)
# Filter -2 means fetch all images user can Use
pool.info(filter=-2)
for image in pool:
if predicate(image):
return image
return None
def get_image_by_name(module, client, image_name):
return get_image(module, client, lambda image: (image.name == image_name))
def get_image_by_id(module, client, image_id):
return get_image(module, client, lambda image: (image.id == image_id))
def get_image_instance(module, client, requested_id, requested_name):
if requested_id:
return get_image_by_id(module, client, requested_id)
else:
return get_image_by_name(module, client, requested_name)
IMAGE_STATES = ['INIT', 'READY', 'USED', 'DISABLED', 'LOCKED', 'ERROR', 'CLONE', 'DELETE', 'USED_PERS', 'LOCKED_USED', 'LOCKED_USED_PERS']
def get_image_info(image):
image.info()
info = {
'id': image.id,
'name': image.name,
'state': IMAGE_STATES[image.state],
'running_vms': image.running_vms,
'used': bool(image.running_vms),
'user_name': image.uname,
'user_id': image.uid,
'group_name': image.gname,
'group_id': image.gid,
}
return info
def wait_for_state(module, image, wait_timeout, state_predicate):
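    """Poll the image until state_predicate(state) is satisfied or wait_timeout seconds elapse."""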
import time
start_time = time.time()
while (time.time() - start_time) < wait_timeout:
image.info()
state = image.state
if state_predicate(state):
return image
time.sleep(1)
module.fail_json(msg="Wait timeout has expired!")
def wait_for_ready(module, image, wait_timeout=60):
return wait_for_state(module, image, wait_timeout, lambda state: (state in [IMAGE_STATES.index('READY')]))
def wait_for_delete(module, image, wait_timeout=60):
return wait_for_state(module, image, wait_timeout, lambda state: (state in [IMAGE_STATES.index('DELETE')]))
def enable_image(module, client, image, enable):
image.info()
changed = False
state = image.state
if state not in [IMAGE_STATES.index('READY'), IMAGE_STATES.index('DISABLED'), IMAGE_STATES.index('ERROR')]:
if enable:
module.fail_json(msg="Cannot enable " + IMAGE_STATES[state] + " image!")
else:
module.fail_json(msg="Cannot disable " + IMAGE_STATES[state] + " image!")
if ((enable and state != IMAGE_STATES.index('READY')) or
(not enable and state != IMAGE_STATES.index('DISABLED'))):
changed = True
if changed and not module.check_mode:
client.call('image.enable', image.id, enable)
result = get_image_info(image)
result['changed'] = changed
return result
def clone_image(module, client, image, new_name):
if new_name is None:
new_name = "Copy of " + image.name
tmp_image = get_image_by_name(module, client, new_name)
if tmp_image:
result = get_image_info(tmp_image)
result['changed'] = False
return result
if image.state == IMAGE_STATES.index('DISABLED'):
module.fail_json(msg="Cannot clone DISABLED image")
if not module.check_mode:
new_id = client.call('image.clone', image.id, new_name)
image = get_image_by_id(module, client, new_id)
wait_for_ready(module, image)
result = get_image_info(image)
result['changed'] = True
return result
def rename_image(module, client, image, new_name):
if new_name is None:
module.fail_json(msg="'new_name' option has to be specified when the state is 'renamed'")
if new_name == image.name:
result = get_image_info(image)
result['changed'] = False
return result
tmp_image = get_image_by_name(module, client, new_name)
if tmp_image:
module.fail_json(msg="Name '" + new_name + "' is already taken by IMAGE with id=" + str(tmp_image.id))
if not module.check_mode:
client.call('image.rename', image.id, new_name)
result = get_image_info(image)
result['changed'] = True
return result
def delete_image(module, client, image):
if not image:
return {'changed': False}
if image.running_vms > 0:
module.fail_json(msg="Cannot delete image. There are " + str(image.running_vms) + " VMs using it.")
if not module.check_mode:
client.call('image.delete', image.id)
wait_for_delete(module, image)
return {'changed': True}
def get_connection_info(module):
url = module.params.get('api_url')
username = module.params.get('api_username')
password = module.params.get('api_password')
if not url:
url = os.environ.get('ONE_URL')
if not username:
username = os.environ.get('ONE_USERNAME')
if not password:
password = os.environ.get('ONE_PASSWORD')
if not(url and username and password):
module.fail_json(msg="One or more connection parameters (api_url, api_username, api_password) were not specified")
from collections import namedtuple
auth_params = namedtuple('auth', ('url', 'username', 'password'))
return auth_params(url=url, username=username, password=password)
def main():
fields = {
"api_url": {"required": False, "type": "str"},
"api_username": {"required": False, "type": "str"},
"api_password": {"required": False, "type": "str", "no_log": True},
"id": {"required": False, "type": "int"},
"name": {"required": False, "type": "str"},
"state": {
"default": "present",
"choices": ['present', 'absent', 'cloned', 'renamed'],
"type": "str"
},
"enabled": {"required": False, "type": "bool"},
"new_name": {"required": False, "type": "str"},
}
module = AnsibleModule(argument_spec=fields,
mutually_exclusive=[['id', 'name']],
supports_check_mode=True)
if not HAS_OCA:
module.fail_json(msg='This module requires python-oca to work!')
auth = get_connection_info(module)
params = module.params
id = params.get('id')
name = params.get('name')
state = params.get('state')
enabled = params.get('enabled')
new_name = params.get('new_name')
client = oca.Client(auth.username + ':' + auth.password, auth.url)
result = {}
if not id and state == 'renamed':
module.fail_json(msg="Option 'id' is required when the state is 'renamed'")
image = get_image_instance(module, client, id, name)
if not image and state != 'absent':
if id:
module.fail_json(msg="There is no image with id=" + str(id))
else:
module.fail_json(msg="There is no image with name=" + name)
if state == 'absent':
result = delete_image(module, client, image)
else:
result = get_image_info(image)
changed = False
result['changed'] = False
if enabled is not None:
result = enable_image(module, client, image, enabled)
if state == "cloned":
result = clone_image(module, client, image, new_name)
elif state == "renamed":
result = rename_image(module, client, image, new_name)
changed = changed or result['changed']
result['changed'] = changed
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | -4,148,977,132,833,552,400 | 2,549,155,879,539,935,700 | 26.492925 | 138 | 0.629922 | false |
pferreir/indico-backup | indico/MaKaC/authentication/LocalAuthentication.py | 2 | 2188 | # -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico;if not, see <http://www.gnu.org/licenses/>.
import bcrypt
from MaKaC.authentication.baseAuthentication import Authenthicator, PIdentity, SSOHandler
from MaKaC.i18n import _
class LocalAuthenticator(Authenthicator, SSOHandler):
idxName = "localIdentities"
id = "Local"
name = "Indico"
desciption = "Indico Login"
def __init__(self):
Authenthicator.__init__(self)
def createIdentity(self, li, avatar):
return LocalIdentity(li.getLogin(), li.getPassword(), avatar)
def createIdentitySSO(self, login, avatar):
return LocalIdentity(login, None, avatar)
class LocalIdentity(PIdentity):
def __init__(self, login, password, user):
PIdentity.__init__(self, login, user)
self.setPassword(password)
def setPassword(self, newPwd):
self.algorithm = 'bcrypt'
if newPwd is not None:
self.password = bcrypt.hashpw(newPwd, bcrypt.gensalt())
else:
# This happens e.g. when SSO is used with Local identities.
# The user can add the password later if he wants to anyway
self.password = None
def authenticate(self, id):
if self.password is None:
return None
if self.getLogin() == id.getLogin() and self.password == bcrypt.hashpw(id.getPassword(), self.password):
return self.user
return None
def getAuthenticatorTag(self):
return LocalAuthenticator.getId()
| gpl-3.0 | 87,603,695,466,493,890 | -327,501,545,168,808,200 | 32.661538 | 112 | 0.678245 | false |
foursquare/commons-old | src/python/twitter/pants/tasks/markdown_to_html.py | 1 | 9585 | # ==================================================================================================
# Copyright 2012 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================
__author__ = 'John Sirois'
try:
import markdown
WIKILINKS_PATTERN = r'\[\[([^\]]+)\]\]'
class WikilinksPattern(markdown.inlinepatterns.Pattern):
def __init__(self, build_url, markdown_instance=None):
markdown.inlinepatterns.Pattern.__init__(self, WIKILINKS_PATTERN, markdown_instance)
self.build_url = build_url
def handleMatch(self, m):
alias, url = self.build_url(m.group(2).strip())
el = markdown.util.etree.Element('a')
el.set('href', url)
el.text = markdown.util.AtomicString(alias)
return el
class WikilinksExtension(markdown.Extension):
def __init__(self, build_url, configs=None):
markdown.Extension.__init__(self, configs or {})
self.build_url = build_url
def extendMarkdown(self, md, md_globals):
md.inlinePatterns['wikilinks'] = WikilinksPattern(self.build_url, md)
HAS_MARKDOWN = True
except ImportError:
HAS_MARKDOWN = False
try:
from pygments.formatters.html import HtmlFormatter
from pygments.styles import get_all_styles
def configure_codehighlight_options(option_group, mkflag):
all_styles = list(get_all_styles())
option_group.add_option(mkflag("code-style"), dest="markdown_to_html_code_style",
type="choice", choices=all_styles,
help="Selects the stylesheet to use for code highlights, one of: "
"%s." % ' '.join(all_styles))
def emit_codehighlight_css(path, style):
with safe_open(path, 'w') as css:
css.write((HtmlFormatter(style=style)).get_style_defs('.codehilite'))
return path
except ImportError:
def configure_codehighlight_options(option_group, mkflag): pass
def emit_codehighlight_css(path, style): pass
import os
import re
import textwrap
from twitter.common.dirutil import safe_open
from twitter.pants import get_buildroot
from twitter.pants.base import Address, Target
from twitter.pants.targets import Page
from twitter.pants.tasks import binary_utils, Task, TaskError
class MarkdownToHtml(Task):
AVAILABLE = HAS_MARKDOWN
@classmethod
def setup_parser(cls, option_group, args, mkflag):
configure_codehighlight_options(option_group, mkflag)
option_group.add_option(mkflag("open"), mkflag("open", negate=True),
dest = "markdown_to_html_open",
action="callback", callback=mkflag.set_bool, default=False,
help = "[%default] Open the generated documents in a browser.")
option_group.add_option(mkflag("standalone"), mkflag("standalone", negate=True),
dest = "markdown_to_html_standalone",
action="callback", callback=mkflag.set_bool, default=False,
help = "[%default] Generate a well-formed standalone html document.")
option_group.add_option(mkflag("outdir"), dest="markdown_to_html_outdir",
help="Emit generated html in to this directory.")
option_group.add_option(mkflag("extension"), dest = "markdown_to_html_extensions",
action="append",
help = "Override the default markdown extensions and process pages "
"whose source have these extensions instead.")
def __init__(self, context):
Task.__init__(self, context)
self.open = context.options.markdown_to_html_open
self.outdir = (
context.options.markdown_to_html_outdir
or context.config.get('markdown-to-html', 'workdir')
)
self.extensions = set(
context.options.markdown_to_html_extensions
or context.config.getlist('markdown-to-html', 'extensions', ['.md'])
)
self.standalone = context.options.markdown_to_html_standalone
self.code_style = context.config.get('markdown-to-html', 'code-style')
if hasattr(context.options, 'markdown_to_html_code_style'):
if context.options.markdown_to_html_code_style:
self.code_style = context.options.markdown_to_html_code_style
def execute(self, targets):
if not MarkdownToHtml.AVAILABLE:
raise TaskError('Cannot process markdown - no markdown lib on the sys.path')
# TODO(John Sirois): consider adding change detection
css_relpath = os.path.join('css', 'codehighlight.css')
css = emit_codehighlight_css(os.path.join(self.outdir, css_relpath), self.code_style)
if css:
self.context.log.info('Emitted %s' % css)
def is_page(target):
return isinstance(target, Page)
roots = set()
interior_nodes = set()
if self.open:
dependencies_by_page = self.context.dependants(on_predicate=is_page, from_predicate=is_page)
roots.update(dependencies_by_page.keys())
for dependencies in dependencies_by_page.values():
interior_nodes.update(dependencies)
roots.difference_update(dependencies)
for page in self.context.targets(is_page):
      # There are no in or out edges so we need to show this isolated page.
if not page.dependencies and page not in interior_nodes:
roots.add(page)
genmap = self.context.products.get('markdown_html')
show = []
for page in filter(is_page, targets):
_, ext = os.path.splitext(page.source)
if ext in self.extensions:
def process_page(key, outdir, url_builder, config):
outputs = list()
if css and self.standalone:
outputs.append(css_relpath)
html_path = self.process(
outdir,
page.target_base,
page.source,
self.standalone,
url_builder,
config,
css=css
)
self.context.log.info('Processed %s to %s' % (page.source, html_path))
outputs.append(os.path.relpath(html_path, outdir))
genmap.add(key, outdir, outputs)
return html_path
def url_builder(linked_page, config=None):
path, ext = os.path.splitext(linked_page.source)
return linked_page.name, os.path.relpath(path + '.html', os.path.dirname(page.source))
html = process_page(page, os.path.join(self.outdir, 'html'), url_builder, lambda p: None)
if self.open and page in roots:
show.append(html)
for wiki in page.wikis():
def get_config(page):
return page.wiki_config(wiki)
basedir = os.path.join(self.outdir, wiki.id)
process_page((wiki, page), basedir, wiki.url_builder, get_config)
if show:
binary_utils.open(*show)
PANTS_LINK = re.compile(r'''pants\(['"]([^)]+)['"]\)''')
def process(self, outdir, base, source, standalone, url_builder, get_config, css=None):
def parse_url(spec):
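      """Resolve a pants('...') spec to (alias, url); any other string is returned unchanged."""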
match = MarkdownToHtml.PANTS_LINK.match(spec)
if match:
page = Target.get(Address.parse(get_buildroot(), match.group(1)))
if not page:
raise TaskError('Invalid link %s' % match.group(1))
alias, url = url_builder(page, config=get_config(page))
return alias, url
else:
return spec, spec
def build_url(label):
components = label.split('|', 1)
if len(components) == 1:
return parse_url(label.strip())
else:
alias, link = components
_, url = parse_url(link.strip())
return alias, url
wikilinks = WikilinksExtension(build_url)
path, ext = os.path.splitext(source)
with safe_open(os.path.join(outdir, path + '.html'), 'w') as output:
with open(os.path.join(get_buildroot(), base, source), 'r') as input:
md_html = markdown.markdown(
input.read(),
extensions=['codehilite(guess_lang=False)', 'extra', 'tables', 'toc', wikilinks],
)
if standalone:
if css:
css_relpath = os.path.relpath(css, outdir)
out_relpath = os.path.dirname(source)
link_relpath = os.path.relpath(css_relpath, out_relpath)
css = '<link rel="stylesheet" type="text/css" href="%s"/>' % link_relpath
html = textwrap.dedent('''
<html>
<head>
%s
</head>
<body>
<!-- generated by pants! -->
%s
</body>
</html>
''').strip() % (css or '', md_html)
output.write(html)
else:
if css:
with safe_open(css) as fd:
output.write(textwrap.dedent('''
<style type="text/css">
%s
</style>
''').strip() % fd.read())
output.write('\n')
output.write(md_html)
return output.name
| apache-2.0 | 1,046,458,090,881,912,300 | 8,610,386,241,179,915,000 | 37.035714 | 100 | 0.597809 | false |
kennedyshead/home-assistant | tests/components/zwave/conftest.py | 6 | 2428 | """Fixtures for Z-Wave tests."""
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from homeassistant.components.zwave import const
from tests.components.light.conftest import mock_light_profiles # noqa: F401
from tests.mock.zwave import MockNetwork, MockNode, MockOption, MockValue
@pytest.fixture
def mock_openzwave():
"""Mock out Open Z-Wave."""
base_mock = MagicMock()
libopenzwave = base_mock.libopenzwave
libopenzwave.__file__ = "test"
base_mock.network.ZWaveNetwork = MockNetwork
base_mock.option.ZWaveOption = MockOption
with patch.dict(
"sys.modules",
{
"libopenzwave": libopenzwave,
"openzwave.option": base_mock.option,
"openzwave.network": base_mock.network,
"openzwave.group": base_mock.group,
},
):
yield base_mock
@pytest.fixture
def mock_discovery():
"""Mock discovery."""
discovery = MagicMock()
discovery.async_load_platform = AsyncMock(return_value=None)
yield discovery
@pytest.fixture
def mock_import_module():
"""Mock import module."""
platform = MagicMock()
mock_device = MagicMock()
mock_device.name = "test_device"
platform.get_device.return_value = mock_device
import_module = MagicMock()
import_module.return_value = platform
yield import_module
@pytest.fixture
def mock_values():
"""Mock values."""
node = MockNode()
mock_schema = {
const.DISC_COMPONENT: "mock_component",
const.DISC_VALUES: {
const.DISC_PRIMARY: {const.DISC_COMMAND_CLASS: ["mock_primary_class"]},
"secondary": {const.DISC_COMMAND_CLASS: ["mock_secondary_class"]},
"optional": {
const.DISC_COMMAND_CLASS: ["mock_optional_class"],
const.DISC_OPTIONAL: True,
},
},
}
value_class = MagicMock()
value_class.primary = MockValue(
command_class="mock_primary_class", node=node, value_id=1000
)
value_class.secondary = MockValue(command_class="mock_secondary_class", node=node)
value_class.duplicate_secondary = MockValue(
command_class="mock_secondary_class", node=node
)
value_class.optional = MockValue(command_class="mock_optional_class", node=node)
value_class.no_match_value = MockValue(command_class="mock_bad_class", node=node)
yield (node, value_class, mock_schema)
| apache-2.0 | -3,302,917,385,268,499,500 | -7,380,948,105,621,669,000 | 29.35 | 86 | 0.650741 | false |
skia-dev/oss-fuzz | projects/urllib3/fuzz_urlparse.py | 3 | 1029 | #!/usr/bin/python3
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import atheris
import urllib3
def TestOneInput(data):
fdp = atheris.FuzzedDataProvider(data)
original = fdp.ConsumeUnicode(sys.maxsize)
try:
urllib3.util.parse_url(original)
except urllib3.exceptions.LocationParseError:
    pass
return
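# Clarifying note: atheris calls TestOneInput() repeatedly with mutated byte strings;
# LocationParseError is swallowed because malformed URLs are an expected outcome, while
# any other exception propagates and is reported by the fuzzer as a finding.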
def main():
atheris.Setup(sys.argv, TestOneInput, enable_python_coverage=True)
atheris.Fuzz()
if __name__ == "__main__":
main()
| apache-2.0 | -2,441,122,846,910,653,400 | 6,025,916,869,144,720,000 | 26.810811 | 74 | 0.726919 | false |
RowenStipe/qTox | tools/update-server/deployUpdateFTP_64.py | 21 | 1100 | #!/usr/bin/python2.7
# This script deploys a qTox update to an FTP server.
# Pass the path to the root of the local update server as an argument; no spaces allowed
import sys
import os
target = 'win64'
prefix = '/qtox/'+target+'/'
uploadcmd1 = "bash -c '"+'ftp -n tux3-dev.tox.chat 0<<<"`echo -ne "user qtox-win-update-upload INSERT-PASSWORD-HERE\ncd '+target+'\nsend '
uploadcmd2 = '\n"`"'+"'"
def upload(file, rfile):
#print(uploadcmd1+file+' '+rfile+uploadcmd2)
os.system(uploadcmd1+file+' '+rfile+uploadcmd2)
# Check our local folders
if (len(sys.argv) < 2):
print("ERROR: Needs the path to the local update server in argument")
sys.exit(1)
localpath = sys.argv[1];
# Upload files/
filenames = next(os.walk(localpath+prefix+'/files/'))[2]
for filename in filenames:
print("Uploading files/"+filename+'...')
upload(localpath+prefix+'/files/'+filename, 'files/'+filename)
# Upload version and flist
print("Uploading flist...")
upload(localpath+prefix+'flist', 'flist')
print("Uploading version...")
upload(localpath+prefix+'version', 'version')
| gpl-3.0 | 5,801,396,132,734,555,000 | 26,000,184,615,937,570 | 30.428571 | 138 | 0.679091 | false |
ICGC-TCGA-PanCancer/pancancer-sandbox | pcawg_metadata_parser/generate_all_donors.py | 1 | 24480 | #!/usr/bin/env python
import sys
import os
import re
import glob
import xmltodict
import json
import yaml
import copy
import logging
from argparse import ArgumentParser
from argparse import RawDescriptionHelpFormatter
from elasticsearch import Elasticsearch
from collections import OrderedDict
import datetime
import dateutil.parser
from itertools import izip
from distutils.version import LooseVersion
es_queries = [
# query 0: donors_sanger_vcf_without_missing_bams
{
"fields": "donor_unique_id",
"size": 10000
}
]
def create_reorganized_donor(donor_unique_id, es_json):
reorganized_donor = {
'donor_unique_id': donor_unique_id,
'submitter_donor_id': es_json['submitter_donor_id'],
'dcc_project_code': es_json['dcc_project_code'],
'data_train': 'train2' if es_json.get('flags').get('is_train2_donor') else 'train3',
'train2_pilot': True if es_json.get('flags').get('is_train2_pilot') else False,
'wgs': {
'normal_specimen': {
'bwa_alignment': {
}
},
'tumor_specimens': []
},
'rna_seq': {
'normal_specimen': {},
'tumor_specimens': []
}
}
if es_json.get('normal_alignment_status') and es_json.get('normal_alignment_status').get('aligned_bam'):
reorganized_donor['wgs']['normal_specimen']['bwa_alignment'] = {
'submitter_specimen_id': es_json.get('normal_alignment_status').get('submitter_specimen_id'),
'submitter_sample_id': es_json.get('normal_alignment_status').get('submitter_sample_id'),
'specimen_type': es_json.get('normal_alignment_status').get('dcc_specimen_type'),
'aliquot_id': es_json.get('normal_alignment_status').get('aliquot_id'),
'gnos_repo': filter_liri_jp(es_json.get('dcc_project_code'), es_json.get('normal_alignment_status').get('aligned_bam').get('gnos_repo')),
'gnos_id': es_json.get('normal_alignment_status').get('aligned_bam').get('gnos_id'),
'gnos_last_modified': es_json.get('normal_alignment_status').get('aligned_bam').get('gnos_last_modified')[-1],
'files': [
{
'bam_file_name': es_json.get('normal_alignment_status').get('aligned_bam').get('bam_file_name'),
'bam_file_md5sum': es_json.get('normal_alignment_status').get('aligned_bam').get('bam_file_md5sum'),
'bam_file_size': es_json.get('normal_alignment_status').get('aligned_bam').get('bam_file_size')
}
]
}
add_wgs_tumor_specimens(reorganized_donor, es_json)
add_rna_seq_info(reorganized_donor, es_json)
return reorganized_donor
def add_wgs_tumor_specimens(reorganized_donor, es_json):
wgs_tumor_alignment_info = es_json.get('tumor_alignment_status') \
if es_json.get('tumor_alignment_status') else []
wgs_tumor_sanger_vcf_info = es_json.get('variant_calling_results').get('sanger_variant_calling') \
if es_json.get('variant_calling_results') else {}
sanger_vcf_files = wgs_tumor_sanger_vcf_info.get('files')
tumor_wgs_specimen_count = 0
aliquot_info = {}
for aliquot in wgs_tumor_alignment_info:
tumor_wgs_specimen_count += 1
aliquot_id = aliquot.get('aliquot_id')
aliquot_info = {
'bwa_alignment':{ },
'sanger_variant_calling':{ }
}
if aliquot.get('aligned_bam'):
aliquot_info['bwa_alignment'] = {
'submitter_specimen_id': aliquot.get('submitter_specimen_id'),
'submitter_sample_id': aliquot.get('submitter_sample_id'),
'specimen_type': aliquot.get('dcc_specimen_type'),
'aliquot_id': aliquot.get('aliquot_id'),
'gnos_repo': filter_liri_jp(es_json.get('dcc_project_code'), aliquot.get('aligned_bam').get('gnos_repo')),
'gnos_id': aliquot.get('aligned_bam').get('gnos_id'),
'gnos_last_modified': aliquot.get('aligned_bam').get('gnos_last_modified')[-1],
'files':[
{
'bam_file_name': aliquot.get('aligned_bam').get('bam_file_name'),
'bam_file_md5sum': aliquot.get('aligned_bam').get('bam_file_md5sum'),
'bam_file_size': aliquot.get('aligned_bam').get('bam_file_size')
}
]
}
if sanger_vcf_files:
aliquot_info['sanger_variant_calling'] = {
'submitter_specimen_id': aliquot.get('submitter_specimen_id'),
'submitter_sample_id': aliquot.get('submitter_sample_id'),
'specimen_type': aliquot.get('dcc_specimen_type'),
'aliquot_id': aliquot.get('aliquot_id'),
'gnos_repo': wgs_tumor_sanger_vcf_info.get('gnos_repo'),
'gnos_id': wgs_tumor_sanger_vcf_info.get('gnos_id'),
'gnos_last_modified': wgs_tumor_sanger_vcf_info.get('gnos_last_modified')[-1],
'files':[]
}
for f in sanger_vcf_files:
if aliquot_id in f.get('file_name'):
aliquot_info.get('sanger_variant_calling').get('files').append(f)
reorganized_donor.get('wgs').get('tumor_specimens').append(aliquot_info)
reorganized_donor['tumor_wgs_specimen_count'] = tumor_wgs_specimen_count
def filter_liri_jp(project, gnos_repo):
if not project == 'LIRI-JP':
return gnos_repo
elif "https://gtrepo-riken.annailabs.com/" in gnos_repo:
return ["https://gtrepo-riken.annailabs.com/"]
else:
print "This should never happen: alignment for LIRI-JP is not available at Riken repo"
sys.exit(1)
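# For illustration: for project 'LIRI-JP' the function keeps only the Riken repo
# ('https://gtrepo-riken.annailabs.com/') from whatever gnos_repo list is passed in and
# aborts if Riken is absent; every other project code gets its gnos_repo list back untouched.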
def add_rna_seq_info(reorganized_donor, es_json):
    # To build the PCAWG Santa Cruz pilot dataset, this was a temporary workaround to exclude the 130 bad
    # RNA-Seq entries from the MALY-DE and CLLE-ES projects
#if reorganized_donor.get('dcc_project_code') in ('MALY-DE', 'CLLE-ES'): return
rna_seq_info = es_json.get('rna_seq').get('alignment')
for specimen_type in rna_seq_info.keys():
if not rna_seq_info.get(specimen_type): # the specimen_type has no alignment result
continue
if 'normal' in specimen_type:
aliquot = rna_seq_info.get(specimen_type)
alignment_info = {}
for workflow_type in aliquot.keys():
alignment_info[workflow_type] = {
'submitter_specimen_id': aliquot.get(workflow_type).get('submitter_specimen_id'),
'submitter_sample_id': aliquot.get(workflow_type).get('submitter_sample_id'),
'specimen_type': aliquot.get(workflow_type).get('dcc_specimen_type'),
'aliquot_id': aliquot.get(workflow_type).get('aliquot_id'),
'gnos_repo': aliquot.get(workflow_type).get('gnos_info').get('gnos_repo'),
'gnos_id': aliquot.get(workflow_type).get('gnos_info').get('gnos_id'),
'gnos_last_modified': aliquot.get(workflow_type).get('gnos_info').get('gnos_last_modified')[-1],
'files': [
{
'bam_file_name': aliquot.get(workflow_type).get('gnos_info').get('bam_file_name'),
'bam_file_md5sum': aliquot.get(workflow_type).get('gnos_info').get('bam_file_md5sum'),
'bam_file_size': aliquot.get(workflow_type).get('gnos_info').get('bam_file_size')
}
]
}
reorganized_donor.get('rna_seq')[specimen_type + ('_specimens' if specimen_type == 'tumor' else '_specimen')] = alignment_info
else:
for aliquot in rna_seq_info.get(specimen_type):
alignment_info = {}
for workflow_type in aliquot.keys():
alignment_info[workflow_type] = {
'submitter_specimen_id': aliquot.get(workflow_type).get('submitter_specimen_id'),
'submitter_sample_id': aliquot.get(workflow_type).get('submitter_sample_id'),
'specimen_type': aliquot.get(workflow_type).get('dcc_specimen_type'),
'aliquot_id': aliquot.get(workflow_type).get('aliquot_id'),
'gnos_repo': aliquot.get(workflow_type).get('gnos_info').get('gnos_repo'),
'gnos_id': aliquot.get(workflow_type).get('gnos_info').get('gnos_id'),
'gnos_last_modified': aliquot.get(workflow_type).get('gnos_info').get('gnos_last_modified')[-1],
'files': [
{
'bam_file_name': aliquot.get(workflow_type).get('gnos_info').get('bam_file_name'),
'bam_file_md5sum': aliquot.get(workflow_type).get('gnos_info').get('bam_file_md5sum'),
'bam_file_size': aliquot.get(workflow_type).get('gnos_info').get('bam_file_size')
}
]
}
reorganized_donor.get('rna_seq')[specimen_type + '_specimens'].append(alignment_info)
def get_donor_json(es, es_index, donor_unique_id):
es_query_donor = {
"query": {
"term": {
"donor_unique_id": donor_unique_id
}
}
}
response = es.search(index=es_index, body=es_query_donor)
es_json = response['hits']['hits'][0]['_source']
return es_json
def get_donors_list(es, es_index, es_queries):
q_index = 0
response = es.search(index=es_index, body=es_queries[q_index])
donors_list = []
for p in response['hits']['hits']:
donors_list.append(p.get('fields').get('donor_unique_id')[0])
return donors_list
def init_es(es_host, es_index):
es = Elasticsearch([ es_host ])
es.indices.create( es_index, ignore=400 )
# create mappings
es_mapping = open('pancan.reorganized.donor.mapping.json')
es.indices.put_mapping(index=es_index, doc_type='donor', body=es_mapping.read())
es_mapping.close()
return es
def set_default(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
if isinstance(obj, set):
return list(obj)
raise TypeError
def generate_json_for_tsv_file(reorganized_donor):
pilot_tsv_json = OrderedDict()
pilot_tsv_json['dcc_project_code'] = reorganized_donor.get('dcc_project_code')
pilot_tsv_json['submitter_donor_id'] = reorganized_donor.get('submitter_donor_id')
pilot_tsv_json['data_train'] = reorganized_donor.get('data_train')
pilot_tsv_json['train2_pilot'] = reorganized_donor.get('train2_pilot')
# wgs normal specimen
pilot_tsv_json['normal_wgs_submitter_specimen_id'] = reorganized_donor.get('wgs').get('normal_specimen').get('bwa_alignment').get('submitter_specimen_id')
pilot_tsv_json['normal_wgs_submitter_sample_id'] = reorganized_donor.get('wgs').get('normal_specimen').get('bwa_alignment').get('submitter_sample_id')
pilot_tsv_json['normal_wgs_aliquot_id'] = reorganized_donor.get('wgs').get('normal_specimen').get('bwa_alignment').get('aliquot_id')
pilot_tsv_json['normal_wgs_alignment_gnos_repo'] = [reorganized_donor.get('wgs').get('normal_specimen').get('bwa_alignment').get('gnos_repo')]
pilot_tsv_json['normal_wgs_alignment_gnos_id'] = reorganized_donor.get('wgs').get('normal_specimen').get('bwa_alignment').get('gnos_id')
pilot_tsv_json['normal_wgs_alignment_bam_file_name'] = reorganized_donor.get('wgs').get('normal_specimen').get('bwa_alignment').get('files')[0].get('bam_file_name')
# wgs tumor specimen
    wgs_tumor_specimens = reorganized_donor.get('wgs').get('tumor_specimens')
pilot_tsv_json['tumor_wgs_specimen_count'] = reorganized_donor.get('tumor_wgs_specimen_count')
pilot_tsv_json['tumor_wgs_submitter_specimen_id'] = []
pilot_tsv_json['tumor_wgs_submitter_sample_id'] = []
pilot_tsv_json['tumor_wgs_aliquot_id'] = []
pilot_tsv_json['tumor_wgs_alignment_gnos_repo'] = []
pilot_tsv_json['tumor_wgs_alignment_gnos_id'] = []
pilot_tsv_json['tumor_wgs_alignment_bam_file_name'] = []
# wgs tumor sanger vcf
pilot_tsv_json['sanger_variant_calling_repo'] = []
    pilot_tsv_json['sanger_variant_calling_gnos_id'] = wgs_tumor_specimens[0].get('sanger_variant_calling').get('gnos_id')
pilot_tsv_json['sanger_variant_calling_file_name_prefix'] = []
    for specimen in wgs_tumor_specimens:
pilot_tsv_json['tumor_wgs_submitter_specimen_id'].append(specimen.get('bwa_alignment').get('submitter_specimen_id'))
pilot_tsv_json['tumor_wgs_submitter_sample_id'].append(specimen.get('bwa_alignment').get('submitter_sample_id'))
pilot_tsv_json['tumor_wgs_aliquot_id'].append(specimen.get('bwa_alignment').get('aliquot_id'))
pilot_tsv_json['tumor_wgs_alignment_gnos_repo'].append(specimen.get('bwa_alignment').get('gnos_repo'))
pilot_tsv_json['tumor_wgs_alignment_gnos_id'].append(specimen.get('bwa_alignment').get('gnos_id'))
pilot_tsv_json['tumor_wgs_alignment_bam_file_name'].append(specimen.get('bwa_alignment').get('files')[0].get('bam_file_name'))
# wgs tumor sanger vcf
pilot_tsv_json['sanger_variant_calling_repo'].append(specimen.get('sanger_variant_calling').get('gnos_repo'))
pilot_tsv_json['sanger_variant_calling_file_name_prefix'].append(specimen.get('sanger_variant_calling').get('aliquot_id'))
# rna_seq normal specimen
pilot_tsv_json['normal_rna_seq_submitter_specimen_id'] = None
pilot_tsv_json['normal_rna_seq_submitter_sample_id'] = None
pilot_tsv_json['normal_rna_seq_aliquot_id'] = None
pilot_tsv_json['normal_rna_seq_STAR_alignment_gnos_repo'] = None
pilot_tsv_json['normal_rna_seq_STAR_alignment_gnos_id'] = None
pilot_tsv_json['normal_rna_seq_STAR_alignment_bam_file_name'] = None
pilot_tsv_json['normal_rna_seq_TOPHAT2_alignment_gnos_repo'] = None
pilot_tsv_json['normal_rna_seq_TOPHAT2_alignment_gnos_id'] = None
pilot_tsv_json['normal_rna_seq_TOPHAT2_alignment_bam_file_name'] = None
rna_seq_normal = reorganized_donor.get('rna_seq').get('normal_specimen')
if rna_seq_normal and rna_seq_normal.get('tophat'):
pilot_tsv_json['normal_rna_seq_submitter_specimen_id'] = rna_seq_normal.get('tophat').get('submitter_specimen_id')
pilot_tsv_json['normal_rna_seq_submitter_sample_id'] = rna_seq_normal.get('tophat').get('submitter_sample_id')
pilot_tsv_json['normal_rna_seq_aliquot_id'] = rna_seq_normal.get('tophat').get('aliquot_id')
pilot_tsv_json['normal_rna_seq_TOPHAT2_alignment_gnos_repo'] = [rna_seq_normal.get('tophat').get('gnos_repo')]
pilot_tsv_json['normal_rna_seq_TOPHAT2_alignment_gnos_id'] = rna_seq_normal.get('tophat').get('gnos_id')
pilot_tsv_json['normal_rna_seq_TOPHAT2_alignment_bam_file_name'] = rna_seq_normal.get('tophat').get('files')[0].get('bam_file_name')
if rna_seq_normal and rna_seq_normal.get('star'):
pilot_tsv_json['normal_rna_seq_submitter_specimen_id'] = rna_seq_normal.get('star').get('submitter_specimen_id')
pilot_tsv_json['normal_rna_seq_submitter_sample_id'] = rna_seq_normal.get('star').get('submitter_sample_id')
pilot_tsv_json['normal_rna_seq_aliquot_id'] = rna_seq_normal.get('star').get('aliquot_id')
pilot_tsv_json['normal_rna_seq_STAR_alignment_gnos_repo'] = rna_seq_normal.get('star').get('gnos_repo')
pilot_tsv_json['normal_rna_seq_STAR_alignment_gnos_id'] = rna_seq_normal.get('star').get('gnos_id')
pilot_tsv_json['normal_rna_seq_STAR_alignment_bam_file_name'] = rna_seq_normal.get('star').get('files')[0].get('bam_file_name')
# rna_seq tumor specimens
pilot_tsv_json['tumor_rna_seq_submitter_specimen_id'] = []
pilot_tsv_json['tumor_rna_seq_submitter_sample_id'] = []
pilot_tsv_json['tumor_rna_seq_aliquot_id'] = []
pilot_tsv_json['tumor_rna_seq_STAR_alignment_gnos_repo'] = []
pilot_tsv_json['tumor_rna_seq_STAR_alignment_gnos_id'] = []
pilot_tsv_json['tumor_rna_seq_STAR_alignment_bam_file_name'] = []
pilot_tsv_json['tumor_rna_seq_TOPHAT2_alignment_gnos_repo'] = []
pilot_tsv_json['tumor_rna_seq_TOPHAT2_alignment_gnos_id'] = []
pilot_tsv_json['tumor_rna_seq_TOPHAT2_alignment_bam_file_name'] = []
rna_seq_tumor = reorganized_donor.get('rna_seq').get('tumor_specimens')
rna_seq_tumor_specimen_id = []
rna_seq_tumor_sample_id = []
rna_seq_tumor_aliquot_id = []
if rna_seq_tumor:
for rna_seq_tumor_specimen in rna_seq_tumor:
if rna_seq_tumor_specimen.get('tophat'):
rna_seq_tumor_specimen_id_tmp = rna_seq_tumor_specimen.get('tophat').get('submitter_specimen_id')
rna_seq_tumor_sample_id_tmp = rna_seq_tumor_specimen.get('tophat').get('submitter_sample_id')
rna_seq_tumor_aliquot_id_tmp = rna_seq_tumor_specimen.get('tophat').get('aliquot_id')
pilot_tsv_json['tumor_rna_seq_TOPHAT2_alignment_gnos_repo'].append(rna_seq_tumor_specimen.get('tophat').get('gnos_repo'))
pilot_tsv_json['tumor_rna_seq_TOPHAT2_alignment_gnos_id'].append(rna_seq_tumor_specimen.get('tophat').get('gnos_id'))
pilot_tsv_json['tumor_rna_seq_TOPHAT2_alignment_bam_file_name'].append(rna_seq_tumor_specimen.get('tophat').get('files')[0].get('bam_file_name'))
if rna_seq_tumor_specimen.get('star'):
rna_seq_tumor_specimen_id_tmp = rna_seq_tumor_specimen.get('star').get('submitter_specimen_id')
rna_seq_tumor_sample_id_tmp = rna_seq_tumor_specimen.get('star').get('submitter_sample_id')
rna_seq_tumor_aliquot_id_tmp = rna_seq_tumor_specimen.get('star').get('aliquot_id')
pilot_tsv_json['tumor_rna_seq_STAR_alignment_gnos_repo'].append(rna_seq_tumor_specimen.get('star').get('gnos_repo'))
pilot_tsv_json['tumor_rna_seq_STAR_alignment_gnos_id'].append(rna_seq_tumor_specimen.get('star').get('gnos_id'))
pilot_tsv_json['tumor_rna_seq_STAR_alignment_bam_file_name'].append(rna_seq_tumor_specimen.get('star').get('files')[0].get('bam_file_name'))
rna_seq_tumor_specimen_id.append(rna_seq_tumor_specimen_id_tmp)
rna_seq_tumor_sample_id.append(rna_seq_tumor_sample_id_tmp)
rna_seq_tumor_aliquot_id.append(rna_seq_tumor_aliquot_id_tmp)
pilot_tsv_json['tumor_rna_seq_submitter_specimen_id'] = rna_seq_tumor_specimen_id
pilot_tsv_json['tumor_rna_seq_submitter_sample_id'] = rna_seq_tumor_sample_id
pilot_tsv_json['tumor_rna_seq_aliquot_id'] = rna_seq_tumor_aliquot_id
return pilot_tsv_json
def write_individual_json(report_dir, donor_unique_id, reorganized_donor):
(project_code, donor_id) = donor_unique_id.split('::')
project_dir = report_dir + '/donors/' + project_code
if not os.path.exists(project_dir): os.makedirs(project_dir)
with open(project_dir + '/' + donor_id + '.json', 'w') as w:
w.write(json.dumps(reorganized_donor, indent=4, sort_keys=True))
def main(argv=None):
parser = ArgumentParser(description="PCAWG Reorganized Json Donors Info Generator",
formatter_class=RawDescriptionHelpFormatter)
parser.add_argument("-m", "--metadata_dir", dest="metadata_dir",
help="Directory containing metadata manifest files", required=True)
parser.add_argument("-r", "--gnos_repo", dest="repo",
help="Specify which GNOS repo to process, process all repos if none specified", required=False)
args = parser.parse_args()
metadata_dir = args.metadata_dir # this dir contains gnos manifest files, will also host all reports
repo = args.repo
    if not os.path.isdir(metadata_dir): # TODO: should add more directory name checks to make sure it's right
sys.exit('Error: specified metadata directory does not exist!')
timestamp = str.split(metadata_dir, '/')[-1]
es_index = 'p_' + ('' if not repo else repo+'_') + re.sub(r'\D', '', timestamp).replace('20','',1)
es_index_reorganize = 'r_' + ('' if not repo else repo+'_') + re.sub(r'\D', '', timestamp).replace('20','',1)
es_type = "donor"
es_host = 'localhost:9200'
es = Elasticsearch([es_host])
#es_reorganized = init_es(es_host, es_index_reorganize)
donor_fh = open(metadata_dir+'/reports/donors_all.jsonl', 'w')
# pilot_tsv_fh = open(metadata_dir + '/reports/donors_with_bwa_alignment.tsv', 'w')
# # read the tsv fields file and write to the pilot donor tsv file
# tsv_fields = ["Project code", "Submitter donor ID", "Data train", "Train2 pilot", "Normal WGS submitter specimen ID", \
# "Normal WGS submitter sample ID", "Normal WGS aliquot ID", "Normal WGS alignment GNOS repo(s)", "Normal WGS alignment GNOS analysis ID", \
# "Normal WGS alignment BAM file name", "Tumour WGS Specimen Count", "Tumour WGS submitter specimen ID(s)", \
# "Tumour WGS submitter sample ID(s)", "Tumour WGS aliquot ID(s)", "Tumour WGS alignment GNOS repo(s)", \
# "Tumour WGS alignment GNOS analysis ID(s)", "Tumour WGS alignment BAM file name(s)", "Sanger variant call GNOS repo(s)", \
# "Sanger variant call GNOS analysis ID(s)", "Sanger variant call GNOS file name prefix", "Normal RNA-Seq submitter specimen ID", \
# "Normal RNA-Seq submitter sample ID", "Normal RNA-Seq aliquot ID", "Normal RNA-Seq STAR alignment GNOS repo(s)", \
# "Normal RNA-Seq STAR alignment GNOS analysis ID", "Normal RNA-Seq STAR alignment BAM file name", \
# "Normal RNA-Seq TopHat2 alignment GNOS repo(s)", "Normal RNA-Seq TopHat2 alignment GNOS analysis ID", \
# "Normal RNA-Seq TopHat2 alignment BAM file name", "Tumour RNA-Seq submitter specimen ID", "Tumour RNA-Seq submitter sample ID", \
# "Tumour RNA-Seq aliquot ID", "Tumour RNA-Seq STAR alignment GNOS repo(s)", "Tumour RNA-Seq STAR alignment GNOS analysis ID", \
# "Tumour RNA-Seq STAR alignment BAM file name", "Tumour RNA-Seq TopHat2 alignment GNOS repo(s)", \
# "Tumour RNA-Seq TopHat2 alignment GNOS analysis ID", "Tumour RNA-Seq TopHat2 alignment BAM file name"
# ]
# pilot_tsv_fh.write('\t'.join(tsv_fields) + '\n')
    # get the list of donors whose Sanger VCF has no missing BAMs
donors_list = get_donors_list(es, es_index, es_queries)
# get json doc for each donor and reorganize it
for donor_unique_id in donors_list:
es_json = get_donor_json(es, es_index, donor_unique_id)
reorganized_donor = create_reorganized_donor(donor_unique_id, es_json)
        write_individual_json(metadata_dir+'/reports/', donor_unique_id, reorganized_donor)
# DO NOT NEED THIS YET: push to Elasticsearch
#es_reorganized.index(index=es_index_reorganize, doc_type='donor', id=reorganized_donor['donor_unique_id'], \
# body=json.loads(json.dumps(reorganized_donor, default=set_default)), timeout=90 )
donor_fh.write(json.dumps(reorganized_donor, default=set_default) + '\n')
'''
# generate json for tsv file from reorganized donor
pilot_tsv_json = generate_json_for_tsv_file(reorganized_donor)
# write to the tsv file
line = ""
for p in pilot_tsv_json.keys():
if isinstance(pilot_tsv_json.get(p), list):
if pilot_tsv_json.get(p):
count0 = 0
for q in pilot_tsv_json.get(p):
if isinstance(q, list):
if q:
line += '|'.join(q)
else:
line += ''
else:
line += str(q) if q else ''
count0 = count0 + 1
if count0 < len(pilot_tsv_json.get(p)):
line += ','
else:
line += '' # None as empty string
else:
line += str(pilot_tsv_json.get(p)) if pilot_tsv_json.get(p) is not None else ''
line += '\t' # field ends
line = line[:-1] # remove the last unwanted '\t'
pilot_tsv_fh.write(line + '\n')
'''
#pilot_tsv_fh.close()
donor_fh.close()
return 0
if __name__ == "__main__":
sys.exit(main())
| gpl-2.0 | 5,619,339,383,752,055,000 | -7,467,174,387,839,363,000 | 51.758621 | 168 | 0.615891 | false |
jeffmarcom/checkbox | checkbox/lib/conversion.py | 2 | 5369 | #
# This file is part of Checkbox.
#
# Copyright 2008 Canonical Ltd.
#
# Checkbox is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Checkbox is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Checkbox. If not, see <http://www.gnu.org/licenses/>.
#
import re
from datetime import (
datetime,
timedelta,
)
from checkbox.lib.tz import tzutc
DATETIME_RE = re.compile(r"""
^(?P<year>\d\d\d\d)-?(?P<month>\d\d)-?(?P<day>\d\d)
T(?P<hour>\d\d):?(?P<minute>\d\d):?(?P<second>\d\d)
(?:\.(?P<second_fraction>\d{0,6}))?
(?P<tz>
(?:(?P<tz_sign>[-+])(?P<tz_hour>\d\d):(?P<tz_minute>\d\d))
| Z)?$
""", re.VERBOSE)
TYPE_FORMATS = (
(r"(yes|true)", lambda v: True),
(r"(no|false)", lambda v: False),
(r"-?\d+", lambda v: int(v.group(0))),
(r"-?\d+\.\d+", lambda v: float(v.group(0))),
(r"(-?\d+) ?([kmgt]?b?)", lambda v: int(v.group(1))),
(r"(-?\d+\.\d+) ?([kmgt]?b?)", lambda v: float(v.group(1))),
(r"(-?\d+) ?([kmgt]?hz)", lambda v: int(v.group(1))),
(r"(-?\d+\.\d+) ?([kmgt]?hz)", lambda v: float(v.group(1))))
TYPE_FORMATS = tuple(
(re.compile(r"^%s$" % pattern, re.IGNORECASE), format)
for pattern, format in TYPE_FORMATS)
TYPE_MULTIPLIERS = (
(r"b", 1),
(r"kb?", 1024),
(r"mb?", 1024 * 1024),
(r"gb?", 1024 * 1024 * 1024),
(r"tb?", 1024 * 1024 * 1024 * 1024),
(r"hz", 1),
(r"khz?", 1024),
(r"mhz?", 1024 * 1024),
(r"ghz?", 1024 * 1024 * 1024),
(r"thz?", 1024 * 1024 * 1024 * 1024))
TYPE_MULTIPLIERS = tuple(
(re.compile(r"^%s$" % pattern, re.IGNORECASE), multiplier)
for pattern, multiplier in TYPE_MULTIPLIERS)
def datetime_to_string(dt):
"""Return a consistent string representation for a given datetime.
:param dt: The datetime object.
"""
return dt.isoformat()
def string_to_datetime(string):
"""Return a datetime object from a consistent string representation.
:param string: The string representation.
"""
# we cannot use time.strptime: this function accepts neither fractions
# of a second nor a time zone given e.g. as '+02:30'.
match = DATETIME_RE.match(string)
# The Relax NG schema allows a leading minus sign and year numbers
# with more than four digits, which are not "covered" by _time_regex.
if not match:
raise ValueError("Datetime with unreasonable value: %s" % string)
time_parts = match.groupdict()
year = int(time_parts['year'])
month = int(time_parts['month'])
day = int(time_parts['day'])
hour = int(time_parts['hour'])
minute = int(time_parts['minute'])
second = int(time_parts['second'])
second_fraction = time_parts['second_fraction']
if second_fraction is not None:
milliseconds = second_fraction + '0' * (6 - len(second_fraction))
milliseconds = int(milliseconds)
else:
milliseconds = 0
# The Relax NG validator accepts leap seconds, but the datetime
# constructor rejects them. The time values submitted by the HWDB
# client are not necessarily very precise, hence we can round down
# to 59.999999 seconds without losing any real precision.
if second > 59:
second = 59
milliseconds = 999999
dt = datetime(
year, month, day, hour, minute, second, milliseconds, tzinfo=tzutc)
tz_sign = time_parts['tz_sign']
tz_hour = time_parts['tz_hour']
tz_minute = time_parts['tz_minute']
if tz_sign in ('-', '+'):
delta = timedelta(hours=int(tz_hour), minutes=int(tz_minute))
if tz_sign == '-':
dt = dt + delta
else:
dt = dt - delta
return dt
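# Worked example (illustrative input): string_to_datetime("2008-07-01T13:05:30.5+02:30")
# returns datetime(2008, 7, 1, 10, 35, 30, 500000, tzinfo=tzutc) -- the fraction is padded
# to microseconds and the +02:30 offset is subtracted to normalise the value to UTC.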
def sizeof_bytes(bytes):
for x in ["bytes", "KB", "MB", "GB", "TB"]:
string = "%3.1f%s" % (bytes, x)
if bytes < 1024.0:
break
bytes /= 1024.0
return string
def sizeof_hertz(hertz):
for x in ["Hz", "KHz", "MHz", "GHz"]:
string = "%3.1f%s" % (hertz, x)
if hertz < 1000.0:
break
hertz /= 1000.0
return string
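# Sample outputs (illustrative): sizeof_bytes(123456789) -> '117.7MB' and
# sizeof_hertz(2500000) -> '2.5MHz'; each loop divides down until the value falls
# below 1024 (bytes) or 1000 (hertz) and returns the last formatted string.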
def string_to_type(string):
"""Return a typed representation for the given string.
The result might be a bool, int or float. The string might also be
supplemented by a multiplier like KB which would return an int or
float multiplied by 1024 for example.
:param string: The string representation.
"""
for regex, formatter in TYPE_FORMATS:
match = regex.match(string)
if match:
string = formatter(match)
if len(match.groups()) > 1:
unit = match.group(2)
for regex, multiplier in TYPE_MULTIPLIERS:
match = regex.match(unit)
if match:
string *= multiplier
break
else:
raise ValueError("Unknown multiplier: %s" % unit)
break
return string
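# A few illustrative conversions:
#   string_to_type("yes") -> True
#   string_to_type("10 KB") -> 10240 (int, scaled by the KB multiplier)
#   string_to_type("2.5 GHz") -> 2684354560.0 (float, scaled by 1024 ** 3)
#   string_to_type("foo") -> "foo" (no pattern matches, so the string is returned unchanged)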
| gpl-3.0 | 3,547,934,388,563,329,500 | -268,488,830,953,721,470 | 30.215116 | 75 | 0.590799 | false |
RaoUmer/django | django/core/files/storage.py | 97 | 10905 | import os
import errno
try:
from urllib.parse import urljoin
except ImportError: # Python 2
from urlparse import urljoin
import itertools
from datetime import datetime
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation
from django.core.files import locks, File
from django.core.files.move import file_move_safe
from django.utils.encoding import force_text, filepath_to_uri
from django.utils.functional import LazyObject
from django.utils.importlib import import_module
from django.utils.text import get_valid_filename
from django.utils._os import safe_join, abspathu
__all__ = ('Storage', 'FileSystemStorage', 'DefaultStorage', 'default_storage')
class Storage(object):
"""
A base storage class, providing some default behaviors that all other
storage systems can inherit or override, as necessary.
"""
# The following methods represent a public interface to private methods.
# These shouldn't be overridden by subclasses unless absolutely necessary.
def open(self, name, mode='rb'):
"""
Retrieves the specified file from storage.
"""
return self._open(name, mode)
def save(self, name, content):
"""
Saves new content to the file specified by name. The content should be a
proper File object, ready to be read from the beginning.
"""
# Get the proper name for the file, as it will actually be saved.
if name is None:
name = content.name
name = self.get_available_name(name)
name = self._save(name, content)
# Store filenames with forward slashes, even on Windows
return force_text(name.replace('\\', '/'))
# These methods are part of the public API, with default implementations.
def get_valid_name(self, name):
"""
Returns a filename, based on the provided filename, that's suitable for
use in the target storage system.
"""
return get_valid_filename(name)
def get_available_name(self, name):
"""
Returns a filename that's free on the target storage system, and
available for new content to be written to.
"""
dir_name, file_name = os.path.split(name)
file_root, file_ext = os.path.splitext(file_name)
# If the filename already exists, add an underscore and a number (before
# the file extension, if one exists) to the filename until the generated
# filename doesn't exist.
count = itertools.count(1)
while self.exists(name):
# file_ext includes the dot.
name = os.path.join(dir_name, "%s_%s%s" % (file_root, next(count), file_ext))
return name
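    # For instance (names illustrative): if "photos/me.jpg" is taken, this returns the first
    # free name in the series "photos/me_1.jpg", "photos/me_2.jpg", ...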
def path(self, name):
"""
Returns a local filesystem path where the file can be retrieved using
Python's built-in open() function. Storage systems that can't be
accessed using open() should *not* implement this method.
"""
raise NotImplementedError("This backend doesn't support absolute paths.")
# The following methods form the public API for storage systems, but with
# no default implementations. Subclasses must implement *all* of these.
def delete(self, name):
"""
Deletes the specified file from the storage system.
"""
raise NotImplementedError()
def exists(self, name):
"""
        Returns True if a file referenced by the given name already exists in the
storage system, or False if the name is available for a new file.
"""
raise NotImplementedError()
def listdir(self, path):
"""
Lists the contents of the specified path, returning a 2-tuple of lists;
the first item being directories, the second item being files.
"""
raise NotImplementedError()
def size(self, name):
"""
Returns the total size, in bytes, of the file specified by name.
"""
raise NotImplementedError()
def url(self, name):
"""
Returns an absolute URL where the file's contents can be accessed
directly by a Web browser.
"""
raise NotImplementedError()
def accessed_time(self, name):
"""
Returns the last accessed time (as datetime object) of the file
specified by name.
"""
raise NotImplementedError()
def created_time(self, name):
"""
Returns the creation time (as datetime object) of the file
specified by name.
"""
raise NotImplementedError()
def modified_time(self, name):
"""
Returns the last modified time (as datetime object) of the file
specified by name.
"""
raise NotImplementedError()
class FileSystemStorage(Storage):
"""
Standard filesystem storage
"""
def __init__(self, location=None, base_url=None):
if location is None:
location = settings.MEDIA_ROOT
self.base_location = location
self.location = abspathu(self.base_location)
if base_url is None:
base_url = settings.MEDIA_URL
self.base_url = base_url
def _open(self, name, mode='rb'):
return File(open(self.path(name), mode))
def _save(self, name, content):
full_path = self.path(name)
# Create any intermediate directories that do not exist.
# Note that there is a race between os.path.exists and os.makedirs:
# if os.makedirs fails with EEXIST, the directory was created
# concurrently, and we can continue normally. Refs #16082.
directory = os.path.dirname(full_path)
if not os.path.exists(directory):
try:
os.makedirs(directory)
except OSError as e:
if e.errno != errno.EEXIST:
raise
if not os.path.isdir(directory):
raise IOError("%s exists and is not a directory." % directory)
# There's a potential race condition between get_available_name and
# saving the file; it's possible that two threads might return the
# same name, at which point all sorts of fun happens. So we need to
# try to create the file, but if it already exists we have to go back
# to get_available_name() and try again.
while True:
try:
# This file has a file path that we can move.
if hasattr(content, 'temporary_file_path'):
file_move_safe(content.temporary_file_path(), full_path)
content.close()
# This is a normal uploadedfile that we can stream.
else:
# This fun binary flag incantation makes os.open throw an
# OSError if the file already exists before we open it.
flags = (os.O_WRONLY | os.O_CREAT | os.O_EXCL |
getattr(os, 'O_BINARY', 0))
# The current umask value is masked out by os.open!
fd = os.open(full_path, flags, 0o666)
try:
locks.lock(fd, locks.LOCK_EX)
_file = None
for chunk in content.chunks():
if _file is None:
mode = 'wb' if isinstance(chunk, bytes) else 'wt'
_file = os.fdopen(fd, mode)
_file.write(chunk)
finally:
locks.unlock(fd)
if _file is not None:
_file.close()
else:
os.close(fd)
except OSError as e:
if e.errno == errno.EEXIST:
# Ooops, the file exists. We need a new file name.
name = self.get_available_name(name)
full_path = self.path(name)
else:
raise
else:
# OK, the file save worked. Break out of the loop.
break
if settings.FILE_UPLOAD_PERMISSIONS is not None:
os.chmod(full_path, settings.FILE_UPLOAD_PERMISSIONS)
return name
def delete(self, name):
name = self.path(name)
# If the file exists, delete it from the filesystem.
# Note that there is a race between os.path.exists and os.remove:
# if os.remove fails with ENOENT, the file was removed
# concurrently, and we can continue normally.
if os.path.exists(name):
try:
os.remove(name)
except OSError as e:
if e.errno != errno.ENOENT:
raise
def exists(self, name):
return os.path.exists(self.path(name))
def listdir(self, path):
path = self.path(path)
directories, files = [], []
for entry in os.listdir(path):
if os.path.isdir(os.path.join(path, entry)):
directories.append(entry)
else:
files.append(entry)
return directories, files
def path(self, name):
try:
path = safe_join(self.location, name)
except ValueError:
raise SuspiciousOperation("Attempted access to '%s' denied." % name)
return os.path.normpath(path)
def size(self, name):
return os.path.getsize(self.path(name))
def url(self, name):
if self.base_url is None:
raise ValueError("This file is not accessible via a URL.")
return urljoin(self.base_url, filepath_to_uri(name))
def accessed_time(self, name):
return datetime.fromtimestamp(os.path.getatime(self.path(name)))
def created_time(self, name):
return datetime.fromtimestamp(os.path.getctime(self.path(name)))
def modified_time(self, name):
return datetime.fromtimestamp(os.path.getmtime(self.path(name)))
def get_storage_class(import_path=None):
if import_path is None:
import_path = settings.DEFAULT_FILE_STORAGE
try:
dot = import_path.rindex('.')
except ValueError:
raise ImproperlyConfigured("%s isn't a storage module." % import_path)
module, classname = import_path[:dot], import_path[dot+1:]
try:
mod = import_module(module)
except ImportError as e:
raise ImproperlyConfigured('Error importing storage module %s: "%s"' % (module, e))
try:
return getattr(mod, classname)
except AttributeError:
raise ImproperlyConfigured('Storage module "%s" does not define a "%s" class.' % (module, classname))
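# Rough usage sketch (the dotted path below is Django's own default backend):
#   storage_cls = get_storage_class('django.core.files.storage.FileSystemStorage')
#   storage = storage_cls(location='/tmp/uploads')  # '/tmp/uploads' is an arbitrary example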
class DefaultStorage(LazyObject):
def _setup(self):
self._wrapped = get_storage_class()()
default_storage = DefaultStorage()
| bsd-3-clause | 2,307,601,884,980,564,000 | 1,571,781,130,207,072,800 | 35.35 | 109 | 0.593489 | false |
igor-rangel7l/igorrangel.repository | plugin.video.SportsDevil/lib/utils/xbmcUtils.py | 27 | 2568 | # -*- coding: utf-8 -*-
import xbmcgui, xbmc, xbmcplugin
enable_debug = False
#######################################
# Xbmc Helpers
#######################################
def select(title, menuItems):
select = xbmcgui.Dialog().select(title, menuItems)
if select == -1:
return None
else:
return menuItems[select]
def getKeyboard(default = '', heading = '', hidden = False):
kboard = xbmc.Keyboard(default, heading, hidden)
kboard.doModal()
if kboard.isConfirmed():
return kboard.getText()
return ''
def getImage(title):
dialog = xbmcgui.Dialog()
image = dialog.browse(1, title, 'pictures', '.jpg|.png', True)
return image
def showMessage(msg):
xbmc.executebuiltin('Notification(SportsDevil,' + str(msg.encode('utf-8', 'ignore')) + ')')
def showBusyAnimation():
xbmc.executebuiltin( 'ActivateWindow(busydialog)' )
def hideBusyAnimation():
xbmc.executebuiltin( 'Dialog.Close(busydialog,true)' )
def closeAllDialogs():
xbmc.executebuiltin('Dialog.Close(all, true)')
def log(msg):
if enable_debug:
try:
xbmc.log(msg)
except:
xbmc.log(msg.encode('utf-8'))
def setSortMethodsForCurrentXBMCList(handle, sortKeys):
def addSortMethod(method):
xbmcplugin.addSortMethod(handle = handle, sortMethod = method)
    if not sortKeys:
addSortMethod(xbmcplugin.SORT_METHOD_UNSORTED)
else:
if 'name' in sortKeys:
addSortMethod(xbmcplugin.SORT_METHOD_LABEL)
if 'size' in sortKeys:
addSortMethod(xbmcplugin.SORT_METHOD_SIZE)
if 'duration' in sortKeys:
addSortMethod(xbmcplugin.SORT_METHOD_DURATION)
if 'genre' in sortKeys:
addSortMethod(xbmcplugin.SORT_METHOD_GENRE)
if 'rating' in sortKeys:
addSortMethod(xbmcplugin.SORT_METHOD_VIDEO_RATING)
if 'date' in sortKeys:
addSortMethod(xbmcplugin.SORT_METHOD_DATE)
if 'file' in sortKeys:
addSortMethod(xbmcplugin.SORT_METHOD_FILE)
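# Typical call from plugin code (values illustrative): the handle is the integer Kodi/XBMC
# passes to the add-on, e.g. setSortMethodsForCurrentXBMCList(int(sys.argv[1]), ['name', 'date'])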
def getContainerFolderPath():
return xbmc.getInfoLabel('Container.FolderPath')
def getListItemPath():
return xbmc.getInfoLabel('ListItem.Path')
def getCurrentWindow():
return xbmc.getInfoLabel('System.CurrentWindow')
def getCurrentControl():
return xbmc.getInfoLabel('System.CurrentControl')
def getCurrentWindowXmlFile():
return xbmc.getInfoLabel('Window.Property(xmlfile)') | gpl-2.0 | -62,284,588,977,779,170 | 4,937,604,970,780,323,000 | 26.329787 | 95 | 0.623442 | false |
bplancher/odoo | addons/sale_stock/tests/test_sale_stock.py | 18 | 12679 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp.addons.sale.tests.test_sale_common import TestSale
from openerp.exceptions import UserError
class TestSaleStock(TestSale):
def test_00_sale_stock_invoice(self):
"""
Test SO's changes when playing around with stock moves, quants, pack operations, pickings
and whatever other model there is in stock with "invoice on delivery" products
"""
inv_obj = self.env['account.invoice']
self.so = self.env['sale.order'].create({
'partner_id': self.partner.id,
'partner_invoice_id': self.partner.id,
'partner_shipping_id': self.partner.id,
'order_line': [(0, 0, {'name': p.name, 'product_id': p.id, 'product_uom_qty': 2, 'product_uom': p.uom_id.id, 'price_unit': p.list_price}) for (_, p) in self.products.iteritems()],
'pricelist_id': self.env.ref('product.list0').id,
'picking_policy': 'direct',
})
# confirm our standard so, check the picking
self.so.action_confirm()
self.assertTrue(self.so.picking_ids, 'Sale Stock: no picking created for "invoice on delivery" stockable products')
# invoice on order
self.so.action_invoice_create()
# deliver partially, check the so's invoice_status and delivered quantities
self.assertEqual(self.so.invoice_status, 'no', 'Sale Stock: so invoice_status should be "nothing to invoice" after invoicing')
pick = self.so.picking_ids
pick.force_assign()
pick.pack_operation_product_ids.write({'qty_done': 1})
wiz_act = pick.do_new_transfer()
wiz = self.env[wiz_act['res_model']].browse(wiz_act['res_id'])
wiz.process()
self.assertEqual(self.so.invoice_status, 'to invoice', 'Sale Stock: so invoice_status should be "to invoice" after partial delivery')
del_qties = [sol.qty_delivered for sol in self.so.order_line]
del_qties_truth = [1.0 if sol.product_id.type in ['product', 'consu'] else 0.0 for sol in self.so.order_line]
self.assertEqual(del_qties, del_qties_truth, 'Sale Stock: delivered quantities are wrong after partial delivery')
# invoice on delivery: only stockable products
inv_id = self.so.action_invoice_create()
inv_1 = inv_obj.browse(inv_id)
self.assertTrue(all([il.product_id.invoice_policy == 'delivery' for il in inv_1.invoice_line_ids]),
'Sale Stock: invoice should only contain "invoice on delivery" products')
# complete the delivery and check invoice_status again
self.assertEqual(self.so.invoice_status, 'no',
'Sale Stock: so invoice_status should be "nothing to invoice" after partial delivery and invoicing')
self.assertEqual(len(self.so.picking_ids), 2, 'Sale Stock: number of pickings should be 2')
pick_2 = self.so.picking_ids[0]
pick_2.force_assign()
pick_2.pack_operation_product_ids.write({'qty_done': 1})
self.assertIsNone(pick_2.do_new_transfer(), 'Sale Stock: second picking should be final without need for a backorder')
self.assertEqual(self.so.invoice_status, 'to invoice', 'Sale Stock: so invoice_status should be "to invoice" after complete delivery')
del_qties = [sol.qty_delivered for sol in self.so.order_line]
del_qties_truth = [2.0 if sol.product_id.type in ['product', 'consu'] else 0.0 for sol in self.so.order_line]
self.assertEqual(del_qties, del_qties_truth, 'Sale Stock: delivered quantities are wrong after complete delivery')
# invoice on delivery
inv_id = self.so.action_invoice_create()
self.assertEqual(self.so.invoice_status, 'invoiced',
'Sale Stock: so invoice_status should be "fully invoiced" after complete delivery and invoicing')
def test_01_sale_stock_order(self):
"""
Test SO's changes when playing around with stock moves, quants, pack operations, pickings
and whatever other model there is in stock with "invoice on order" products
"""
# let's cheat and put all our products to "invoice on order"
self.so = self.env['sale.order'].create({
'partner_id': self.partner.id,
'partner_invoice_id': self.partner.id,
'partner_shipping_id': self.partner.id,
'order_line': [(0, 0, {'name': p.name, 'product_id': p.id, 'product_uom_qty': 2, 'product_uom': p.uom_id.id, 'price_unit': p.list_price}) for (_, p) in self.products.iteritems()],
'pricelist_id': self.env.ref('product.list0').id,
'picking_policy': 'direct',
})
for sol in self.so.order_line:
sol.product_id.invoice_policy = 'order'
# confirm our standard so, check the picking
self.so.action_confirm()
self.assertTrue(self.so.picking_ids, 'Sale Stock: no picking created for "invoice on order" stockable products')
# let's do an invoice for a deposit of 5%
adv_wiz = self.env['sale.advance.payment.inv'].with_context(active_ids=[self.so.id]).create({
'advance_payment_method': 'percentage',
'amount': 5.0,
'product_id': self.env.ref('sale.advance_product_0').id,
})
act = adv_wiz.with_context(open_invoices=True).create_invoices()
inv = self.env['account.invoice'].browse(act['res_id'])
self.assertEqual(inv.amount_untaxed, self.so.amount_untaxed * 5.0 / 100.0, 'Sale Stock: deposit invoice is wrong')
self.assertEqual(self.so.invoice_status, 'to invoice', 'Sale Stock: so should be to invoice after invoicing deposit')
# invoice on order: everything should be invoiced
self.so.action_invoice_create(final=True)
self.assertEqual(self.so.invoice_status, 'invoiced', 'Sale Stock: so should be fully invoiced after second invoice')
# deliver, check the delivered quantities
pick = self.so.picking_ids
pick.force_assign()
pick.pack_operation_product_ids.write({'qty_done': 2})
self.assertIsNone(pick.do_new_transfer(), 'Sale Stock: complete delivery should not need a backorder')
del_qties = [sol.qty_delivered for sol in self.so.order_line]
del_qties_truth = [2.0 if sol.product_id.type in ['product', 'consu'] else 0.0 for sol in self.so.order_line]
self.assertEqual(del_qties, del_qties_truth, 'Sale Stock: delivered quantities are wrong after partial delivery')
# invoice on delivery: nothing to invoice
with self.assertRaises(UserError):
self.so.action_invoice_create()
def test_02_sale_stock_return(self):
"""
Test a SO with a product invoiced on delivery. Deliver and invoice the SO, then do a return
        of the picking. Check that a refund invoice is generated correctly.
"""
        # initial so
self.partner = self.env.ref('base.res_partner_1')
self.product = self.env.ref('product.product_product_47')
so_vals = {
'partner_id': self.partner.id,
'partner_invoice_id': self.partner.id,
'partner_shipping_id': self.partner.id,
'order_line': [(0, 0, {
'name': self.product.name,
'product_id': self.product.id,
'product_uom_qty': 5.0,
'product_uom': self.product.uom_id.id,
'price_unit': self.product.list_price})],
'pricelist_id': self.env.ref('product.list0').id,
}
self.so = self.env['sale.order'].create(so_vals)
# confirm our standard so, check the picking
self.so.action_confirm()
self.assertTrue(self.so.picking_ids, 'Sale Stock: no picking created for "invoice on delivery" stockable products')
# invoice in on delivery, nothing should be invoiced
self.assertEqual(self.so.invoice_status, 'no', 'Sale Stock: so invoice_status should be "nothing to invoice"')
# deliver completely
pick = self.so.picking_ids
pick.force_assign()
pick.pack_operation_product_ids.write({'qty_done': 5})
pick.do_new_transfer()
# Check quantity delivered
del_qty = sum(sol.qty_delivered for sol in self.so.order_line)
self.assertEqual(del_qty, 5.0, 'Sale Stock: delivered quantity should be 5.0 after complete delivery')
# Check invoice
self.assertEqual(self.so.invoice_status, 'to invoice', 'Sale Stock: so invoice_status should be "to invoice" before invoicing')
inv_1_id = self.so.action_invoice_create()
self.assertEqual(self.so.invoice_status, 'invoiced', 'Sale Stock: so invoice_status should be "invoiced" after invoicing')
self.assertEqual(len(inv_1_id), 1, 'Sale Stock: only one invoice should be created')
self.inv_1 = self.env['account.invoice'].browse(inv_1_id)
        self.assertEqual(self.inv_1.amount_untaxed, self.so.amount_untaxed, 'Sale Stock: amount in SO and invoice should be the same')
# Create return picking
StockReturnPicking = self.env['stock.return.picking']
default_data = StockReturnPicking.with_context(active_ids=pick.ids, active_id=pick.ids[0]).default_get(['move_dest_exists', 'original_location_id', 'product_return_moves', 'parent_location_id', 'location_id'])
return_wiz = StockReturnPicking.with_context(active_ids=pick.ids, active_id=pick.ids[0]).create(default_data)
res = return_wiz.create_returns()
return_pick = self.env['stock.picking'].browse(res['res_id'])
# Validate picking
return_pick.force_assign()
return_pick.pack_operation_product_ids.write({'qty_done': 5})
return_pick.do_new_transfer()
# Check invoice
self.assertEqual(self.so.invoice_status, 'invoiced', 'Sale Stock: so invoice_status should be "invoiced" after picking return')
def test_03_sale_stock_delivery_partial(self):
"""
        Test a SO with a product invoiced on delivery. Deliver partially and invoice the SO; when
        the SO is set to 'done', the SO should be fully invoiced.
"""
        # initial so
self.partner = self.env.ref('base.res_partner_1')
self.product = self.env.ref('product.product_product_47')
so_vals = {
'partner_id': self.partner.id,
'partner_invoice_id': self.partner.id,
'partner_shipping_id': self.partner.id,
'order_line': [(0, 0, {
'name': self.product.name,
'product_id': self.product.id,
'product_uom_qty': 5.0,
'product_uom': self.product.uom_id.id,
'price_unit': self.product.list_price})],
'pricelist_id': self.env.ref('product.list0').id,
}
self.so = self.env['sale.order'].create(so_vals)
# confirm our standard so, check the picking
self.so.action_confirm()
self.assertTrue(self.so.picking_ids, 'Sale Stock: no picking created for "invoice on delivery" stockable products')
# invoice in on delivery, nothing should be invoiced
self.assertEqual(self.so.invoice_status, 'no', 'Sale Stock: so invoice_status should be "nothing to invoice"')
# deliver partially
pick = self.so.picking_ids
pick.force_assign()
pick.pack_operation_product_ids.write({'qty_done': 4})
backorder_wiz_id = pick.do_new_transfer()['res_id']
backorder_wiz = self.env['stock.backorder.confirmation'].browse([backorder_wiz_id])
backorder_wiz.process_cancel_backorder()
# Check quantity delivered
del_qty = sum(sol.qty_delivered for sol in self.so.order_line)
self.assertEqual(del_qty, 4.0, 'Sale Stock: delivered quantity should be 4.0 after partial delivery')
# Check invoice
self.assertEqual(self.so.invoice_status, 'to invoice', 'Sale Stock: so invoice_status should be "to invoice" before invoicing')
inv_1_id = self.so.action_invoice_create()
self.assertEqual(self.so.invoice_status, 'no', 'Sale Stock: so invoice_status should be "no" after invoicing')
self.assertEqual(len(inv_1_id), 1, 'Sale Stock: only one invoice should be created')
self.inv_1 = self.env['account.invoice'].browse(inv_1_id)
        self.assertEqual(self.inv_1.amount_untaxed, self.so.amount_untaxed, 'Sale Stock: amount in SO and invoice should be the same')
self.so.action_done()
self.assertEqual(self.so.invoice_status, 'invoiced', 'Sale Stock: so invoice_status should be "invoiced" when set to done')
| agpl-3.0 | 5,826,716,658,876,835,000 | -2,343,536,570,778,875,000 | 56.371041 | 217 | 0.641691 | false |
tbabej/astropy | astropy/nddata/mixins/tests/test_ndslicing.py | 2 | 5027 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# TEST_UNICODE_LITERALS
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
from numpy.testing import assert_array_equal
from ... import NDData, NDSlicingMixin
from ...nduncertainty import NDUncertainty, StdDevUncertainty
from ....tests.helper import pytest
from .... import units as u
# Just add the Mixin to NDData
# TODO: Make this use NDDataRef instead!
class NDDataSliceable(NDSlicingMixin, NDData):
pass
# Just some uncertainty (following the StdDevUncertainty implementation of
# storing the uncertainty in a property 'array') with slicing.
class SomeUncertainty(NDUncertainty):
@property
def uncertainty_type(self):
return 'fake'
def _propagate_add(self, data, final_data):
pass
def _propagate_subtract(self, data, final_data):
pass
def _propagate_multiply(self, data, final_data):
pass
def _propagate_divide(self, data, final_data):
pass
def test_slicing_only_data():
data = np.arange(10)
nd = NDDataSliceable(data)
nd2 = nd[2:5]
assert_array_equal(data[2:5], nd2.data)
def test_slicing_data_scalar_fail():
data = np.array(10)
nd = NDDataSliceable(data)
with pytest.raises(TypeError): # as exc
nd[:]
# assert exc.value.args[0] == 'Scalars cannot be sliced.'
def test_slicing_1ddata_ndslice():
data = np.array([10, 20])
nd = NDDataSliceable(data)
# Standard numpy warning here:
with pytest.raises(IndexError):
nd[:, :]
@pytest.mark.parametrize('prop_name', ['mask', 'wcs', 'uncertainty'])
def test_slicing_1dmask_ndslice(prop_name):
    # Data is 2d but the mask is only 1d, so the IndexError raised when slicing
    # the mask should propagate up to the user.
data = np.ones((3, 3))
kwarg = {prop_name: np.ones(3)}
nd = NDDataSliceable(data, **kwarg)
# Standard numpy warning here:
with pytest.raises(IndexError):
nd[:, :]
def test_slicing_all_npndarray_1d():
data = np.arange(10)
mask = data > 3
uncertainty = np.linspace(10, 20, 10)
wcs = np.linspace(1, 1000, 10)
# Just to have them too
unit = u.s
meta = {'observer': 'Brian'}
nd = NDDataSliceable(data, mask=mask, uncertainty=uncertainty, wcs=wcs,
unit=unit, meta=meta)
nd2 = nd[2:5]
assert_array_equal(data[2:5], nd2.data)
assert_array_equal(mask[2:5], nd2.mask)
assert_array_equal(uncertainty[2:5], nd2.uncertainty.array)
assert_array_equal(wcs[2:5], nd2.wcs)
assert unit is nd2.unit
assert meta == nd.meta
def test_slicing_all_npndarray_nd():
# See what happens for multidimensional properties
data = np.arange(1000).reshape(10, 10, 10)
mask = data > 3
uncertainty = np.linspace(10, 20, 1000).reshape(10, 10, 10)
wcs = np.linspace(1, 1000, 1000).reshape(10, 10, 10)
nd = NDDataSliceable(data, mask=mask, uncertainty=uncertainty, wcs=wcs)
# Slice only 1D
nd2 = nd[2:5]
assert_array_equal(data[2:5], nd2.data)
assert_array_equal(mask[2:5], nd2.mask)
assert_array_equal(uncertainty[2:5], nd2.uncertainty.array)
assert_array_equal(wcs[2:5], nd2.wcs)
# Slice 3D
nd2 = nd[2:5, :, 4:7]
assert_array_equal(data[2:5, :, 4:7], nd2.data)
assert_array_equal(mask[2:5, :, 4:7], nd2.mask)
assert_array_equal(uncertainty[2:5, :, 4:7], nd2.uncertainty.array)
assert_array_equal(wcs[2:5, :, 4:7], nd2.wcs)
def test_slicing_all_npndarray_shape_diff():
data = np.arange(10)
mask = (data > 3)[0:9]
uncertainty = np.linspace(10, 20, 15)
wcs = np.linspace(1, 1000, 12)
nd = NDDataSliceable(data, mask=mask, uncertainty=uncertainty, wcs=wcs)
nd2 = nd[2:5]
assert_array_equal(data[2:5], nd2.data)
# All are sliced even if the shapes differ (no Info)
assert_array_equal(mask[2:5], nd2.mask)
assert_array_equal(uncertainty[2:5], nd2.uncertainty.array)
assert_array_equal(wcs[2:5], nd2.wcs)
def test_slicing_all_something_wrong():
data = np.arange(10)
mask = [False]*10
uncertainty = {'rdnoise': 2.9, 'gain': 1.4}
wcs = 145 * u.degree
nd = NDDataSliceable(data, mask=mask, uncertainty=uncertainty, wcs=wcs)
nd2 = nd[2:5]
# Sliced properties:
assert_array_equal(data[2:5], nd2.data)
assert_array_equal(mask[2:5], nd2.mask)
# Not sliced attributes (they will raise a Info nevertheless)
    assert uncertainty is nd2.uncertainty
assert_array_equal(wcs, nd2.wcs)
def test_boolean_slicing():
data = np.arange(10)
mask = data.copy()
uncertainty = StdDevUncertainty(data.copy())
wcs = data.copy()
nd = NDDataSliceable(data, mask=mask, uncertainty=uncertainty, wcs=wcs)
nd2 = nd[(nd.data >= 3) & (nd.data < 8)]
assert_array_equal(data[3:8], nd2.data)
assert_array_equal(mask[3:8], nd2.mask)
assert_array_equal(wcs[3:8], nd2.wcs)
assert_array_equal(uncertainty.array[3:8], nd2.uncertainty.array)
| bsd-3-clause | 7,237,675,727,633,053,000 | 2,834,456,711,010,175,500 | 29.840491 | 75 | 0.654665 | false |
ajs124/esp-idf | tools/idf_monitor.py | 3 | 20709 | #!/usr/bin/env python
#
# esp-idf serial output monitor tool. Does some helpful things:
# - Looks up hex addresses in ELF file with addr2line
# - Reset ESP32 via serial RTS line (Ctrl-T Ctrl-R)
# - Run "make flash" (Ctrl-T Ctrl-F)
# - Run "make app-flash" (Ctrl-T Ctrl-A)
# - If gdbstub output is detected, gdb is automatically loaded
#
# Copyright 2015-2016 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Contains elements taken from miniterm "Very simple serial terminal" which
# is part of pySerial. https://github.com/pyserial/pyserial
# (C)2002-2015 Chris Liechti <[email protected]>
#
# Originally released under BSD-3-Clause license.
#
from __future__ import print_function, division
import subprocess
import argparse
import codecs
import re
import os
try:
import queue
except ImportError:
import Queue as queue
import time
import sys
import serial
import serial.tools.miniterm as miniterm
import threading
import ctypes
import types
from distutils.version import StrictVersion
key_description = miniterm.key_description
# Control-key characters
CTRL_A = '\x01'
CTRL_B = '\x02'
CTRL_F = '\x06'
CTRL_H = '\x08'
CTRL_R = '\x12'
CTRL_T = '\x14'
CTRL_RBRACKET = '\x1d' # Ctrl+]
# ANSI terminal codes
ANSI_RED = '\033[1;31m'
ANSI_YELLOW = '\033[0;33m'
ANSI_NORMAL = '\033[0m'
def color_print(message, color):
""" Print a message to stderr with colored highlighting """
sys.stderr.write("%s%s%s\n" % (color, message, ANSI_NORMAL))
def yellow_print(message):
color_print(message, ANSI_YELLOW)
def red_print(message):
color_print(message, ANSI_RED)
__version__ = "1.0"
# Tags for tuples in queues
TAG_KEY = 0
TAG_SERIAL = 1
# regex matches a potential PC value (0x4xxxxxxx)
MATCH_PCADDR = re.compile(r'0x4[0-9a-f]{7}', re.IGNORECASE)
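# (all executable ESP32 addresses -- ROM, IRAM and the flash/IROM cache -- live in the 0x40000000 region, hence the 0x4 prefix in the pattern above)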
DEFAULT_TOOLCHAIN_PREFIX = "xtensa-esp32-elf-"
class StoppableThread(object):
"""
Provide a Thread-like class which can be 'cancelled' via a subclass-provided
cancellation method.
Can be started and stopped multiple times.
Isn't an instance of type Thread because Python Thread objects can only be run once
"""
def __init__(self):
self._thread = None
@property
def alive(self):
"""
Is 'alive' whenever the internal thread object exists
"""
return self._thread is not None
def start(self):
if self._thread is None:
self._thread = threading.Thread(target=self._run_outer)
self._thread.start()
def _cancel(self):
pass # override to provide cancellation functionality
def run(self):
pass # override for the main thread behaviour
def _run_outer(self):
try:
self.run()
finally:
self._thread = None
def stop(self):
if self._thread is not None:
old_thread = self._thread
self._thread = None
self._cancel()
old_thread.join()
class ConsoleReader(StoppableThread):
""" Read input keys from the console and push them to the queue,
until stopped.
"""
def __init__(self, console, event_queue):
super(ConsoleReader, self).__init__()
self.console = console
self.event_queue = event_queue
def run(self):
self.console.setup()
try:
while self.alive:
try:
if os.name == 'nt':
# Windows kludge: because the console.cancel() method doesn't
# seem to work to unblock getkey() on the Windows implementation.
#
# So we only call getkey() if we know there's a key waiting for us.
import msvcrt
while not msvcrt.kbhit() and self.alive:
time.sleep(0.1)
if not self.alive:
break
c = self.console.getkey()
except KeyboardInterrupt:
c = '\x03'
if c is not None:
self.event_queue.put((TAG_KEY, c), False)
finally:
self.console.cleanup()
def _cancel(self):
if os.name == 'posix':
# this is the way cancel() is implemented in pyserial 3.3 or newer,
# older pyserial (3.1+) has cancellation implemented via 'select',
# which does not work when console sends an escape sequence response
#
# even older pyserial (<3.1) does not have this method
#
# on Windows there is a different (also hacky) fix, applied above.
#
# note that TIOCSTI is not implemented in WSL / bash-on-Windows.
# TODO: introduce some workaround to make it work there.
import fcntl, termios
fcntl.ioctl(self.console.fd, termios.TIOCSTI, b'\0')
class SerialReader(StoppableThread):
""" Read serial data from the serial port and push to the
event queue, until stopped.
"""
def __init__(self, serial, event_queue):
super(SerialReader, self).__init__()
self.baud = serial.baudrate
self.serial = serial
self.event_queue = event_queue
if not hasattr(self.serial, 'cancel_read'):
# enable timeout for checking alive flag,
# if cancel_read not available
self.serial.timeout = 0.25
def run(self):
if not self.serial.is_open:
self.serial.baudrate = self.baud
self.serial.rts = True # Force an RTS reset on open
self.serial.open()
self.serial.rts = False
try:
while self.alive:
data = self.serial.read(self.serial.in_waiting or 1)
if len(data):
self.event_queue.put((TAG_SERIAL, data), False)
finally:
self.serial.close()
def _cancel(self):
if hasattr(self.serial, 'cancel_read'):
try:
self.serial.cancel_read()
except:
pass
class Monitor(object):
"""
Monitor application main class.
This was originally derived from miniterm.Miniterm, but it turned out to be easier to write from scratch for this
purpose.
Main difference is that all event processing happens in the main thread, not the worker threads.
"""
def __init__(self, serial_instance, elf_file, make="make", toolchain_prefix=DEFAULT_TOOLCHAIN_PREFIX, eol="CRLF"):
super(Monitor, self).__init__()
self.event_queue = queue.Queue()
self.console = miniterm.Console()
if os.name == 'nt':
sys.stderr = ANSIColorConverter(sys.stderr)
self.console.output = ANSIColorConverter(self.console.output)
self.console.byte_output = ANSIColorConverter(self.console.byte_output)
if StrictVersion(serial.VERSION) < StrictVersion('3.3.0'):
# Use Console.getkey implementation from 3.3.0 (to be in sync with the ConsoleReader._cancel patch above)
def getkey_patched(self):
c = self.enc_stdin.read(1)
if c == unichr(0x7f):
c = unichr(8) # map the BS key (which yields DEL) to backspace
return c
self.console.getkey = types.MethodType(getkey_patched, self.console)
self.serial = serial_instance
self.console_reader = ConsoleReader(self.console, self.event_queue)
self.serial_reader = SerialReader(self.serial, self.event_queue)
self.elf_file = elf_file
self.make = make
self.toolchain_prefix = toolchain_prefix
self.menu_key = CTRL_T
self.exit_key = CTRL_RBRACKET
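        # translate end-of-line characters typed into the console before they are written to the target's serial port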
self.translate_eol = {
"CRLF": lambda c: c.replace(b"\n", b"\r\n"),
"CR": lambda c: c.replace(b"\n", b"\r"),
"LF": lambda c: c.replace(b"\r", b"\n"),
}[eol]
# internal state
self._pressed_menu_key = False
self._read_line = b""
self._gdb_buffer = b""
def main_loop(self):
self.console_reader.start()
self.serial_reader.start()
try:
while self.console_reader.alive and self.serial_reader.alive:
(event_tag, data) = self.event_queue.get()
if event_tag == TAG_KEY:
self.handle_key(data)
elif event_tag == TAG_SERIAL:
self.handle_serial_input(data)
else:
raise RuntimeError("Bad event data %r" % ((event_tag,data),))
finally:
try:
self.console_reader.stop()
self.serial_reader.stop()
except:
pass
sys.stderr.write(ANSI_NORMAL + "\n")
def handle_key(self, key):
if self._pressed_menu_key:
self.handle_menu_key(key)
self._pressed_menu_key = False
elif key == self.menu_key:
self._pressed_menu_key = True
elif key == self.exit_key:
self.console_reader.stop()
self.serial_reader.stop()
else:
try:
key = self.translate_eol(key)
self.serial.write(codecs.encode(key))
except serial.SerialException:
pass # this shouldn't happen, but sometimes port has closed in serial thread
def handle_serial_input(self, data):
# this may need to be made more efficient, as it pushes out a byte
# at a time to the console
for b in data:
self.console.write_bytes(b)
if b == b'\n': # end of line
self.handle_serial_input_line(self._read_line.strip())
self._read_line = b""
else:
self._read_line += b
self.check_gdbstub_trigger(b)
def handle_serial_input_line(self, line):
for m in re.finditer(MATCH_PCADDR, line):
self.lookup_pc_address(m.group())
def handle_menu_key(self, c):
if c == self.exit_key or c == self.menu_key: # send verbatim
self.serial.write(codecs.encode(c))
elif c in [ CTRL_H, 'h', 'H', '?' ]:
red_print(self.get_help_text())
elif c == CTRL_R: # Reset device via RTS
self.serial.setRTS(True)
time.sleep(0.2)
self.serial.setRTS(False)
elif c == CTRL_F: # Recompile & upload
self.run_make("flash")
elif c == CTRL_A: # Recompile & upload app only
self.run_make("app-flash")
else:
red_print('--- unknown menu character {} --'.format(key_description(c)))
def get_help_text(self):
return """
--- idf_monitor ({version}) - ESP-IDF monitor tool
--- based on miniterm from pySerial
---
--- {exit:8} Exit program
--- {menu:8} Menu escape key, followed by:
--- Menu keys:
--- {menu:7} Send the menu character itself to remote
--- {exit:7} Send the exit character itself to remote
--- {reset:7} Reset target board via RTS line
--- {make:7} Run 'make flash' to build & flash
--- {appmake:7} Run 'make app-flash' to build & flash app
""".format(version=__version__,
exit=key_description(self.exit_key),
menu=key_description(self.menu_key),
reset=key_description(CTRL_R),
make=key_description(CTRL_F),
appmake=key_description(CTRL_A),
)
def __enter__(self):
""" Use 'with self' to temporarily disable monitoring behaviour """
self.serial_reader.stop()
self.console_reader.stop()
def __exit__(self, *args, **kwargs):
""" Use 'with self' to temporarily disable monitoring behaviour """
self.console_reader.start()
self.serial_reader.start()
def prompt_next_action(self, reason):
self.console.setup() # set up console to trap input characters
try:
red_print("""
--- {}
--- Press {} to exit monitor.
--- Press {} to run 'make flash'.
--- Press {} to run 'make app-flash'.
--- Press any other key to resume monitor (resets target).""".format(reason,
key_description(self.exit_key),
key_description(CTRL_F),
key_description(CTRL_A)))
k = CTRL_T # ignore CTRL-T here, so people can muscle-memory Ctrl-T Ctrl-F, etc.
while k == CTRL_T:
k = self.console.getkey()
finally:
self.console.cleanup()
if k == self.exit_key:
self.event_queue.put((TAG_KEY, k))
elif k in [ CTRL_F, CTRL_A ]:
self.event_queue.put((TAG_KEY, self.menu_key))
self.event_queue.put((TAG_KEY, k))
def run_make(self, target):
with self:
yellow_print("Running make %s..." % target)
p = subprocess.Popen([self.make,
target ])
try:
p.wait()
except KeyboardInterrupt:
p.wait()
if p.returncode != 0:
self.prompt_next_action("Build failed")
def lookup_pc_address(self, pc_addr):
translation = subprocess.check_output(
["%saddr2line" % self.toolchain_prefix,
"-pfia", "-e", self.elf_file, pc_addr],
cwd=".")
if not "?? ??:0" in translation:
yellow_print(translation)
def check_gdbstub_trigger(self, c):
self._gdb_buffer = self._gdb_buffer[-6:] + c # keep the last 7 characters seen
m = re.match(b"\\$(T..)#(..)", self._gdb_buffer) # look for a gdb "reason" for a break
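        # gdbstub frames stop replies with the GDB remote serial protocol: "$<payload>#<checksum>",
        # where the two hex digits are the modulo-256 sum of the payload bytes (verified below before launching gdb)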
if m is not None:
try:
chsum = sum(ord(p) for p in m.group(1)) & 0xFF
calc_chsum = int(m.group(2), 16)
except ValueError:
return # payload wasn't valid hex digits
if chsum == calc_chsum:
self.run_gdb()
else:
red_print("Malformed gdb message... calculated checksum %02x received %02x" % (chsum, calc_chsum))
def run_gdb(self):
with self: # disable console control
sys.stderr.write(ANSI_NORMAL)
try:
subprocess.call(["%sgdb" % self.toolchain_prefix,
"-ex", "set serial baud %d" % self.serial.baudrate,
"-ex", "target remote %s" % self.serial.port,
"-ex", "interrupt", # monitor has already parsed the first 'reason' command, need a second
self.elf_file], cwd=".")
except KeyboardInterrupt:
pass # happens on Windows, maybe other OSes
self.prompt_next_action("gdb exited")
def main():
parser = argparse.ArgumentParser("idf_monitor - a serial output monitor for esp-idf")
parser.add_argument(
'--port', '-p',
help='Serial port device',
default=os.environ.get('ESPTOOL_PORT', '/dev/ttyUSB0')
)
parser.add_argument(
'--baud', '-b',
help='Serial port baud rate',
type=int,
default=os.environ.get('MONITOR_BAUD', 115200))
parser.add_argument(
'--make', '-m',
help='Command to run make',
type=str, default='make')
parser.add_argument(
'--toolchain-prefix',
help="Triplet prefix to add before cross-toolchain names",
default=DEFAULT_TOOLCHAIN_PREFIX)
parser.add_argument(
"--eol",
choices=['CR', 'LF', 'CRLF'],
type=lambda c: c.upper(),
help="End of line to use when sending to the serial port",
default='CR')
parser.add_argument(
'elf_file', help='ELF file of application',
type=argparse.FileType('rb'))
args = parser.parse_args()
if args.port.startswith("/dev/tty."):
args.port = args.port.replace("/dev/tty.", "/dev/cu.")
yellow_print("--- WARNING: Serial ports accessed as /dev/tty.* will hang gdb if launched.")
yellow_print("--- Using %s instead..." % args.port)
serial_instance = serial.serial_for_url(args.port, args.baud,
do_not_open=True)
serial_instance.dtr = False
serial_instance.rts = False
args.elf_file.close() # don't need this as a file
# remove the parallel jobserver arguments from MAKEFLAGS, as any
# parent make is only running 1 job (monitor), so we can re-spawn
# all of the child makes we need (the -j argument remains part of
# MAKEFLAGS)
try:
makeflags = os.environ["MAKEFLAGS"]
makeflags = re.sub(r"--jobserver[^ =]*=[0-9,]+ ?", "", makeflags)
os.environ["MAKEFLAGS"] = makeflags
except KeyError:
pass # not running a make jobserver
monitor = Monitor(serial_instance, args.elf_file.name, args.make, args.toolchain_prefix, args.eol)
yellow_print('--- idf_monitor on {p.name} {p.baudrate} ---'.format(
p=serial_instance))
yellow_print('--- Quit: {} | Menu: {} | Help: {} followed by {} ---'.format(
key_description(monitor.exit_key),
key_description(monitor.menu_key),
key_description(monitor.menu_key),
key_description(CTRL_H)))
monitor.main_loop()
if os.name == 'nt':
# Windows console stuff
STD_OUTPUT_HANDLE = -11
STD_ERROR_HANDLE = -12
# wincon.h values
FOREGROUND_INTENSITY = 8
FOREGROUND_GREY = 7
# matches the ANSI color change sequences that IDF sends
RE_ANSI_COLOR = re.compile(b'\033\\[([01]);3([0-7])m')
# list mapping the 8 ANSI colors (the indexes) to Windows Console colors
ANSI_TO_WINDOWS_COLOR = [ 0, 4, 2, 6, 1, 5, 3, 7 ]
GetStdHandle = ctypes.windll.kernel32.GetStdHandle
SetConsoleTextAttribute = ctypes.windll.kernel32.SetConsoleTextAttribute
class ANSIColorConverter(object):
"""Class to wrap a file-like output stream, intercept ANSI color codes,
and convert them into calls to Windows SetConsoleTextAttribute.
Doesn't support all ANSI terminal code escape sequences, only the sequences IDF uses.
Ironically, in Windows this console output is normally wrapped by winpty which will then detect the console text
color changes and convert these back to ANSI color codes for MSYS' terminal to display. However this is the
least-bad working solution, as winpty doesn't support any "passthrough" mode for raw output.
"""
def __init__(self, output):
self.output = output
self.handle = GetStdHandle(STD_ERROR_HANDLE if self.output == sys.stderr else STD_OUTPUT_HANDLE)
self.matched = b''
def write(self, data):
for b in data:
l = len(self.matched)
if b == '\033': # ESC
self.matched = b
elif (l == 1 and b == '[') or (1 < l < 7):
self.matched += b
if self.matched == ANSI_NORMAL: # reset console
SetConsoleTextAttribute(self.handle, FOREGROUND_GREY)
self.matched = b''
elif len(self.matched) == 7: # could be an ANSI sequence
m = re.match(RE_ANSI_COLOR, self.matched)
if m is not None:
color = ANSI_TO_WINDOWS_COLOR[int(m.group(2))]
if m.group(1) == b'1':
color |= FOREGROUND_INTENSITY
SetConsoleTextAttribute(self.handle, color)
else:
self.output.write(self.matched) # not an ANSI color code, display verbatim
self.matched = b''
else:
self.output.write(b)
self.matched = b''
def flush(self):
self.output.flush()
if __name__ == "__main__":
main()
| apache-2.0 | 4,969,928,485,006,078,000 | 6,801,309,425,222,270,000 | 35.459507 | 123 | 0.565986 | false |
zicklag/godot | doc/tools/makerst.py | 4 | 16059 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import codecs
import sys
import os
import xml.etree.ElementTree as ET
input_list = []
for arg in sys.argv[1:]:
if arg.endswith(os.sep):
arg = arg[:-1]
input_list.append(arg)
if len(input_list) < 1:
print('usage: makerst.py <path to folders> and/or <path to .xml files> (order of arguments irrelevant)')
print('example: makerst.py "../../modules/" "../classes" path_to/some_class.xml')
sys.exit(0)
def validate_tag(elem, tag):
if elem.tag != tag:
print("Tag mismatch, expected '" + tag + "', got " + elem.tag)
sys.exit(255)
class_names = []
classes = {}
def ul_string(str, ul):
str += "\n"
for i in range(len(str) - 1):
str += ul
str += "\n"
return str
def make_class_list(class_list, columns):
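    # Writes class_list.rst, a table of links to every class laid out over the requested
    # number of columns. Currently unused -- the Sphinx :toctree: builds the class index
    # instead (see the commented-out call near the bottom of this script).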
f = codecs.open('class_list.rst', 'wb', 'utf-8')
prev = 0
col_max = len(class_list) / columns + 1
print(('col max is ', col_max))
col_count = 0
row_count = 0
last_initial = ''
fit_columns = []
for n in range(0, columns):
fit_columns += [[]]
indexers = []
last_initial = ''
idx = 0
for n in class_list:
col = idx / col_max
if col >= columns:
col = columns - 1
fit_columns[col] += [n]
idx += 1
if n[:1] != last_initial:
indexers += [n]
last_initial = n[:1]
row_max = 0
f.write("\n")
for n in range(0, columns):
if len(fit_columns[n]) > row_max:
row_max = len(fit_columns[n])
f.write("| ")
for n in range(0, columns):
f.write(" | |")
f.write("\n")
f.write("+")
for n in range(0, columns):
f.write("--+-------+")
f.write("\n")
for r in range(0, row_max):
s = '+ '
for c in range(0, columns):
if r >= len(fit_columns[c]):
continue
classname = fit_columns[c][r]
initial = classname[0]
if classname in indexers:
s += '**' + initial + '** | '
else:
s += ' | '
s += '[' + classname + '](class_' + classname.lower() + ') | '
s += '\n'
f.write(s)
for n in range(0, columns):
f.write("--+-------+")
f.write("\n")
def rstize_text(text, cclass):
# Linebreak + tabs in the XML should become two line breaks unless in a "codeblock"
pos = 0
while True:
pos = text.find('\n', pos)
if pos == -1:
break
pre_text = text[:pos]
while text[pos + 1] == '\t':
pos += 1
post_text = text[pos + 1:]
# Handle codeblocks
if post_text.startswith("[codeblock]"):
end_pos = post_text.find("[/codeblock]")
if end_pos == -1:
sys.exit("ERROR! [codeblock] without a closing tag!")
code_text = post_text[len("[codeblock]"):end_pos]
post_text = post_text[end_pos:]
# Remove extraneous tabs
code_pos = 0
while True:
code_pos = code_text.find('\n', code_pos)
if code_pos == -1:
break
to_skip = 0
while code_pos + to_skip + 1 < len(code_text) and code_text[code_pos + to_skip + 1] == '\t':
to_skip += 1
if len(code_text[code_pos + to_skip + 1:]) == 0:
code_text = code_text[:code_pos] + "\n"
code_pos += 1
else:
code_text = code_text[:code_pos] + "\n " + code_text[code_pos + to_skip + 1:]
code_pos += 5 - to_skip
text = pre_text + "\n[codeblock]" + code_text + post_text
pos += len("\n[codeblock]" + code_text)
# Handle normal text
else:
text = pre_text + "\n\n" + post_text
pos += 2
# Escape * character to avoid interpreting it as emphasis
pos = 0
while True:
pos = text.find('*', pos)
if pos == -1:
break
text = text[:pos] + "\*" + text[pos + 1:]
pos += 2
# Escape _ character at the end of a word to avoid interpreting it as an inline hyperlink
pos = 0
while True:
pos = text.find('_', pos)
if pos == -1:
break
if not text[pos + 1].isalnum(): # don't escape within a snake_case word
text = text[:pos] + "\_" + text[pos + 1:]
pos += 2
else:
pos += 1
# Handle [tags]
inside_code = False
pos = 0
while True:
pos = text.find('[', pos)
if pos == -1:
break
endq_pos = text.find(']', pos + 1)
if endq_pos == -1:
break
pre_text = text[:pos]
post_text = text[endq_pos + 1:]
tag_text = text[pos + 1:endq_pos]
escape_post = False
if tag_text in class_names:
tag_text = make_type(tag_text)
escape_post = True
else: # command
cmd = tag_text
space_pos = tag_text.find(' ')
if cmd == '/codeblock':
tag_text = ''
inside_code = False
# Strip newline if the tag was alone on one
if pre_text[-1] == '\n':
pre_text = pre_text[:-1]
elif cmd == '/code':
tag_text = '``'
inside_code = False
elif inside_code:
tag_text = '[' + tag_text + ']'
elif cmd.find('html') == 0:
cmd = tag_text[:space_pos]
param = tag_text[space_pos + 1:]
tag_text = param
elif cmd.find('method') == 0 or cmd.find('member') == 0 or cmd.find('signal') == 0:
cmd = tag_text[:space_pos]
param = tag_text[space_pos + 1:]
if param.find('.') != -1:
(class_param, method_param) = param.split('.')
tag_text = ':ref:`' + class_param + '.' + method_param + '<class_' + class_param + '_' + method_param + '>`'
else:
tag_text = ':ref:`' + param + '<class_' + cclass + "_" + param + '>`'
escape_post = True
elif cmd.find('image=') == 0:
tag_text = "" # ''
elif cmd.find('url=') == 0:
tag_text = ':ref:`' + cmd[4:] + '<' + cmd[4:] + ">`"
elif cmd == '/url':
tag_text = ''
escape_post = True
elif cmd == 'center':
tag_text = ''
elif cmd == '/center':
tag_text = ''
elif cmd == 'codeblock':
tag_text = '\n::\n'
inside_code = True
elif cmd == 'br':
# Make a new paragraph instead of a linebreak, rst is not so linebreak friendly
tag_text = '\n\n'
# Strip potential leading spaces
while post_text[0] == ' ':
post_text = post_text[1:]
elif cmd == 'i' or cmd == '/i':
tag_text = '*'
elif cmd == 'b' or cmd == '/b':
tag_text = '**'
elif cmd == 'u' or cmd == '/u':
tag_text = ''
elif cmd == 'code':
tag_text = '``'
inside_code = True
else:
tag_text = make_type(tag_text)
escape_post = True
# Properly escape things like `[Node]s`
if escape_post and post_text and post_text[0].isalnum(): # not punctuation, escape
post_text = '\ ' + post_text
text = pre_text + tag_text + post_text
pos = len(pre_text) + len(tag_text)
return text
def make_type(t):
global class_names
if t in class_names:
return ':ref:`' + t + '<class_' + t.lower() + '>`'
return t
def make_method(
f,
name,
m,
declare,
cname,
event=False,
pp=None
):
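    # Renders a single method signature. When `declare` is False and `pp` is a list, the
    # (return type, signature) tuple is appended to `pp` for the summary table instead of
    # being written to `f` directly.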
if (declare or pp == None):
t = '- '
else:
t = ""
ret_type = 'void'
args = list(m)
mdata = {}
mdata['argidx'] = []
for a in args:
if a.tag == 'return':
idx = -1
elif a.tag == 'argument':
idx = int(a.attrib['index'])
else:
continue
mdata['argidx'].append(idx)
mdata[idx] = a
if not event:
if -1 in mdata['argidx']:
t += make_type(mdata[-1].attrib['type'])
else:
t += 'void'
t += ' '
if declare or pp == None:
s = '**' + m.attrib['name'] + '** '
else:
s = ':ref:`' + m.attrib['name'] + '<class_' + cname + "_" + m.attrib['name'] + '>` '
s += '**(**'
argfound = False
for a in mdata['argidx']:
arg = mdata[a]
if a < 0:
continue
if a > 0:
s += ', '
else:
s += ' '
s += make_type(arg.attrib['type'])
if 'name' in arg.attrib:
s += ' ' + arg.attrib['name']
else:
s += ' arg' + str(a)
if 'default' in arg.attrib:
s += '=' + arg.attrib['default']
s += ' **)**'
if 'qualifiers' in m.attrib:
s += ' ' + m.attrib['qualifiers']
if (not declare):
if (pp != None):
pp.append((t, s))
else:
f.write("- " + t + " " + s + "\n")
else:
f.write(t + s + "\n")
def make_heading(title, underline):
return title + '\n' + underline * len(title) + "\n\n"
def make_rst_class(node):
name = node.attrib['name']
f = codecs.open("class_" + name.lower() + '.rst', 'wb', 'utf-8')
# Warn contributors not to edit this file directly
f.write(".. Generated automatically by doc/tools/makerst.py in Godot's source tree.\n")
f.write(".. DO NOT EDIT THIS FILE, but the " + name + ".xml source instead.\n")
f.write(".. The source is found in doc/classes or modules/<name>/doc_classes.\n\n")
f.write(".. _class_" + name + ":\n\n")
f.write(make_heading(name, '='))
if 'inherits' in node.attrib:
inh = node.attrib['inherits'].strip()
f.write('**Inherits:** ')
first = True
while (inh in classes):
if (not first):
f.write(" **<** ")
else:
first = False
f.write(make_type(inh))
inode = classes[inh]
if ('inherits' in inode.attrib):
inh = inode.attrib['inherits'].strip()
else:
inh = None
f.write("\n\n")
inherited = []
for cn in classes:
c = classes[cn]
if 'inherits' in c.attrib:
if (c.attrib['inherits'].strip() == name):
inherited.append(c.attrib['name'])
if (len(inherited)):
f.write('**Inherited By:** ')
for i in range(len(inherited)):
if (i > 0):
f.write(", ")
f.write(make_type(inherited[i]))
f.write("\n\n")
if 'category' in node.attrib:
f.write('**Category:** ' + node.attrib['category'].strip() + "\n\n")
f.write(make_heading('Brief Description', '-'))
briefd = node.find('brief_description')
if briefd != None:
f.write(rstize_text(briefd.text.strip(), name) + "\n\n")
methods = node.find('methods')
if methods != None and len(list(methods)) > 0:
f.write(make_heading('Member Functions', '-'))
ml = []
for m in list(methods):
make_method(f, node.attrib['name'], m, False, name, False, ml)
longest_t = 0
longest_s = 0
for s in ml:
sl = len(s[0])
if (sl > longest_s):
longest_s = sl
tl = len(s[1])
if (tl > longest_t):
longest_t = tl
sep = "+"
for i in range(longest_s + 2):
sep += "-"
sep += "+"
for i in range(longest_t + 2):
sep += "-"
sep += "+\n"
f.write(sep)
for s in ml:
rt = s[0]
while (len(rt) < longest_s):
rt += " "
st = s[1]
while (len(st) < longest_t):
st += " "
f.write("| " + rt + " | " + st + " |\n")
f.write(sep)
f.write('\n')
events = node.find('signals')
if events != None and len(list(events)) > 0:
f.write(make_heading('Signals', '-'))
for m in list(events):
f.write(".. _class_" + name + "_" + m.attrib['name'] + ":\n\n")
make_method(f, node.attrib['name'], m, True, name, True)
f.write('\n')
d = m.find('description')
if d == None or d.text.strip() == '':
continue
f.write(rstize_text(d.text.strip(), name))
f.write("\n\n")
f.write('\n')
members = node.find('members')
if members != None and len(list(members)) > 0:
f.write(make_heading('Member Variables', '-'))
for c in list(members):
# Leading two spaces necessary to prevent breaking the <ul>
f.write(" .. _class_" + name + "_" + c.attrib['name'] + ":\n\n")
s = '- '
s += make_type(c.attrib['type']) + ' '
s += '**' + c.attrib['name'] + '**'
if c.text.strip() != '':
s += ' - ' + rstize_text(c.text.strip(), name)
f.write(s + '\n\n')
f.write('\n')
constants = node.find('constants')
if constants != None and len(list(constants)) > 0:
f.write(make_heading('Numeric Constants', '-'))
for c in list(constants):
s = '- '
s += '**' + c.attrib['name'] + '**'
if 'value' in c.attrib:
s += ' = **' + c.attrib['value'] + '**'
if c.text.strip() != '':
s += ' --- ' + rstize_text(c.text.strip(), name)
f.write(s + '\n')
f.write('\n')
descr = node.find('description')
if descr != None and descr.text.strip() != '':
f.write(make_heading('Description', '-'))
f.write(rstize_text(descr.text.strip(), name) + "\n\n")
methods = node.find('methods')
if methods != None and len(list(methods)) > 0:
f.write(make_heading('Member Function Description', '-'))
for m in list(methods):
f.write(".. _class_" + name + "_" + m.attrib['name'] + ":\n\n")
make_method(f, node.attrib['name'], m, True, name)
f.write('\n')
d = m.find('description')
if d == None or d.text.strip() == '':
continue
f.write(rstize_text(d.text.strip(), name))
f.write("\n\n")
f.write('\n')
file_list = []
for path in input_list:
if os.path.basename(path) == 'modules':
for subdir, dirs, _ in os.walk(path):
if 'doc_classes' in dirs:
doc_dir = os.path.join(subdir, 'doc_classes')
class_file_names = [f for f in os.listdir(doc_dir) if f.endswith('.xml')]
file_list += [os.path.join(doc_dir, f) for f in class_file_names]
elif not os.path.isfile(path):
file_list += [os.path.join(path, f) for f in os.listdir(path) if f.endswith('.xml')]
elif os.path.isfile(path) and path.endswith('.xml'):
file_list.append(path)
for file in file_list:
tree = ET.parse(file)
doc = tree.getroot()
if 'version' not in doc.attrib:
print("Version missing from 'doc'")
sys.exit(255)
version = doc.attrib['version']
if doc.attrib['name'] in class_names:
continue
class_names.append(doc.attrib['name'])
classes[doc.attrib['name']] = doc
class_names.sort()
# Don't make class list for Sphinx, :toctree: handles it
# make_class_list(class_names, 2)
for cn in class_names:
c = classes[cn]
make_rst_class(c)
| mit | 2,412,963,064,706,955,000 | -983,848,176,796,479,500 | 28.574586 | 128 | 0.454885 | false |
tkaitchuck/nupic | build_system/contrib/xcode-setup-install.py | 1 | 1372 | #!/usr/bin/env python
import os
import sys
import string
doClean = ('clean' in sys.argv) or ('uninstall' in sys.argv)
rootDir = os.getcwd()
buildSystemDir = os.path.join(rootDir, 'build_system')
# Generate the configure input files.
setupCmd = 'python ' + os.path.join(buildSystemDir, 'setup.py') + ' --autogen' \
+ " --win32BuildDir '$(NTAX_BUILD_DIR)'"
print 'Running command:', setupCmd
sys.stdout.flush()
retCode = os.system(setupCmd)
if retCode != 0:
print >>sys.stderr, 'setup.py failed: Error', retCode
sys.exit(1)
buildDir = os.environ['BUILT_PRODUCTS_DIR']
buildStyle = os.environ['BUILD_STYLE']
# Build the configure command.
configureCmd = os.path.join(buildSystemDir, 'contrib', 'configure.py')
configureCmd += ' --mode=%s' % buildStyle
configureCmd += ' --builddir=%s' % buildDir
print 'Running command:', configureCmd
sys.stdout.flush()
retCode = os.system(configureCmd)
if retCode != 0:
print >>sys.stderr, 'configure failed: Error', retCode
sys.exit(1)
# Build
success = True
pushd = os.getcwd()
os.chdir(buildDir)
buildCmd = os.path.join(buildSystemDir, 'contrib', 'make.py')
if doClean: buildCmd += ' clean'
print 'Running command:', buildCmd
retCode = os.system(buildCmd)
if retCode != 0:
print >>sys.stderr, 'Build failed: Error', retCode
success = False
os.chdir(pushd)
if not success:
sys.exit(1)
| gpl-3.0 | -8,816,054,124,337,672,000 | 5,472,162,527,745,089,000 | 22.254237 | 81 | 0.691691 | false |
boegel/easybuild-easyblocks | easybuild/easyblocks/d/dolfin.py | 1 | 15555 | ##
# Copyright 2009-2020 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for DOLFIN, implemented as an easyblock
@author: Kenneth Hoste (Ghent University)
@author: Jens Timmerman (Ghent University)
"""
import glob
import os
import re
import tempfile
from distutils.version import LooseVersion
import easybuild.tools.environment as env
import easybuild.tools.toolchain as toolchain
from easybuild.easyblocks.generic.cmakepythonpackage import CMakePythonPackage
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.filetools import change_dir, remove
from easybuild.tools.modules import get_software_root, get_software_version
from easybuild.tools.run import run_cmd
from easybuild.tools.systemtools import get_shared_lib_ext
class EB_DOLFIN(CMakePythonPackage):
"""Support for building and installing DOLFIN."""
@staticmethod
def extra_options():
extra_vars = CMakePythonPackage.extra_options()
extra_vars['separate_build_dir'][0] = True
return extra_vars
def __init__(self, *args, **kwargs):
"""Initialize class variables."""
super(EB_DOLFIN, self).__init__(*args, **kwargs)
self.boost_dir = None
self.saved_configopts = None
def configure_step(self):
"""Set DOLFIN-specific configure options and configure with CMake."""
shlib_ext = get_shared_lib_ext()
# compiler flags
cflags = os.getenv('CFLAGS')
cxxflags = os.getenv('CXXFLAGS')
fflags = os.getenv('FFLAGS')
# fix for "SEEK_SET is #defined but must not be for the C++ binding of MPI. Include mpi.h before stdio.h"
if self.toolchain.mpi_family() in [toolchain.INTELMPI, toolchain.MPICH, toolchain.MPICH2, toolchain.MVAPICH2]:
cflags += " -DMPICH_IGNORE_CXX_SEEK"
cxxflags += " -DMPICH_IGNORE_CXX_SEEK"
fflags += " -DMPICH_IGNORE_CXX_SEEK"
self.cfg.update('configopts', '-DCMAKE_C_FLAGS="%s"' % cflags)
self.cfg.update('configopts', '-DCMAKE_CXX_FLAGS="%s"' % cxxflags)
self.cfg.update('configopts', '-DCMAKE_Fortran_FLAGS="%s"' % fflags)
# set correct compilers to be used at runtime
self.cfg.update('configopts', '-DMPI_C_COMPILER="$MPICC"')
self.cfg.update('configopts', '-DMPI_CXX_COMPILER="$MPICXX"')
# specify MPI library
self.cfg.update('configopts', '-DMPI_COMPILER="%s"' % os.getenv('MPICC'))
if os.getenv('MPI_LIB_SHARED') and os.getenv('MPI_INC_DIR'):
self.cfg.update('configopts', '-DMPI_LIBRARY="%s"' % os.getenv('MPI_LIB_SHARED'))
self.cfg.update('configopts', '-DMPI_INCLUDE_PATH="%s"' % os.getenv('MPI_INC_DIR'))
else:
raise EasyBuildError("MPI_LIB_SHARED or MPI_INC_DIR not set, could not determine MPI-related paths.")
# save config options to reuse them later (e.g. for sanity check commands)
self.saved_configopts = self.cfg['configopts']
# make sure that required dependencies are loaded
deps = ['Boost', 'CGAL', 'ParMETIS', 'PETSc', 'Python',
'SCOTCH', 'SLEPc', 'SuiteSparse', 'Trilinos', 'zlib']
# Armadillo was replaced by Eigen in v1.3
if LooseVersion(self.version) < LooseVersion('1.3'):
deps.append('Armadillo')
else:
deps.append('Eigen')
# UFC has been integrated into FFC in v1.4, cfr. https://bitbucket.org/fenics-project/ufc-deprecated
if LooseVersion(self.version) < LooseVersion('1.4'):
deps.append('UFC')
# PLY, petsc4py, slepc4py are required since v1.5
if LooseVersion(self.version) >= LooseVersion('1.5'):
deps.extend(['petsc4py', 'PLY', 'slepc4py'])
# pybind11 is required to build Python bindings since v2018.1
if LooseVersion(self.version) >= LooseVersion('2018.1'):
deps.append('pybind11')
depsdict = {}
for dep in deps:
deproot = get_software_root(dep)
if not deproot:
raise EasyBuildError("Dependency %s not available.", dep)
else:
depsdict.update({dep: deproot})
# zlib
self.cfg.update('configopts', '-DZLIB_INCLUDE_DIR=%s' % os.path.join(depsdict['zlib'], "include"))
self.cfg.update('configopts', '-DZLIB_LIBRARY=%s' % os.path.join(depsdict['zlib'], "lib", "libz.a"))
# set correct openmp options
openmp = self.toolchain.get_flag('openmp')
self.cfg.update('configopts', '-DOpenMP_CXX_FLAGS="%s"' % openmp)
self.cfg.update('configopts', '-DOpenMP_C_FLAGS="%s"' % openmp)
# Boost config parameters
self.cfg.update('configopts', "-DBOOST_INCLUDEDIR=%s/include" % depsdict['Boost'])
self.cfg.update('configopts', "-DBoost_DEBUG=ON -DBOOST_ROOT=%s" % depsdict['Boost'])
self.boost_dir = depsdict['Boost']
# UFC and Armadillo config params
if 'UFC' in depsdict:
self.cfg.update('configopts', "-DUFC_DIR=%s" % depsdict['UFC'])
if 'Armadillo' in depsdict:
self.cfg.update('configopts', "-DARMADILLO_DIR:PATH=%s " % depsdict['Armadillo'])
# Eigen config params
if 'Eigen' in depsdict:
self.cfg.update('configopts', "-DEIGEN3_INCLUDE_DIR=%s " % os.path.join(depsdict['Eigen'], 'include'))
# specify Python paths
if LooseVersion(self.version) < LooseVersion('2018.1'):
python = depsdict['Python']
pyver = '.'.join(get_software_version('Python').split('.')[:2])
self.cfg.update('configopts', "-DPYTHON_INCLUDE_PATH=%s/include/python%s" % (python, pyver))
self.cfg.update('configopts', "-DPYTHON_LIBRARY=%s/lib/libpython%s.%s" % (python, pyver, shlib_ext))
# SuiteSparse config params
suitesparse = depsdict['SuiteSparse']
umfpack_params = [
'-DUMFPACK_DIR="%(sp)s/UMFPACK"',
'-DUMFPACK_INCLUDE_DIRS="%(sp)s/UMFPACK/include;%(sp)s/UFconfig"',
'-DAMD_DIR="%(sp)s/UMFPACK"',
'-DCHOLMOD_DIR="%(sp)s/CHOLMOD"',
'-DCHOLMOD_INCLUDE_DIRS="%(sp)s/CHOLMOD/include;%(sp)s/UFconfig"',
'-DUFCONFIG_DIR="%(sp)s/UFconfig"',
'-DCAMD_LIBRARY:PATH="%(sp)s/CAMD/lib/libcamd.a"',
'-DCCOLAMD_LIBRARY:PATH="%(sp)s/CCOLAMD/lib/libccolamd.a"',
'-DCOLAMD_LIBRARY:PATH="%(sp)s/COLAMD/lib/libcolamd.a"'
]
self.cfg.update('configopts', ' '.join(umfpack_params) % {'sp': suitesparse})
# ParMETIS and SCOTCH
self.cfg.update('configopts', '-DPARMETIS_DIR="%s"' % depsdict['ParMETIS'])
self.cfg.update('configopts', '-DSCOTCH_DIR="%s" -DSCOTCH_DEBUG:BOOL=ON' % depsdict['SCOTCH'])
# BLACS and LAPACK
self.cfg.update('configopts', '-DBLAS_LIBRARIES:PATH="%s"' % os.getenv('LIBBLAS'))
self.cfg.update('configopts', '-DLAPACK_LIBRARIES:PATH="%s"' % os.getenv('LIBLAPACK'))
# CGAL
self.cfg.update('configopts', '-DCGAL_DIR:PATH="%s"' % depsdict['CGAL'])
# PETSc
        # need to specify PETSC_ARCH explicitly (env var alone is not sufficient)
for env_var in ["PETSC_DIR", "PETSC_ARCH"]:
val = os.getenv(env_var)
if val:
self.cfg.update('configopts', '-D%s=%s' % (env_var, val))
# MTL4
if 'MTL4' in depsdict:
self.cfg.update('configopts', '-DMTL4_DIR:PATH="%s"' % depsdict['MTL4'])
# SUNDIALS
if 'SUNDIALS' in depsdict:
self.cfg.update('configopts', '-DSUNDIALS_DIR:PATH="%s"' % depsdict['SUNDIALS'])
# configure
out = super(EB_DOLFIN, self).configure_step()
# make sure that all optional packages are found
not_found_re = re.compile("The following optional packages could not be found")
if not_found_re.search(out):
raise EasyBuildError("Optional packages could not be found, this should not happen...")
# enable verbose build, so we have enough information if something goes wrong
self.cfg.update('buildopts', "VERBOSE=1")
def test_step(self):
"""Run DOLFIN demos by means of test."""
if self.cfg['runtest']:
# set cache/error dirs for Instant
tmpdir = tempfile.mkdtemp()
instant_cache_dir = os.path.join(tmpdir, '.instant', 'cache')
instant_error_dir = os.path.join(tmpdir, '.instant', 'error')
try:
os.makedirs(instant_cache_dir)
os.makedirs(instant_error_dir)
except OSError as err:
raise EasyBuildError("Failed to create Instant cache/error dirs: %s", err)
env_vars = [
('INSTANT_CACHE_DIR', instant_cache_dir),
('INSTANT_ERROR_DIR', instant_error_dir),
]
env_var_cmds = ' && '.join(['export %s="%s"' % (var, val) for (var, val) in env_vars])
cpp_cmds = [
env_var_cmds,
"cd %(dir)s",
]
if LooseVersion(self.version) < LooseVersion('1.1'):
cpp_cmds.append("cmake . %s" % self.saved_configopts)
cpp_cmds.extend([
"make VERBOSE=1",
"./demo_%(name)s",
"cd -",
])
cmd_template_cpp = " && ".join(cpp_cmds)
# list based on demos available for DOLFIN v1.0.0
pde_demos = ['biharmonic', 'cahn-hilliard', 'hyperelasticity', 'mixed-poisson',
'navier-stokes', 'poisson', 'stokes-iterative']
if LooseVersion(self.version) < LooseVersion('1.1'):
demos = [os.path.join('demo', 'la', 'eigenvalue')] + [os.path.join('demo', 'pde', x) for x in pde_demos]
else:
# verified with v1.6.0
demos = [os.path.join('demo', 'documented', x) for x in pde_demos]
# construct commands
cmds = [tmpl % {'dir': os.path.join(d, subdir), 'name': os.path.basename(d)}
for d in demos for (tmpl, subdir) in [(cmd_template_cpp, 'cpp')]]
# exclude Python tests for now, because they 'hang' sometimes (unclear why)
# they can be reinstated once run_cmd (or its equivalent) has support for timeouts
# see https://github.com/easybuilders/easybuild-framework/issues/581
# test command templates
# cmd_template_python = " && ".join([
# env_var_cmds,
# "cd %(dir)s",
# "python demo_%(name)s.py",
# "cd -",
# ])
# for (tmpl, subdir) in [(cmd_template_python, 'python'), (cmd_template_cpp, 'cpp')]
# subdomains-poisson has no C++ get_version, only Python
# Python tests excluded, see above
# name = 'subdomains-poisson'
# path = os.path.join('demo', 'pde', name, 'python')
# cmds += [cmd_template_python % {'dir': path, 'name': name}]
# supply empty argument to each command
for cmd in cmds:
run_cmd(cmd, log_all=True)
# clean up temporary dir
remove(tmpdir)
def install_step(self):
"""Custom install procedure for DOLFIN: also install Python bindings."""
super(EB_DOLFIN, self).install_step()
# avoid that pip (ab)uses $HOME/.cache/pip
# cfr. https://pip.pypa.io/en/stable/reference/pip_install/#caching
env.setvar('XDG_CACHE_HOME', tempfile.gettempdir())
self.log.info("Using %s as pip cache directory", os.environ['XDG_CACHE_HOME'])
if LooseVersion(self.version) >= LooseVersion('2018.1'):
# see https://bitbucket.org/fenics-project/dolfin/issues/897/switch-from-swig-to-pybind11-for-python
# and https://github.com/FEniCS/dolfin/blob/master/python/README.rst
cwd = change_dir(os.path.join(self.start_dir, 'python'))
env.setvar('CMAKE_PREFIX_PATH', self.installdir)
env.setvar('PYBIND11_DIR', get_software_root('pybind11'))
run_cmd("pip install --prefix %s ." % self.installdir)
change_dir(cwd)
def post_install_step(self):
"""Post install actions: extend RPATH paths in .so libraries part of the DOLFIN Python package."""
if LooseVersion(self.version) >= LooseVersion('1.1'):
# cfr. https://github.com/hashdist/hashstack/blob/master/pkgs/dolfin/dolfin.yaml (look for patchelf)
# determine location of libdolfin.so
dolfin_lib = 'libdolfin.so'
dolfin_libdir = None
for libdir in ['lib', 'lib64']:
if os.path.exists(os.path.join(self.installdir, libdir, dolfin_lib)):
dolfin_libdir = os.path.join(self.installdir, libdir)
break
if dolfin_libdir is None:
raise EasyBuildError("Failed to locate %s", dolfin_lib)
for pylibdir in self.all_pylibdirs:
libs = glob.glob(os.path.join(self.installdir, pylibdir, 'dolfin', 'cpp', '_*.so'))
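                # extend each extension module's RPATH so the generated _*.so Python bindings can locate libdolfin.so at runtime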
for lib in libs:
out, _ = run_cmd("patchelf --print-rpath %s" % lib, simple=False, log_all=True)
curr_rpath = out.strip()
cmd = "patchelf --set-rpath '%s:%s' %s" % (curr_rpath, dolfin_libdir, lib)
run_cmd(cmd, log_all=True)
def make_module_extra(self):
"""Set extra environment variables for DOLFIN."""
txt = super(EB_DOLFIN, self).make_module_extra()
# Dolfin needs to find Boost
# check whether boost_dir is defined for compatibility with --module-only
if self.boost_dir:
txt += self.module_generator.set_environment('BOOST_DIR', self.boost_dir)
envvars = ['I_MPI_CXX', 'I_MPI_CC']
for envvar in envvars:
envar_val = os.getenv(envvar)
# if environment variable is set, also set it in module
if envar_val:
txt += self.module_generator.set_environment(envvar, envar_val)
return txt
def sanity_check_step(self):
"""Custom sanity check for DOLFIN."""
# custom sanity check paths
custom_paths = {
'files': ['bin/dolfin-%s' % x for x in ['version', 'convert', 'order', 'plot']] + ['include/dolfin.h'],
'dirs': ['%s/dolfin' % self.pylibdir],
}
super(EB_DOLFIN, self).sanity_check_step(custom_paths=custom_paths)
| gpl-2.0 | 69,954,367,247,322,330 | 6,015,095,828,320,108,000 | 42.328691 | 120 | 0.595821 | false |
cpollard1001/FreeCAD_sf_master | src/Mod/TemplatePyMod/MengerSponge.py | 27 | 3238 | # Script to create a Menger sponge
# (c) 2012 Werner Mayer LGPL
# The script is based on the work of daxmick at
# http://forum.freecadweb.org/viewtopic.php?f=3&t=2307
import threading
import Mesh, MeshGui
from FreeCAD import Base
# Create a global mesh and make copies of it
# This makes the algorithm faster by ~60%.
box = Mesh.createBox(1,1,1)
# Create a Box and place it at coords (x,y,z)
def PlaceBox(x,y,z):
global box
mbox=box.copy()
mbox.translate(x,y,z)
return mbox
def Sierpinski(level,x0,y0,z0):
#print threading.current_thread().name
boxnums = pow(3,level)
thirds = boxnums / 3
twothirds = thirds * 2
if(level == 0):
rangerx = [x0]
rangery = [y0]
rangerz = [z0]
else:
rangerx = [ x0, x0 + thirds, x0 + twothirds ]
rangery = [ y0, y0 + thirds, y0 + twothirds ]
rangerz = [ z0, z0 + thirds, z0 + twothirds ]
block = 1
skip=[5,11,13,14,15,17,23]
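	# of the 27 sub-cubes in the 3x3x3 grid, these block numbers (nested x/y/z loop order below)
	# are the 6 face centres plus the body centre, which are left out to form the Menger sponge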
mesh=Mesh.Mesh()
for i in rangerx:
for j in rangery:
for k in rangerz:
if block not in skip:
if(level > 0):
mesh.addMesh(Sierpinski(level-1,i,j,k))
else:
mesh.addMesh(PlaceBox(i,j,k))
block+=1
return mesh
### Multi-threaded ###
class MengerThread(threading.Thread):
def __init__(self,args):
self.args=args
self.mesh=Mesh.Mesh()
threading.Thread.__init__(self)
def run(self):
for i in self.args:
self.mesh.addMesh(Sierpinski(*i))
def makeMengerSponge_mt(level=3,x0=0,y0=0,z0=0):
"""
Is much slower than makeMengerSponge!!! :(
"""
if level == 0:
mesh=Sierpinski(level,x0,y0,z0)
Mesh.show(mesh)
return
boxnums = pow(3,level)
thirds = boxnums / 3
twothirds = thirds * 2
rangerx = [ x0, x0 + thirds, x0 + twothirds ]
rangery = [ y0, y0 + thirds, y0 + twothirds ]
rangerz = [ z0, z0 + thirds, z0 + twothirds ]
block = 1
skip=[5,11,13,14,15,17,23]
# collect the arguments for the algorithm in a list
args=[]
for i in rangerx:
for j in rangery:
for k in rangerz:
if block not in skip:
args.append((level-1,i,j,k))
block+=1
numJobs = 4
threads=[]
while numJobs > 0:
size = len(args)
count = size / numJobs
numJobs-=1
thr=MengerThread(args[:count])
threads.append(thr)
args=args[count:]
print "Number of threads: %i" % (len(threads))
for thr in threads:
thr.start()
for thr in threads:
thr.join()
mesh=Mesh.Mesh()
for thr in threads:
mesh.addMesh(thr.mesh)
del thr.mesh
print mesh
mesh.removeDuplicatedPoints()
mesh.removeFacets(mesh.getInternalFacets())
mesh.rebuildNeighbourHood()
print "Mesh is solid: %s" % (mesh.isSolid())
Mesh.show(mesh)
### Single-threaded ###
def makeMengerSponge(level=3,x0=0,y0=0,z0=0):
mesh=Sierpinski(level,x0,y0,z0)
mesh.removeDuplicatedPoints()
mesh.removeFacets(mesh.getInternalFacets())
mesh.rebuildNeighbourHood()
print "Mesh is solid: %s" % (mesh.isSolid())
Mesh.show(mesh)
| lgpl-2.1 | -6,433,063,872,184,541,000 | -2,416,939,493,025,442,300 | 24.904 | 57 | 0.583076 | false |
mozbhearsum/balrog | auslib/blobs/systemaddons.py | 1 | 4440 | from auslib.AUS import isForbiddenUrl
from auslib.blobs.base import Blob
from auslib.errors import BadDataError
class SystemAddonsBlob(Blob):
jsonschema = "systemaddons.yml"
def __init__(self, **kwargs):
Blob.__init__(self, **kwargs)
if "schema_version" not in self:
self["schema_version"] = 5000
def getAddonsForPlatform(self, platform):
for v in self.get("addons", {}):
platforms = self["addons"].get(v, {}).get("platforms", {})
if platform in platforms or "default" in platforms:
yield v
def getResolvedPlatform(self, addon, platform):
platforms = self.get("addons", {}).get(addon, {}).get("platforms", {})
if platform in platforms:
return self.get("addons", {}).get(addon, {}).get("platforms", {}).get(platform, {}).get("alias", platform)
if "default" in platforms:
return "default"
raise BadDataError("No platform '%s' or default in addon '%s'", platform, addon)
def getPlatformData(self, addon, platform):
platform = self.getResolvedPlatform(addon, platform)
return self.get("addons", {}).get(addon, {}).get("platforms", {}).get(platform)
def shouldServeUpdate(self, updateQuery):
# SystemAddon updates should always be returned. It is the responsibility
# of the client to decide whether or not any action needs to be taken,
# similar to GMP
return True
# If there are are no updates, we have a special response for SystemAddons
# blobs. We return <updates></updates>, without the addons tags.
def hasUpdates(self, updateQuery, whitelistedDomains):
buildTarget = updateQuery["buildTarget"]
for addon in sorted(self.getAddonsForPlatform(buildTarget)):
# Checking if the addon update is to be served
platformData = self.getPlatformData(addon, buildTarget)
url = platformData["fileUrl"]
# There might be no updates even if we have response products if
# they are not served from whitelisted domains
if isForbiddenUrl(url, updateQuery["product"], whitelistedDomains):
continue
return True
return False
# Because specialForceHosts is only relevant to our own internal servers,
# and these type of updates are always served externally, we don't process
# them in SystemAddon blobs, similar to GMP.
def getInnerXML(self, updateQuery, update_type, whitelistedDomains, specialForceHosts):
# In case we have an uninstall blob, we won't have the addons section
if self.get("addons") is None:
return []
buildTarget = updateQuery["buildTarget"]
addonXML = []
for addon in sorted(self.getAddonsForPlatform(buildTarget)):
addonInfo = self["addons"][addon]
platformData = self.getPlatformData(addon, buildTarget)
url = platformData["fileUrl"]
if isForbiddenUrl(url, updateQuery["product"], whitelistedDomains):
continue
addonXML.append(
' <addon id="%s" URL="%s" hashFunction="%s" hashValue="%s" size="%s" version="%s"/>'
% (addon, url, self["hashFunction"], platformData["hashValue"], platformData["filesize"], addonInfo["version"])
)
return addonXML
def getInnerHeaderXML(self, updateQuery, update_type, whitelistedDomains, specialForceHosts):
if self.get("uninstall", False) or self.hasUpdates(updateQuery, whitelistedDomains):
return " <addons>"
else:
return ""
def getInnerFooterXML(self, updateQuery, update_type, whitelistedDomains, specialForceHosts):
if self.get("uninstall", False) or self.hasUpdates(updateQuery, whitelistedDomains):
return " </addons>"
else:
return ""
def containsForbiddenDomain(self, product, whitelistedDomains):
"""Returns True if the blob contains any file URLs that contain a
domain that we're not allowed to serve updates to."""
for addon in self.get("addons", {}).values():
for platform in addon.get("platforms", {}).values():
if "fileUrl" in platform:
if isForbiddenUrl(platform["fileUrl"], product, whitelistedDomains):
return True
return False
| mpl-2.0 | -1,377,177,440,817,844,000 | 4,464,723,737,473,493,500 | 43.848485 | 127 | 0.62973 | false |
coreycb/horizon | openstack_dashboard/dashboards/project/networks/subnets/tables.py | 5 | 5494 | # Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from horizon import exceptions
from horizon import tables
from horizon.utils import memoized
from openstack_dashboard import api
from openstack_dashboard import policy
from openstack_dashboard.usage import quotas
LOG = logging.getLogger(__name__)
class CheckNetworkEditable(object):
"""Mixin class to determine the specified network is editable."""
def allowed(self, request, datum=None):
# Only administrator is allowed to create and manage subnets
# on shared networks.
network = self.table._get_network()
if network.shared:
return False
return True
class SubnetPolicyTargetMixin(policy.PolicyTargetMixin):
def get_policy_target(self, request, datum=None):
policy_target = super(SubnetPolicyTargetMixin, self)\
.get_policy_target(request, datum)
network = self.table._get_network()
# neutron switched policy target values, we'll support both
policy_target["network:tenant_id"] = network.tenant_id
policy_target["network:project_id"] = network.tenant_id
return policy_target
class DeleteSubnet(SubnetPolicyTargetMixin, CheckNetworkEditable,
tables.DeleteAction):
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Delete Subnet",
u"Delete Subnets",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Deleted Subnet",
u"Deleted Subnets",
count
)
policy_rules = (("network", "delete_subnet"),)
def delete(self, request, obj_id):
try:
api.neutron.subnet_delete(request, obj_id)
except Exception:
msg = _('Failed to delete subnet %s') % obj_id
LOG.info(msg)
network_id = self.table.kwargs['network_id']
redirect = reverse('horizon:project:networks:detail',
args=[network_id])
exceptions.handle(request, msg, redirect=redirect)
class CreateSubnet(SubnetPolicyTargetMixin, CheckNetworkEditable,
tables.LinkAction):
name = "create"
verbose_name = _("Create Subnet")
url = "horizon:project:networks:addsubnet"
classes = ("ajax-modal",)
icon = "plus"
policy_rules = (("network", "create_subnet"),)
def get_link_url(self, datum=None):
network_id = self.table.kwargs['network_id']
return reverse(self.url, args=(network_id,))
def allowed(self, request, datum=None):
usages = quotas.tenant_quota_usages(request)
if usages['subnets']['available'] <= 0:
if 'disabled' not in self.classes:
self.classes = [c for c in self.classes] + ['disabled']
self.verbose_name = _('Create Subnet (Quota exceeded)')
else:
self.verbose_name = _('Create Subnet')
self.classes = [c for c in self.classes if c != 'disabled']
return True
class UpdateSubnet(SubnetPolicyTargetMixin, CheckNetworkEditable,
tables.LinkAction):
name = "update"
verbose_name = _("Edit Subnet")
url = "horizon:project:networks:editsubnet"
classes = ("ajax-modal",)
icon = "pencil"
policy_rules = (("network", "update_subnet"),)
def get_link_url(self, subnet):
network_id = self.table.kwargs['network_id']
return reverse(self.url, args=(network_id, subnet.id))
class SubnetsTable(tables.DataTable):
name = tables.WrappingColumn(
"name_or_id",
verbose_name=_("Name"),
link='horizon:project:networks:subnets:detail')
cidr = tables.Column("cidr", verbose_name=_("Network Address"))
ip_version = tables.Column("ipver_str", verbose_name=_("IP Version"))
gateway_ip = tables.Column("gateway_ip", verbose_name=_("Gateway IP"))
failure_url = reverse_lazy('horizon:project:networks:index')
@memoized.memoized_method
def _get_network(self):
try:
network_id = self.kwargs['network_id']
network = api.neutron.network_get(self.request, network_id)
network.set_id_as_name_if_empty(length=0)
except Exception:
network = None
msg = _('Unable to retrieve details for network "%s".') \
% (network_id)
exceptions.handle(self.request, msg,)
return network
class Meta(object):
name = "subnets"
verbose_name = _("Subnets")
table_actions = (CreateSubnet, DeleteSubnet, tables.FilterAction,)
row_actions = (UpdateSubnet, DeleteSubnet)
hidden_title = False
| apache-2.0 | 8,566,955,739,916,614,000 | -2,774,945,651,249,516,500 | 33.3375 | 78 | 0.639789 | false |
sanyaade-teachings/gyp | test/intermediate_dir/gyptest-intermediate-dir.py | 100 | 1400 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that targets have independent INTERMEDIATE_DIRs.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('test.gyp', chdir='src')
test.build('test.gyp', 'target1', chdir='src')
# Check stuff exists.
intermediate_file1 = test.read('src/outfile.txt')
test.must_contain(intermediate_file1, 'target1')
shared_intermediate_file1 = test.read('src/shared_outfile.txt')
test.must_contain(shared_intermediate_file1, 'shared_target1')
test.run_gyp('test2.gyp', chdir='src')
# Force the shared intermediate to be rebuilt.
test.sleep()
test.touch('src/shared_infile.txt')
test.build('test2.gyp', 'target2', chdir='src')
# Check INTERMEDIATE_DIR file didn't get overwritten but SHARED_INTERMEDIATE_DIR
# file did.
intermediate_file2 = test.read('src/outfile.txt')
test.must_contain(intermediate_file1, 'target1')
test.must_contain(intermediate_file2, 'target2')
shared_intermediate_file2 = test.read('src/shared_outfile.txt')
if shared_intermediate_file1 != shared_intermediate_file2:
test.fail_test(shared_intermediate_file1 + ' != ' + shared_intermediate_file2)
test.must_contain(shared_intermediate_file1, 'shared_target2')
test.must_contain(shared_intermediate_file2, 'shared_target2')
test.pass_test()
| bsd-3-clause | 258,478,234,130,149,760 | 1,788,669,679,785,542,700 | 30.818182 | 80 | 0.752857 | false |
HaiFangHui/cf_log_parser | models.py | 1 | 2248 | import gzip
import re
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy import Column, Integer, String, DateTime, Text, func, desc
from config import config
db_spec = config.get('DATABASE', 'DB_SPEC')
engine = create_engine(db_spec)
session = scoped_session(sessionmaker(bind=engine))
Base = declarative_base()
class LogEntry(Base):
__tablename__ = 'log_entries'
id = Column(Integer, primary_key=True)
logtime = Column(String(200))
edge = Column(String(200))
bytesent = Column(Integer)
cip = Column(String(100))
method = Column(String(20))
host = Column(String(100))
uri = Column(String(1024))
status = Column(String(10))
creferrer = Column(Text)
useragent = Column(Text)
cs_uri_query = Column(Text)
cookie = Column(Text)
x_edge_result_type = Column(Text)
x_edge_request_id = Column(Text)
x_host_header = Column(String(100))
protocol = Column(String)
cs_bytes = Column(Integer)
time_taken = Column(String)
def load_from(self, line):
fields = line.split("\t")
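        # assumes the tab-separated CloudFront (W3C extended) access log field order:
        # date, time, x-edge-location, sc-bytes, c-ip, cs-method, cs(Host), cs-uri-stem,
        # sc-status, cs(Referer), cs(User-Agent), cs-uri-query, cs(Cookie),
        # x-edge-result-type, x-edge-request-id, x-host-header, cs-protocol, cs-bytes, time-taken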
self.logtime = fields[0] + ' ' + fields[1]
self.edge = fields[2]
self.bytesent = fields[3]
self.cip = fields[4]
self.method = fields[5]
self.host = fields[6]
self.uri = fields[7]
self.status = fields[8]
self.creferrer = fields[9]
self.useragent = fields[10]
self.cs_uri_query = fields[11]
self.cookie = fields[12]
self.x_edge_result_type = fields[13]
self.x_edge_result_id = fields[14]
self.x_host_header = fields[15]
self.protocol = fields[16]
self.cs_bytes = fields[17]
self.time_taken = fields[18]
return self
class LogFile(Base):
__tablename__ = 'log_files'
id = Column(Integer, primary_key=True)
filename = Column(String(100), unique=True)
def parse_log_data(data):
    for line in data.splitlines():
        line = line.strip()
        if not line or re.search('^#', line):
            # skip blank lines and the W3C '#Version' / '#Fields' header lines
            continue
        log_entry = LogEntry()
        log_entry.load_from(line)
        session.add(log_entry)
| mit | 1,602,207,679,745,610,000 | 3,788,768,609,755,495,400 | 27.1 | 74 | 0.620107 | false |
mindnervestech/mnrp | addons/l10n_be/__openerp__.py | 50 | 3750 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Belgium - Accounting',
'version': '1.1',
'category': 'Localization/Account Charts',
'description': """
This is the base module to manage the accounting chart for Belgium in OpenERP.
==============================================================================
After installing this module, the Configuration wizard for accounting is launched.
* We have the account templates which can be helpful to generate Charts of Accounts.
* On that particular wizard, you will be asked to pass the name of the company,
the chart template to follow, the no. of digits to generate, the code for your
account and bank account, currency to create journals.
Thus, the pure copy of Chart Template is generated.
Wizards provided by this module:
--------------------------------
* Partner VAT Intra: Enlist the partners with their related VAT and invoiced
amounts. Prepares an XML file format.
**Path to access :** Invoicing/Reporting/Legal Reports/Belgium Statements/Partner VAT Intra
* Periodical VAT Declaration: Prepares an XML file for Vat Declaration of
the Main company of the User currently Logged in.
**Path to access :** Invoicing/Reporting/Legal Reports/Belgium Statements/Periodical VAT Declaration
* Annual Listing Of VAT-Subjected Customers: Prepares an XML file for Vat
Declaration of the Main company of the User currently Logged in Based on
Fiscal year.
**Path to access :** Invoicing/Reporting/Legal Reports/Belgium Statements/Annual Listing Of VAT-Subjected Customers
""",
'author': 'Noviat & OpenERP SA',
'depends': [
'account',
'base_vat',
'base_iban',
'account_chart',
'l10n_be_coda',
'l10n_multilang',
],
'data': [
'account_financial_report.xml',
'account_pcmn_belgium.xml',
'account_tax_code_template.xml',
'account_chart_template.xml',
'account_chart_template.yml',
'account_tax_template.xml',
'wizard/l10n_be_account_vat_declaration_view.xml',
'wizard/l10n_be_vat_intra_view.xml',
'wizard/l10n_be_partner_vat_listing.xml',
'wizard/account_wizard.xml',
'l10n_be_sequence.xml',
'l10n_be_reports.xml',
'fiscal_templates.xml',
'account_fiscal_position_tax_template.xml',
'security/ir.model.access.csv',
'views/report_vatintraprint.xml',
'views/report_vatpartnerlisting.xml',
],
'demo': [],
'installable': True,
'website': 'https://www.odoo.com/page/accounting',
'images': ['images/1_config_chart_l10n_be.jpeg','images/2_l10n_be_chart.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 5,506,807,697,953,861,000 | 7,560,207,964,811,362,000 | 41.134831 | 123 | 0.626667 | false |
SlimRoms/android_external_chromium_org | chrome/app/theme/PRESUBMIT.py | 121 | 1455 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for Chromium theme resources.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl/git cl, and see
http://www.chromium.org/developers/web-development-style-guide for the rules
we're checking against here.
"""
def CheckChangeOnUpload(input_api, output_api):
return _CommonChecks(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return _CommonChecks(input_api, output_api)
def _CommonChecks(input_api, output_api):
"""Checks common to both upload and commit."""
results = []
resources = input_api.os_path.join(input_api.PresubmitLocalPath(),
'../../../ui/resources')
# List of paths with their associated scale factor. This is used to verify
# that the images modified in one are the correct scale of the other.
path_scales = [
[(100, 'default_100_percent/'), (200, 'default_200_percent/')],
]
import sys
old_path = sys.path
try:
sys.path = [resources] + old_path
from resource_check import resource_scale_factors
for paths in path_scales:
results.extend(resource_scale_factors.ResourceScaleFactors(
input_api, output_api, paths).RunChecks())
finally:
sys.path = old_path
return results
| bsd-3-clause | 8,272,020,757,150,270,000 | -2,745,119,150,555,431,000 | 29.957447 | 76 | 0.720962 | false |
SlimRoms/android_external_chromium_org | chrome/common/extensions/docs/server2/availability_finder_test.py | 7 | 15139 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import unittest
import api_schema_graph
from availability_finder import AvailabilityFinder, AvailabilityInfo
from branch_utility import BranchUtility, ChannelInfo
from compiled_file_system import CompiledFileSystem
from fake_host_file_system_provider import FakeHostFileSystemProvider
from fake_url_fetcher import FakeUrlFetcher
from host_file_system_iterator import HostFileSystemIterator
from mock_function import MockFunction
from object_store_creator import ObjectStoreCreator
from test_data.canned_data import (CANNED_API_FILE_SYSTEM_DATA, CANNED_BRANCHES)
from test_data.object_level_availability.tabs import TABS_SCHEMA_BRANCHES
from test_util import Server2Path
TABS_UNMODIFIED_VERSIONS = (16, 20, 23, 24)
class AvailabilityFinderTest(unittest.TestCase):
def setUp(self):
self._branch_utility = BranchUtility(
os.path.join('branch_utility', 'first.json'),
os.path.join('branch_utility', 'second.json'),
FakeUrlFetcher(Server2Path('test_data')),
ObjectStoreCreator.ForTest())
api_fs_creator = FakeHostFileSystemProvider(CANNED_API_FILE_SYSTEM_DATA)
self._node_fs_creator = FakeHostFileSystemProvider(TABS_SCHEMA_BRANCHES)
def create_availability_finder(host_fs_creator):
test_object_store = ObjectStoreCreator.ForTest()
return AvailabilityFinder(
self._branch_utility,
CompiledFileSystem.Factory(test_object_store),
HostFileSystemIterator(host_fs_creator,
self._branch_utility),
host_fs_creator.GetTrunk(),
test_object_store)
self._avail_finder = create_availability_finder(api_fs_creator)
self._node_avail_finder = create_availability_finder(self._node_fs_creator)
# Imitate the actual SVN file system by incrementing the stats for paths
# where an API schema has changed.
last_stat = type('last_stat', (object,), {'val': 0})
def stat_paths(file_system, channel_info):
if channel_info.version not in TABS_UNMODIFIED_VERSIONS:
last_stat.val += 1
# HACK: |file_system| is a MockFileSystem backed by a TestFileSystem.
# Increment the TestFileSystem stat count.
file_system._file_system.IncrementStat(by=last_stat.val)
# Continue looping. The iterator will stop after 'trunk' automatically.
return True
# Use the HostFileSystemIterator created above to change global stat values
# for the TestFileSystems that it creates.
self._node_avail_finder._file_system_iterator.Ascending(
# The earliest version represented with the tabs' test data is 13.
self._branch_utility.GetStableChannelInfo(13),
stat_paths)
def testGraphOptimization(self):
# Keep track of how many times the APISchemaGraph constructor is called.
original_constructor = api_schema_graph.APISchemaGraph
mock_constructor = MockFunction(original_constructor)
api_schema_graph.APISchemaGraph = mock_constructor
try:
# The test data includes an extra branch where the API does not exist.
num_versions = len(TABS_SCHEMA_BRANCHES) - 1
# We expect an APISchemaGraph to be created only when an API schema file
# has different stat data from the previous version's schema file.
num_graphs_created = num_versions - len(TABS_UNMODIFIED_VERSIONS)
# Run the logic for object-level availability for an API.
self._node_avail_finder.GetAPINodeAvailability('tabs')
self.assertTrue(*api_schema_graph.APISchemaGraph.CheckAndReset(
num_graphs_created))
finally:
# Ensure that the APISchemaGraph constructor is reset to be the original
# constructor.
api_schema_graph.APISchemaGraph = original_constructor
def testGetAPIAvailability(self):
    # Key: Using 'channel' (e.g. 'beta') to represent an availability listing
    # for an API in a _features.json file, and using |channel| (e.g. |dev|) to
    # represent the development channel, or phase of development, where an
    # API's availability is being checked.
# Testing APIs with predetermined availability.
self.assertEqual(
AvailabilityInfo(ChannelInfo('trunk', 'trunk', 'trunk')),
self._avail_finder.GetAPIAvailability('jsonTrunkAPI'))
self.assertEqual(
AvailabilityInfo(ChannelInfo('dev', CANNED_BRANCHES[28], 28)),
self._avail_finder.GetAPIAvailability('jsonDevAPI'))
self.assertEqual(
AvailabilityInfo(ChannelInfo('beta', CANNED_BRANCHES[27], 27)),
self._avail_finder.GetAPIAvailability('jsonBetaAPI'))
self.assertEqual(
AvailabilityInfo(ChannelInfo('stable', CANNED_BRANCHES[20], 20)),
self._avail_finder.GetAPIAvailability('jsonStableAPI'))
# Testing a whitelisted API.
self.assertEquals(
AvailabilityInfo(ChannelInfo('beta', CANNED_BRANCHES[27], 27)),
self._avail_finder.GetAPIAvailability('declarativeWebRequest'))
# Testing APIs found only by checking file system existence.
self.assertEquals(
AvailabilityInfo(ChannelInfo('stable', CANNED_BRANCHES[23], 23)),
self._avail_finder.GetAPIAvailability('windows'))
self.assertEquals(
AvailabilityInfo(ChannelInfo('stable', CANNED_BRANCHES[18], 18)),
self._avail_finder.GetAPIAvailability('tabs'))
self.assertEquals(
AvailabilityInfo(ChannelInfo('stable', CANNED_BRANCHES[18], 18)),
self._avail_finder.GetAPIAvailability('input.ime'))
# Testing API channel existence for _api_features.json.
# Listed as 'dev' on |beta|, 'dev' on |dev|.
self.assertEquals(
AvailabilityInfo(ChannelInfo('dev', CANNED_BRANCHES[28], 28)),
self._avail_finder.GetAPIAvailability('systemInfo.stuff'))
# Listed as 'stable' on |beta|.
self.assertEquals(
AvailabilityInfo(
ChannelInfo('beta', CANNED_BRANCHES[27], 27),
scheduled=28),
self._avail_finder.GetAPIAvailability('systemInfo.cpu'))
# Testing API channel existence for _manifest_features.json.
# Listed as 'trunk' on all channels.
self.assertEquals(
AvailabilityInfo(ChannelInfo('trunk', 'trunk', 'trunk')),
self._avail_finder.GetAPIAvailability('sync'))
# No records of API until |trunk|.
self.assertEquals(
AvailabilityInfo(ChannelInfo('trunk', 'trunk', 'trunk')),
self._avail_finder.GetAPIAvailability('history'))
# Listed as 'dev' on |dev|.
self.assertEquals(
AvailabilityInfo(ChannelInfo('dev', CANNED_BRANCHES[28], 28)),
self._avail_finder.GetAPIAvailability('storage'))
# Stable in _manifest_features and into pre-18 versions.
self.assertEquals(
AvailabilityInfo(ChannelInfo('stable', CANNED_BRANCHES[8], 8)),
self._avail_finder.GetAPIAvailability('pageAction'))
# Testing API channel existence for _permission_features.json.
# Listed as 'beta' on |trunk|.
self.assertEquals(
AvailabilityInfo(ChannelInfo('trunk', 'trunk', 'trunk')),
self._avail_finder.GetAPIAvailability('falseBetaAPI'))
# Listed as 'trunk' on |trunk|.
self.assertEquals(
AvailabilityInfo(ChannelInfo('trunk', 'trunk', 'trunk')),
self._avail_finder.GetAPIAvailability('trunkAPI'))
# Listed as 'trunk' on all development channels.
self.assertEquals(
AvailabilityInfo(ChannelInfo('trunk', 'trunk', 'trunk')),
self._avail_finder.GetAPIAvailability('declarativeContent'))
# Listed as 'dev' on all development channels.
self.assertEquals(
AvailabilityInfo(ChannelInfo('dev', CANNED_BRANCHES[28], 28)),
self._avail_finder.GetAPIAvailability('bluetooth'))
# Listed as 'dev' on |dev|.
self.assertEquals(
AvailabilityInfo(ChannelInfo('dev', CANNED_BRANCHES[28], 28)),
self._avail_finder.GetAPIAvailability('cookies'))
# Treated as 'stable' APIs.
self.assertEquals(
AvailabilityInfo(ChannelInfo('stable', CANNED_BRANCHES[24], 24)),
self._avail_finder.GetAPIAvailability('alarms'))
self.assertEquals(
AvailabilityInfo(ChannelInfo('stable', CANNED_BRANCHES[21], 21)),
self._avail_finder.GetAPIAvailability('bookmarks'))
# Testing older API existence using extension_api.json.
self.assertEquals(
AvailabilityInfo(ChannelInfo('stable', CANNED_BRANCHES[6], 6)),
self._avail_finder.GetAPIAvailability('menus'))
self.assertEquals(
AvailabilityInfo(ChannelInfo('stable', CANNED_BRANCHES[5], 5)),
self._avail_finder.GetAPIAvailability('idle'))
# Switches between _features.json files across branches.
# Listed as 'trunk' on all channels, in _api, _permission, or _manifest.
self.assertEquals(
AvailabilityInfo(ChannelInfo('trunk', 'trunk', 'trunk')),
self._avail_finder.GetAPIAvailability('contextMenus'))
# Moves between _permission and _manifest as file system is traversed.
self.assertEquals(
AvailabilityInfo(ChannelInfo('stable', CANNED_BRANCHES[23], 23)),
self._avail_finder.GetAPIAvailability('systemInfo.display'))
self.assertEquals(
AvailabilityInfo(ChannelInfo('stable', CANNED_BRANCHES[17], 17)),
self._avail_finder.GetAPIAvailability('webRequest'))
# Mid-upgrade cases:
# Listed as 'dev' on |beta| and 'beta' on |dev|.
self.assertEquals(
AvailabilityInfo(ChannelInfo('dev', CANNED_BRANCHES[28], 28)),
self._avail_finder.GetAPIAvailability('notifications'))
# Listed as 'beta' on |stable|, 'dev' on |beta| ... until |stable| on trunk.
self.assertEquals(
AvailabilityInfo(ChannelInfo('trunk', 'trunk', 'trunk')),
self._avail_finder.GetAPIAvailability('events'))
def testGetAPINodeAvailability(self):
# Allow the LookupResult constructions below to take just one line.
lookup_result = api_schema_graph.LookupResult
availability_graph = self._node_avail_finder.GetAPINodeAvailability('tabs')
self.assertEquals(
lookup_result(True, self._branch_utility.GetChannelInfo('trunk')),
availability_graph.Lookup('tabs', 'properties',
'fakeTabsProperty3'))
self.assertEquals(
lookup_result(True, self._branch_utility.GetChannelInfo('dev')),
availability_graph.Lookup('tabs', 'events', 'onActivated',
'parameters', 'activeInfo', 'properties',
'windowId'))
self.assertEquals(
lookup_result(True, self._branch_utility.GetChannelInfo('dev')),
availability_graph.Lookup('tabs', 'events', 'onUpdated', 'parameters',
'tab'))
self.assertEquals(
lookup_result(True, self._branch_utility.GetChannelInfo('beta')),
availability_graph.Lookup('tabs', 'events','onActivated'))
self.assertEquals(
lookup_result(True, self._branch_utility.GetChannelInfo('beta')),
availability_graph.Lookup('tabs', 'functions', 'get', 'parameters',
'tabId'))
self.assertEquals(
lookup_result(True, self._branch_utility.GetChannelInfo('stable')),
availability_graph.Lookup('tabs', 'types', 'InjectDetails',
'properties', 'code'))
self.assertEquals(
lookup_result(True, self._branch_utility.GetChannelInfo('stable')),
availability_graph.Lookup('tabs', 'types', 'InjectDetails',
'properties', 'file'))
self.assertEquals(
lookup_result(True, self._branch_utility.GetStableChannelInfo(25)),
availability_graph.Lookup('tabs', 'types', 'InjectDetails'))
# Nothing new in version 24 or 23.
self.assertEquals(
lookup_result(True, self._branch_utility.GetStableChannelInfo(22)),
availability_graph.Lookup('tabs', 'types', 'Tab', 'properties',
'windowId'))
self.assertEquals(
lookup_result(True, self._branch_utility.GetStableChannelInfo(21)),
availability_graph.Lookup('tabs', 'types', 'Tab', 'properties',
'selected'))
# Nothing new in version 20.
self.assertEquals(
lookup_result(True, self._branch_utility.GetStableChannelInfo(19)),
availability_graph.Lookup('tabs', 'functions', 'getCurrent'))
self.assertEquals(
lookup_result(True, self._branch_utility.GetStableChannelInfo(18)),
availability_graph.Lookup('tabs', 'types', 'Tab', 'properties',
'index'))
self.assertEquals(
lookup_result(True, self._branch_utility.GetStableChannelInfo(17)),
availability_graph.Lookup('tabs', 'events', 'onUpdated', 'parameters',
'changeInfo'))
# Nothing new in version 16.
self.assertEquals(
lookup_result(True, self._branch_utility.GetStableChannelInfo(15)),
availability_graph.Lookup('tabs', 'properties',
'fakeTabsProperty2'))
# Everything else is available at the API's release, version 14 here.
self.assertEquals(
lookup_result(True, self._branch_utility.GetStableChannelInfo(14)),
availability_graph.Lookup('tabs', 'types', 'Tab'))
self.assertEquals(
lookup_result(True, self._branch_utility.GetStableChannelInfo(14)),
availability_graph.Lookup('tabs', 'types', 'Tab',
'properties', 'url'))
self.assertEquals(
lookup_result(True, self._branch_utility.GetStableChannelInfo(14)),
availability_graph.Lookup('tabs', 'properties',
'fakeTabsProperty1'))
self.assertEquals(
lookup_result(True, self._branch_utility.GetStableChannelInfo(14)),
availability_graph.Lookup('tabs', 'functions', 'get', 'parameters',
'callback'))
self.assertEquals(
lookup_result(True, self._branch_utility.GetStableChannelInfo(14)),
availability_graph.Lookup('tabs', 'events', 'onUpdated'))
# Test things that aren't available.
self.assertEqual(lookup_result(False, None),
availability_graph.Lookup('tabs', 'types',
'UpdateInfo'))
self.assertEqual(lookup_result(False, None),
availability_graph.Lookup('tabs', 'functions', 'get',
'parameters', 'callback',
'parameters', 'tab', 'id'))
self.assertEqual(lookup_result(False, None),
availability_graph.Lookup('functions'))
self.assertEqual(lookup_result(False, None),
availability_graph.Lookup('events', 'onActivated',
'parameters', 'activeInfo',
'tabId'))
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | 4,297,978,974,179,599,400 | 2,729,618,714,786,381,000 | 45.725309 | 80 | 0.658102 | false |
jaimahajan1997/sympy | sympy/polys/numberfields.py | 37 | 31789 | """Computational algebraic field theory. """
from __future__ import print_function, division
from sympy import (
S, Rational, AlgebraicNumber,
Add, Mul, sympify, Dummy, expand_mul, I, pi
)
from sympy.functions.elementary.exponential import exp
from sympy.functions.elementary.trigonometric import cos, sin
from sympy.polys.polytools import (
Poly, PurePoly, sqf_norm, invert, factor_list, groebner, resultant,
degree, poly_from_expr, parallel_poly_from_expr, lcm
)
from sympy.polys.polyerrors import (
IsomorphismFailed,
CoercionFailed,
NotAlgebraic,
GeneratorsError,
)
from sympy.polys.rootoftools import CRootOf
from sympy.polys.specialpolys import cyclotomic_poly
from sympy.polys.polyutils import dict_from_expr, expr_from_dict
from sympy.polys.domains import ZZ, QQ
from sympy.polys.orthopolys import dup_chebyshevt
from sympy.polys.rings import ring
from sympy.polys.ring_series import rs_compose_add
from sympy.printing.lambdarepr import LambdaPrinter
from sympy.utilities import (
numbered_symbols, variations, lambdify, public, sift
)
from sympy.core.exprtools import Factors
from sympy.core.function import _mexpand
from sympy.simplify.radsimp import _split_gcd
from sympy.simplify.simplify import _is_sum_surds
from sympy.ntheory import sieve
from sympy.ntheory.factor_ import divisors
from mpmath import pslq, mp
from sympy.core.compatibility import reduce
from sympy.core.compatibility import range
def _choose_factor(factors, x, v, dom=QQ, prec=200, bound=5):
"""
Return a factor having root ``v``
It is assumed that one of the factors has root ``v``.
"""
if isinstance(factors[0], tuple):
factors = [f[0] for f in factors]
if len(factors) == 1:
return factors[0]
points = {x:v}
symbols = dom.symbols if hasattr(dom, 'symbols') else []
t = QQ(1, 10)
for n in range(bound**len(symbols)):
prec1 = 10
n_temp = n
for s in symbols:
points[s] = n_temp % bound
n_temp = n_temp // bound
while True:
candidates = []
eps = t**(prec1 // 2)
for f in factors:
if abs(f.as_expr().evalf(prec1, points)) < eps:
candidates.append(f)
if candidates:
factors = candidates
if len(factors) == 1:
return factors[0]
if prec1 > prec:
break
prec1 *= 2
raise NotImplementedError("multiple candidates for the minimal polynomial of %s" % v)
def _separate_sq(p):
"""
helper function for ``_minimal_polynomial_sq``
It selects a rational ``g`` such that the polynomial ``p``
consists of a sum of terms whose surds squared have gcd equal to ``g``
and a sum of terms with surds squared prime with ``g``;
then it takes the field norm to eliminate ``sqrt(g)``
See simplify.simplify.split_surds and polytools.sqf_norm.
Examples
========
>>> from sympy import sqrt
>>> from sympy.abc import x
>>> from sympy.polys.numberfields import _separate_sq
>>> p= -x + sqrt(2) + sqrt(3) + sqrt(7)
>>> p = _separate_sq(p); p
-x**2 + 2*sqrt(3)*x + 2*sqrt(7)*x - 2*sqrt(21) - 8
>>> p = _separate_sq(p); p
-x**4 + 4*sqrt(7)*x**3 - 32*x**2 + 8*sqrt(7)*x + 20
>>> p = _separate_sq(p); p
-x**8 + 48*x**6 - 536*x**4 + 1728*x**2 - 400
"""
from sympy.utilities.iterables import sift
def is_sqrt(expr):
return expr.is_Pow and expr.exp is S.Half
# p = c1*sqrt(q1) + ... + cn*sqrt(qn) -> a = [(c1, q1), .., (cn, qn)]
a = []
for y in p.args:
if not y.is_Mul:
if is_sqrt(y):
a.append((S.One, y**2))
elif y.is_Atom:
a.append((y, S.One))
elif y.is_Pow and y.exp.is_integer:
a.append((y, S.One))
else:
raise NotImplementedError
continue
sifted = sift(y.args, is_sqrt)
a.append((Mul(*sifted[False]), Mul(*sifted[True])**2))
a.sort(key=lambda z: z[1])
if a[-1][1] is S.One:
# there are no surds
return p
surds = [z for y, z in a]
for i in range(len(surds)):
if surds[i] != 1:
break
g, b1, b2 = _split_gcd(*surds[i:])
a1 = []
a2 = []
for y, z in a:
if z in b1:
a1.append(y*z**S.Half)
else:
a2.append(y*z**S.Half)
p1 = Add(*a1)
p2 = Add(*a2)
p = _mexpand(p1**2) - _mexpand(p2**2)
return p
def _minimal_polynomial_sq(p, n, x):
"""
Returns the minimal polynomial for the ``nth-root`` of a sum of surds
or ``None`` if it fails.
Parameters
==========
p : sum of surds
n : positive integer
x : variable of the returned polynomial
Examples
========
>>> from sympy.polys.numberfields import _minimal_polynomial_sq
>>> from sympy import sqrt
>>> from sympy.abc import x
>>> q = 1 + sqrt(2) + sqrt(3)
>>> _minimal_polynomial_sq(q, 3, x)
x**12 - 4*x**9 - 4*x**6 + 16*x**3 - 8
"""
from sympy.simplify.simplify import _is_sum_surds
p = sympify(p)
n = sympify(n)
r = _is_sum_surds(p)
if not n.is_Integer or not n > 0 or not _is_sum_surds(p):
return None
pn = p**Rational(1, n)
# eliminate the square roots
p -= x
while 1:
p1 = _separate_sq(p)
if p1 is p:
p = p1.subs({x:x**n})
break
else:
p = p1
# _separate_sq eliminates field extensions in a minimal way, so that
# if n = 1 then `p = constant*(minimal_polynomial(p))`
# if n > 1 it contains the minimal polynomial as a factor.
if n == 1:
p1 = Poly(p)
if p.coeff(x**p1.degree(x)) < 0:
p = -p
p = p.primitive()[1]
return p
# by construction `p` has root `pn`
# the minimal polynomial is the factor vanishing in x = pn
factors = factor_list(p)[1]
result = _choose_factor(factors, x, pn)
return result
def _minpoly_op_algebraic_element(op, ex1, ex2, x, dom, mp1=None, mp2=None):
"""
return the minimal polynomial for ``op(ex1, ex2)``
Parameters
==========
op : operation ``Add`` or ``Mul``
ex1, ex2 : expressions for the algebraic elements
x : indeterminate of the polynomials
dom: ground domain
mp1, mp2 : minimal polynomials for ``ex1`` and ``ex2`` or None
Examples
========
>>> from sympy import sqrt, Add, Mul, QQ
>>> from sympy.polys.numberfields import _minpoly_op_algebraic_element
>>> from sympy.abc import x, y
>>> p1 = sqrt(sqrt(2) + 1)
>>> p2 = sqrt(sqrt(2) - 1)
>>> _minpoly_op_algebraic_element(Mul, p1, p2, x, QQ)
x - 1
>>> q1 = sqrt(y)
>>> q2 = 1 / y
>>> _minpoly_op_algebraic_element(Add, q1, q2, x, QQ.frac_field(y))
x**2*y**2 - 2*x*y - y**3 + 1
References
==========
[1] http://en.wikipedia.org/wiki/Resultant
[2] I.M. Isaacs, Proc. Amer. Math. Soc. 25 (1970), 638
"Degrees of sums in a separable field extension".
"""
y = Dummy(str(x))
if mp1 is None:
mp1 = _minpoly_compose(ex1, x, dom)
if mp2 is None:
mp2 = _minpoly_compose(ex2, y, dom)
else:
mp2 = mp2.subs({x: y})
if op is Add:
# mp1a = mp1.subs({x: x - y})
if dom == QQ:
R, X = ring('X', QQ)
p1 = R(dict_from_expr(mp1)[0])
p2 = R(dict_from_expr(mp2)[0])
else:
(p1, p2), _ = parallel_poly_from_expr((mp1, x - y), x, y)
r = p1.compose(p2)
mp1a = r.as_expr()
elif op is Mul:
mp1a = _muly(mp1, x, y)
else:
raise NotImplementedError('option not available')
if op is Mul or dom != QQ:
r = resultant(mp1a, mp2, gens=[y, x])
else:
r = rs_compose_add(p1, p2)
r = expr_from_dict(r.as_expr_dict(), x)
deg1 = degree(mp1, x)
deg2 = degree(mp2, y)
if op is Mul and deg1 == 1 or deg2 == 1:
# if deg1 = 1, then mp1 = x - a; mp1a = x - y - a;
# r = mp2(x - a), so that `r` is irreducible
return r
r = Poly(r, x, domain=dom)
_, factors = r.factor_list()
res = _choose_factor(factors, x, op(ex1, ex2), dom)
return res.as_expr()
def _invertx(p, x):
"""
Returns ``expand_mul(x**degree(p, x)*p.subs(x, 1/x))``
"""
p1 = poly_from_expr(p, x)[0]
n = degree(p1)
a = [c * x**(n - i) for (i,), c in p1.terms()]
return Add(*a)
def _muly(p, x, y):
"""
Returns ``_mexpand(y**deg*p.subs({x:x / y}))``
"""
p1 = poly_from_expr(p, x)[0]
n = degree(p1)
a = [c * x**i * y**(n - i) for (i,), c in p1.terms()]
return Add(*a)
def _minpoly_pow(ex, pw, x, dom, mp=None):
"""
Returns ``minpoly(ex**pw, x)``
Parameters
==========
ex : algebraic element
pw : rational number
x : indeterminate of the polynomial
dom: ground domain
mp : minimal polynomial of ``p``
Examples
========
>>> from sympy import sqrt, QQ, Rational
>>> from sympy.polys.numberfields import _minpoly_pow, minpoly
>>> from sympy.abc import x, y
>>> p = sqrt(1 + sqrt(2))
>>> _minpoly_pow(p, 2, x, QQ)
x**2 - 2*x - 1
>>> minpoly(p**2, x)
x**2 - 2*x - 1
>>> _minpoly_pow(y, Rational(1, 3), x, QQ.frac_field(y))
x**3 - y
>>> minpoly(y**Rational(1, 3), x)
x**3 - y
"""
pw = sympify(pw)
if not mp:
mp = _minpoly_compose(ex, x, dom)
if not pw.is_rational:
raise NotAlgebraic("%s doesn't seem to be an algebraic element" % ex)
if pw < 0:
if mp == x:
raise ZeroDivisionError('%s is zero' % ex)
mp = _invertx(mp, x)
if pw == -1:
return mp
pw = -pw
ex = 1/ex
y = Dummy(str(x))
mp = mp.subs({x: y})
n, d = pw.as_numer_denom()
res = Poly(resultant(mp, x**d - y**n, gens=[y]), x, domain=dom)
_, factors = res.factor_list()
res = _choose_factor(factors, x, ex**pw, dom)
return res.as_expr()
def _minpoly_add(x, dom, *a):
"""
returns ``minpoly(Add(*a), dom, x)``
"""
mp = _minpoly_op_algebraic_element(Add, a[0], a[1], x, dom)
p = a[0] + a[1]
for px in a[2:]:
mp = _minpoly_op_algebraic_element(Add, p, px, x, dom, mp1=mp)
p = p + px
return mp
def _minpoly_mul(x, dom, *a):
"""
returns ``minpoly(Mul(*a), dom, x)``
"""
mp = _minpoly_op_algebraic_element(Mul, a[0], a[1], x, dom)
p = a[0] * a[1]
for px in a[2:]:
mp = _minpoly_op_algebraic_element(Mul, p, px, x, dom, mp1=mp)
p = p * px
return mp
def _minpoly_sin(ex, x):
"""
Returns the minimal polynomial of ``sin(ex)``
see http://mathworld.wolfram.com/TrigonometryAngles.html
"""
c, a = ex.args[0].as_coeff_Mul()
if a is pi:
if c.is_rational:
n = c.q
q = sympify(n)
if q.is_prime:
# for a = pi*p/q with q odd prime, using chebyshevt
# write sin(q*a) = mp(sin(a))*sin(a);
# the roots of mp(x) are sin(pi*p/q) for p = 1,..., q - 1
a = dup_chebyshevt(n, ZZ)
return Add(*[x**(n - i - 1)*a[i] for i in range(n)])
if c.p == 1:
if q == 9:
return 64*x**6 - 96*x**4 + 36*x**2 - 3
if n % 2 == 1:
# for a = pi*p/q with q odd, use
# sin(q*a) = 0 to see that the minimal polynomial must be
# a factor of dup_chebyshevt(n, ZZ)
a = dup_chebyshevt(n, ZZ)
a = [x**(n - i)*a[i] for i in range(n + 1)]
r = Add(*a)
_, factors = factor_list(r)
res = _choose_factor(factors, x, ex)
return res
expr = ((1 - cos(2*c*pi))/2)**S.Half
res = _minpoly_compose(expr, x, QQ)
return res
raise NotAlgebraic("%s doesn't seem to be an algebraic element" % ex)
def _minpoly_cos(ex, x):
"""
Returns the minimal polynomial of ``cos(ex)``
see http://mathworld.wolfram.com/TrigonometryAngles.html
"""
from sympy import sqrt
c, a = ex.args[0].as_coeff_Mul()
if a is pi:
if c.is_rational:
if c.p == 1:
if c.q == 7:
return 8*x**3 - 4*x**2 - 4*x + 1
if c.q == 9:
return 8*x**3 - 6*x + 1
elif c.p == 2:
q = sympify(c.q)
if q.is_prime:
s = _minpoly_sin(ex, x)
return _mexpand(s.subs({x:sqrt((1 - x)/2)}))
# for a = pi*p/q, cos(q*a) =T_q(cos(a)) = (-1)**p
n = int(c.q)
a = dup_chebyshevt(n, ZZ)
a = [x**(n - i)*a[i] for i in range(n + 1)]
r = Add(*a) - (-1)**c.p
_, factors = factor_list(r)
res = _choose_factor(factors, x, ex)
return res
raise NotAlgebraic("%s doesn't seem to be an algebraic element" % ex)
def _minpoly_exp(ex, x):
"""
Returns the minimal polynomial of ``exp(ex)``
"""
c, a = ex.args[0].as_coeff_Mul()
p = sympify(c.p)
q = sympify(c.q)
if a == I*pi:
if c.is_rational:
if c.p == 1 or c.p == -1:
if q == 3:
return x**2 - x + 1
if q == 4:
return x**4 + 1
if q == 6:
return x**4 - x**2 + 1
if q == 8:
return x**8 + 1
if q == 9:
return x**6 - x**3 + 1
if q == 10:
return x**8 - x**6 + x**4 - x**2 + 1
if q.is_prime:
s = 0
for i in range(q):
s += (-x)**i
return s
# x**(2*q) = product(factors)
factors = [cyclotomic_poly(i, x) for i in divisors(2*q)]
mp = _choose_factor(factors, x, ex)
return mp
else:
raise NotAlgebraic("%s doesn't seem to be an algebraic element" % ex)
raise NotAlgebraic("%s doesn't seem to be an algebraic element" % ex)
def _minpoly_rootof(ex, x):
"""
Returns the minimal polynomial of a ``CRootOf`` object.
"""
p = ex.expr
p = p.subs({ex.poly.gens[0]:x})
_, factors = factor_list(p, x)
result = _choose_factor(factors, x, ex)
return result
def _minpoly_compose(ex, x, dom):
"""
Computes the minimal polynomial of an algebraic element
using operations on minimal polynomials
Examples
========
>>> from sympy import minimal_polynomial, sqrt, Rational
>>> from sympy.abc import x, y
>>> minimal_polynomial(sqrt(2) + 3*Rational(1, 3), x, compose=True)
x**2 - 2*x - 1
>>> minimal_polynomial(sqrt(y) + 1/y, x, compose=True)
x**2*y**2 - 2*x*y - y**3 + 1
"""
if ex.is_Rational:
return ex.q*x - ex.p
if ex is I:
return x**2 + 1
if hasattr(dom, 'symbols') and ex in dom.symbols:
return x - ex
if dom.is_QQ and _is_sum_surds(ex):
# eliminate the square roots
ex -= x
while 1:
ex1 = _separate_sq(ex)
if ex1 is ex:
return ex
else:
ex = ex1
if ex.is_Add:
res = _minpoly_add(x, dom, *ex.args)
elif ex.is_Mul:
f = Factors(ex).factors
r = sift(f.items(), lambda itx: itx[0].is_Rational and itx[1].is_Rational)
if r[True] and dom == QQ:
ex1 = Mul(*[bx**ex for bx, ex in r[False] + r[None]])
r1 = r[True]
dens = [y.q for _, y in r1]
lcmdens = reduce(lcm, dens, 1)
nums = [base**(y.p*lcmdens // y.q) for base, y in r1]
ex2 = Mul(*nums)
mp1 = minimal_polynomial(ex1, x)
# use the fact that in SymPy canonicalization products of integers
# raised to rational powers are organized in relatively prime
# bases, and that in ``base**(n/d)`` a perfect power is
# simplified with the root
mp2 = ex2.q*x**lcmdens - ex2.p
ex2 = ex2**Rational(1, lcmdens)
res = _minpoly_op_algebraic_element(Mul, ex1, ex2, x, dom, mp1=mp1, mp2=mp2)
else:
res = _minpoly_mul(x, dom, *ex.args)
elif ex.is_Pow:
res = _minpoly_pow(ex.base, ex.exp, x, dom)
elif ex.__class__ is sin:
res = _minpoly_sin(ex, x)
elif ex.__class__ is cos:
res = _minpoly_cos(ex, x)
elif ex.__class__ is exp:
res = _minpoly_exp(ex, x)
elif ex.__class__ is CRootOf:
res = _minpoly_rootof(ex, x)
else:
raise NotAlgebraic("%s doesn't seem to be an algebraic element" % ex)
return res
@public
def minimal_polynomial(ex, x=None, **args):
"""
Computes the minimal polynomial of an algebraic element.
Parameters
==========
ex : algebraic element expression
x : independent variable of the minimal polynomial
Options
=======
compose : if ``True`` ``_minpoly_compose`` is used, if ``False`` the ``groebner`` algorithm
polys : if ``True`` returns a ``Poly`` object
domain : ground domain
Notes
=====
By default ``compose=True``, the minimal polynomial of the subexpressions of ``ex``
are computed, then the arithmetic operations on them are performed using the resultant
and factorization.
If ``compose=False``, a bottom-up algorithm is used with ``groebner``.
The default algorithm stalls less frequently.
If no ground domain is given, it will be generated automatically from the expression.
Examples
========
>>> from sympy import minimal_polynomial, sqrt, solve, QQ
>>> from sympy.abc import x, y
>>> minimal_polynomial(sqrt(2), x)
x**2 - 2
>>> minimal_polynomial(sqrt(2), x, domain=QQ.algebraic_field(sqrt(2)))
x - sqrt(2)
>>> minimal_polynomial(sqrt(2) + sqrt(3), x)
x**4 - 10*x**2 + 1
>>> minimal_polynomial(solve(x**3 + x + 3)[0], x)
x**3 + x + 3
>>> minimal_polynomial(sqrt(y), x)
x**2 - y
"""
from sympy.polys.polytools import degree
from sympy.polys.domains import FractionField
from sympy.core.basic import preorder_traversal
compose = args.get('compose', True)
polys = args.get('polys', False)
dom = args.get('domain', None)
ex = sympify(ex)
if ex.is_number:
# not sure if it's always needed but try it for numbers (issue 8354)
ex = _mexpand(ex, recursive=True)
for expr in preorder_traversal(ex):
if expr.is_AlgebraicNumber:
compose = False
break
if x is not None:
x, cls = sympify(x), Poly
else:
x, cls = Dummy('x'), PurePoly
if not dom:
dom = FractionField(QQ, list(ex.free_symbols)) if ex.free_symbols else QQ
if hasattr(dom, 'symbols') and x in dom.symbols:
raise GeneratorsError("the variable %s is an element of the ground domain %s" % (x, dom))
if compose:
result = _minpoly_compose(ex, x, dom)
result = result.primitive()[1]
c = result.coeff(x**degree(result, x))
if c.is_negative:
result = expand_mul(-result)
return cls(result, x, field=True) if polys else result.collect(x)
if not dom.is_QQ:
raise NotImplementedError("groebner method only works for QQ")
result = _minpoly_groebner(ex, x, cls)
return cls(result, x, field=True) if polys else result.collect(x)
def _minpoly_groebner(ex, x, cls):
"""
Computes the minimal polynomial of an algebraic number
using Groebner bases
Examples
========
>>> from sympy import minimal_polynomial, sqrt, Rational
>>> from sympy.abc import x
>>> minimal_polynomial(sqrt(2) + 3*Rational(1, 3), x, compose=False)
x**2 - 2*x - 1
"""
from sympy.polys.polytools import degree
from sympy.core.function import expand_multinomial
generator = numbered_symbols('a', cls=Dummy)
mapping, symbols, replace = {}, {}, []
def update_mapping(ex, exp, base=None):
a = next(generator)
symbols[ex] = a
if base is not None:
mapping[ex] = a**exp + base
else:
mapping[ex] = exp.as_expr(a)
return a
def bottom_up_scan(ex):
if ex.is_Atom:
if ex is S.ImaginaryUnit:
if ex not in mapping:
return update_mapping(ex, 2, 1)
else:
return symbols[ex]
elif ex.is_Rational:
return ex
elif ex.is_Add:
return Add(*[ bottom_up_scan(g) for g in ex.args ])
elif ex.is_Mul:
return Mul(*[ bottom_up_scan(g) for g in ex.args ])
elif ex.is_Pow:
if ex.exp.is_Rational:
if ex.exp < 0 and ex.base.is_Add:
coeff, terms = ex.base.as_coeff_add()
elt, _ = primitive_element(terms, polys=True)
alg = ex.base - coeff
# XXX: turn this into eval()
inverse = invert(elt.gen + coeff, elt).as_expr()
base = inverse.subs(elt.gen, alg).expand()
if ex.exp == -1:
return bottom_up_scan(base)
else:
ex = base**(-ex.exp)
if not ex.exp.is_Integer:
base, exp = (
ex.base**ex.exp.p).expand(), Rational(1, ex.exp.q)
else:
base, exp = ex.base, ex.exp
base = bottom_up_scan(base)
expr = base**exp
if expr not in mapping:
return update_mapping(expr, 1/exp, -base)
else:
return symbols[expr]
elif ex.is_AlgebraicNumber:
if ex.root not in mapping:
return update_mapping(ex.root, ex.minpoly)
else:
return symbols[ex.root]
raise NotAlgebraic("%s doesn't seem to be an algebraic number" % ex)
def simpler_inverse(ex):
"""
Returns True if it is more likely that the minimal polynomial
algorithm works better with the inverse
"""
if ex.is_Pow:
if (1/ex.exp).is_integer and ex.exp < 0:
if ex.base.is_Add:
return True
if ex.is_Mul:
hit = True
a = []
for p in ex.args:
if p.is_Add:
return False
if p.is_Pow:
if p.base.is_Add and p.exp > 0:
return False
if hit:
return True
return False
inverted = False
ex = expand_multinomial(ex)
if ex.is_AlgebraicNumber:
return ex.minpoly.as_expr(x)
elif ex.is_Rational:
result = ex.q*x - ex.p
else:
inverted = simpler_inverse(ex)
if inverted:
ex = ex**-1
res = None
if ex.is_Pow and (1/ex.exp).is_Integer:
n = 1/ex.exp
res = _minimal_polynomial_sq(ex.base, n, x)
elif _is_sum_surds(ex):
res = _minimal_polynomial_sq(ex, S.One, x)
if res is not None:
result = res
if res is None:
bus = bottom_up_scan(ex)
F = [x - bus] + list(mapping.values())
G = groebner(F, list(symbols.values()) + [x], order='lex')
_, factors = factor_list(G[-1])
# by construction G[-1] has root `ex`
result = _choose_factor(factors, x, ex)
if inverted:
result = _invertx(result, x)
if result.coeff(x**degree(result, x)) < 0:
result = expand_mul(-result)
return result
minpoly = minimal_polynomial
__all__.append('minpoly')
def _coeffs_generator(n):
"""Generate coefficients for `primitive_element()`. """
for coeffs in variations([1, -1], n, repetition=True):
yield list(coeffs)
@public
def primitive_element(extension, x=None, **args):
"""Construct a common number field for all extensions. """
if not extension:
raise ValueError("can't compute primitive element for empty extension")
if x is not None:
x, cls = sympify(x), Poly
else:
x, cls = Dummy('x'), PurePoly
if not args.get('ex', False):
extension = [ AlgebraicNumber(ext, gen=x) for ext in extension ]
g, coeffs = extension[0].minpoly.replace(x), [1]
for ext in extension[1:]:
s, _, g = sqf_norm(g, x, extension=ext)
coeffs = [ s*c for c in coeffs ] + [1]
if not args.get('polys', False):
return g.as_expr(), coeffs
else:
return cls(g), coeffs
generator = numbered_symbols('y', cls=Dummy)
F, Y = [], []
for ext in extension:
y = next(generator)
if ext.is_Poly:
if ext.is_univariate:
f = ext.as_expr(y)
else:
raise ValueError("expected minimal polynomial, got %s" % ext)
else:
f = minpoly(ext, y)
F.append(f)
Y.append(y)
coeffs_generator = args.get('coeffs', _coeffs_generator)
for coeffs in coeffs_generator(len(Y)):
f = x - sum([ c*y for c, y in zip(coeffs, Y)])
G = groebner(F + [f], Y + [x], order='lex', field=True)
H, g = G[:-1], cls(G[-1], x, domain='QQ')
for i, (h, y) in enumerate(zip(H, Y)):
try:
H[i] = Poly(y - h, x,
domain='QQ').all_coeffs() # XXX: composite=False
except CoercionFailed: # pragma: no cover
break # G is not a triangular set
else:
break
else: # pragma: no cover
raise RuntimeError("run out of coefficient configurations")
_, g = g.clear_denoms()
if not args.get('polys', False):
return g.as_expr(), coeffs, H
else:
return g, coeffs, H
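# Illustrative usage of primitive_element() (comment-only sketch, not part of
# the original module):
#
#     >>> from sympy import sqrt
#     >>> from sympy.abc import x
#     >>> from sympy.polys.numberfields import primitive_element
#     >>> primitive_element([sqrt(2), sqrt(3)], x)
#     (x**4 - 10*x**2 + 1, [1, 1])
#
# i.e. sqrt(2) + sqrt(3) generates QQ(sqrt(2), sqrt(3)) and has minimal
# polynomial x**4 - 10*x**2 + 1 over QQ.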
def is_isomorphism_possible(a, b):
"""Returns `True` if there is a chance for isomorphism. """
n = a.minpoly.degree()
m = b.minpoly.degree()
if m % n != 0:
return False
if n == m:
return True
da = a.minpoly.discriminant()
db = b.minpoly.discriminant()
i, k, half = 1, m//n, db//2
while True:
p = sieve[i]
P = p**k
if P > half:
break
if ((da % p) % 2) and not (db % P):
return False
i += 1
return True
def field_isomorphism_pslq(a, b):
"""Construct field isomorphism using PSLQ algorithm. """
if not a.root.is_real or not b.root.is_real:
raise NotImplementedError("PSLQ doesn't support complex coefficients")
f = a.minpoly
g = b.minpoly.replace(f.gen)
n, m, prev = 100, b.minpoly.degree(), None
for i in range(1, 5):
A = a.root.evalf(n)
B = b.root.evalf(n)
basis = [1, B] + [ B**i for i in range(2, m) ] + [A]
dps, mp.dps = mp.dps, n
coeffs = pslq(basis, maxcoeff=int(1e10), maxsteps=1000)
mp.dps = dps
if coeffs is None:
break
if coeffs != prev:
prev = coeffs
else:
break
coeffs = [S(c)/coeffs[-1] for c in coeffs[:-1]]
while not coeffs[-1]:
coeffs.pop()
coeffs = list(reversed(coeffs))
h = Poly(coeffs, f.gen, domain='QQ')
if f.compose(h).rem(g).is_zero:
d, approx = len(coeffs) - 1, 0
for i, coeff in enumerate(coeffs):
approx += coeff*B**(d - i)
if A*approx < 0:
return [ -c for c in coeffs ]
else:
return coeffs
elif f.compose(-h).rem(g).is_zero:
return [ -c for c in coeffs ]
else:
n *= 2
return None
def field_isomorphism_factor(a, b):
"""Construct field isomorphism via factorization. """
_, factors = factor_list(a.minpoly, extension=b)
for f, _ in factors:
if f.degree() == 1:
coeffs = f.rep.TC().to_sympy_list()
d, terms = len(coeffs) - 1, []
for i, coeff in enumerate(coeffs):
terms.append(coeff*b.root**(d - i))
root = Add(*terms)
if (a.root - root).evalf(chop=True) == 0:
return coeffs
if (a.root + root).evalf(chop=True) == 0:
return [ -c for c in coeffs ]
else:
return None
@public
def field_isomorphism(a, b, **args):
"""Construct an isomorphism between two number fields. """
a, b = sympify(a), sympify(b)
if not a.is_AlgebraicNumber:
a = AlgebraicNumber(a)
if not b.is_AlgebraicNumber:
b = AlgebraicNumber(b)
if a == b:
return a.coeffs()
n = a.minpoly.degree()
m = b.minpoly.degree()
if n == 1:
return [a.root]
if m % n != 0:
return None
if args.get('fast', True):
try:
result = field_isomorphism_pslq(a, b)
if result is not None:
return result
except NotImplementedError:
pass
return field_isomorphism_factor(a, b)
@public
def to_number_field(extension, theta=None, **args):
"""Express `extension` in the field generated by `theta`. """
gen = args.get('gen')
if hasattr(extension, '__iter__'):
extension = list(extension)
else:
extension = [extension]
if len(extension) == 1 and type(extension[0]) is tuple:
return AlgebraicNumber(extension[0])
minpoly, coeffs = primitive_element(extension, gen, polys=True)
root = sum([ coeff*ext for coeff, ext in zip(coeffs, extension) ])
if theta is None:
return AlgebraicNumber((minpoly, root))
else:
theta = sympify(theta)
if not theta.is_AlgebraicNumber:
theta = AlgebraicNumber(theta, gen=gen)
coeffs = field_isomorphism(root, theta)
if coeffs is not None:
return AlgebraicNumber(theta, coeffs)
else:
raise IsomorphismFailed(
"%s is not in a subfield of %s" % (root, theta.root))
class IntervalPrinter(LambdaPrinter):
"""Use ``lambda`` printer but print numbers as ``mpi`` intervals. """
def _print_Integer(self, expr):
return "mpi('%s')" % super(IntervalPrinter, self)._print_Integer(expr)
def _print_Rational(self, expr):
return "mpi('%s')" % super(IntervalPrinter, self)._print_Rational(expr)
def _print_Pow(self, expr):
return super(IntervalPrinter, self)._print_Pow(expr, rational=True)
@public
def isolate(alg, eps=None, fast=False):
"""Give a rational isolating interval for an algebraic number. """
alg = sympify(alg)
if alg.is_Rational:
return (alg, alg)
elif not alg.is_real:
raise NotImplementedError(
"complex algebraic numbers are not supported")
func = lambdify((), alg, modules="mpmath", printer=IntervalPrinter())
poly = minpoly(alg, polys=True)
intervals = poly.intervals(sqf=True)
dps, done = mp.dps, False
try:
while not done:
alg = func()
for a, b in intervals:
if a <= alg.a and alg.b <= b:
done = True
break
else:
mp.dps *= 2
finally:
mp.dps = dps
if eps is not None:
a, b = poly.refine_root(a, b, eps=eps, fast=fast)
return (a, b)
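# Illustrative usage of isolate() (comment-only sketch, not part of the
# original module):
#
#     >>> from sympy import sqrt, Rational
#     >>> from sympy.polys.numberfields import isolate
#     >>> a, b = isolate(sqrt(2) + 1, eps=Rational(1, 1000))
#
# a and b are rationals with a < sqrt(2) + 1 < b; the exact endpoints depend
# on the interval refinement, so they are not reproduced here.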
| bsd-3-clause | 1,666,077,782,324,539,000 | -6,019,660,211,714,172,000 | 27.58723 | 97 | 0.525842 | false |
Hybrid-Cloud/conveyor | conveyor/conveyorheat/engine/lifecycle_plugin.py | 9 | 2138 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class LifecyclePlugin(object):
"""Base class for pre-op and post-op work on a stack.
Implementations should extend this class and override the methods.
"""
def do_pre_op(self, cnxt, stack, current_stack=None, action=None):
"""Method to be run by heat before stack operations."""
pass
def do_post_op(self, cnxt, stack, current_stack=None, action=None,
is_stack_failure=False):
"""Method to be run by heat after stack operations, including failures.
On failure to execute all the registered pre_ops, this method will be
called if and only if the corresponding pre_op was successfully called.
On failures of the actual stack operation, this method will
be called if all the pre operations were successfully called.
"""
pass
def get_ordinal(self):
"""Get the sort order for pre and post operation execution.
The values returned by get_ordinal are used to create a partial order
for pre and post operation method invocations. The default ordinal
value of 100 may be overridden.
If class1inst.ordinal() < class2inst.ordinal(), then the method on
class1inst will be executed before the method on class2inst.
If class1inst.ordinal() > class2inst.ordinal(), then the method on
class1inst will be executed after the method on class2inst.
If class1inst.ordinal() == class2inst.ordinal(), then the order of
method invocation is indeterminate.
"""
return 100
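# A minimal illustrative subclass (not part of the original module). The
# printing below is only an assumed example of pre/post work, it assumes the
# stack object exposes a ``name`` attribute, and real plugins are still
# registered through heat's usual plugin-loading mechanism.
class ExampleLoggingPlugin(LifecyclePlugin):
    """Sketch: report stack actions around the operation and run early."""
    def do_pre_op(self, cnxt, stack, current_stack=None, action=None):
        # Runs before the stack operation starts.
        print('about to run %s on stack %s' % (action, stack.name))
    def do_post_op(self, cnxt, stack, current_stack=None, action=None,
                   is_stack_failure=False):
        # Runs after the stack operation, including when it failed.
        print('%s on stack %s finished (failed=%s)'
              % (action, stack.name, is_stack_failure))
    def get_ordinal(self):
        # Sort before plugins that keep the default ordinal of 100.
        return 50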
| apache-2.0 | 5,843,151,693,956,571,000 | -4,518,254,311,740,385,000 | 43.541667 | 79 | 0.686623 | false |
samdoran/ansible | lib/ansible/modules/cloud/webfaction/webfaction_domain.py | 63 | 5507 | #!/usr/bin/python
#
# Create Webfaction domains and subdomains using Ansible and the Webfaction API
#
# ------------------------------------------
#
# (c) Quentin Stafford-Fraser 2015
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: webfaction_domain
short_description: Add or remove domains and subdomains on Webfaction
description:
- Add or remove domains or subdomains on a Webfaction host. Further documentation at http://github.com/quentinsf/ansible-webfaction.
author: Quentin Stafford-Fraser (@quentinsf)
version_added: "2.0"
notes:
- If you are I(deleting) domains by using C(state=absent), then note that if you specify subdomains, just those particular subdomains will be deleted.
If you don't specify subdomains, the domain will be deleted.
- >
You can run playbooks that use this on a local machine, or on a Webfaction host, or elsewhere, since the scripts use the remote webfaction API.
The location is not important. However, running them on multiple hosts I(simultaneously) is best avoided. If you don't specify I(localhost) as
your host, you may want to add C(serial: 1) to the plays.
- See `the webfaction API <http://docs.webfaction.com/xmlrpc-api/>`_ for more info.
options:
name:
description:
- The name of the domain
required: true
state:
description:
- Whether the domain should exist
required: false
choices: ['present', 'absent']
default: "present"
subdomains:
description:
- Any subdomains to create.
required: false
default: null
login_name:
description:
- The webfaction account to use
required: true
login_password:
description:
- The webfaction password to use
required: true
'''
EXAMPLES = '''
- name: Create a test domain
webfaction_domain:
name: mydomain.com
state: present
subdomains:
- www
- blog
login_name: "{{webfaction_user}}"
login_password: "{{webfaction_passwd}}"
- name: Delete test domain and any subdomains
webfaction_domain:
name: mydomain.com
state: absent
login_name: "{{webfaction_user}}"
login_password: "{{webfaction_passwd}}"
'''
import socket
import xmlrpclib
webfaction = xmlrpclib.ServerProxy('https://api.webfaction.com/')
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True),
state = dict(required=False, choices=['present', 'absent'], default='present'),
subdomains = dict(required=False, default=[]),
login_name = dict(required=True),
login_password = dict(required=True, no_log=True),
),
supports_check_mode=True
)
domain_name = module.params['name']
domain_state = module.params['state']
domain_subdomains = module.params['subdomains']
session_id, account = webfaction.login(
module.params['login_name'],
module.params['login_password']
)
domain_list = webfaction.list_domains(session_id)
domain_map = dict([(i['domain'], i) for i in domain_list])
existing_domain = domain_map.get(domain_name)
result = {}
# Here's where the real stuff happens
if domain_state == 'present':
        # Does a domain with this name already exist?
if existing_domain:
if set(existing_domain['subdomains']) >= set(domain_subdomains):
# If it exists with the right subdomains, we don't change anything.
module.exit_json(
changed = False,
)
positional_args = [session_id, domain_name] + domain_subdomains
if not module.check_mode:
            # If this isn't a dry run, create the domain
# print positional_args
result.update(
webfaction.create_domain(
*positional_args
)
)
elif domain_state == 'absent':
        # If the domain's already not there, nothing changed.
if not existing_domain:
module.exit_json(
changed = False,
)
positional_args = [session_id, domain_name] + domain_subdomains
if not module.check_mode:
            # If this isn't a dry run, delete the domain
result.update(
webfaction.delete_domain(*positional_args)
)
else:
module.fail_json(msg="Unknown state specified: {}".format(domain_state))
module.exit_json(
changed = True,
result = result
)
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 | 414,300,924,134,471,800 | 8,280,367,803,159,682,000 | 29.425414 | 154 | 0.619757 | false |
zhreshold/mxnet | example/gluon/embedding_learning/data.py | 30 | 6391 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import random
import numpy as np
import mxnet as mx
from mxnet import nd
def transform(data, target_wd, target_ht, is_train, box):
"""Crop and normnalize an image nd array."""
if box is not None:
x, y, w, h = box
data = data[y:min(y+h, data.shape[0]), x:min(x+w, data.shape[1])]
# Resize to target_wd * target_ht.
data = mx.image.imresize(data, target_wd, target_ht)
# Normalize in the same way as the pre-trained model.
data = data.astype(np.float32) / 255.0
data = (data - mx.nd.array([0.485, 0.456, 0.406])) / mx.nd.array([0.229, 0.224, 0.225])
if is_train:
if random.random() < 0.5:
data = nd.flip(data, axis=1)
data, _ = mx.image.random_crop(data, (224, 224))
else:
data, _ = mx.image.center_crop(data, (224, 224))
# Transpose from (target_wd, target_ht, 3)
# to (3, target_wd, target_ht).
data = nd.transpose(data, (2, 0, 1))
# If image is greyscale, repeat 3 times to get RGB image.
if data.shape[0] == 1:
data = nd.tile(data, (3, 1, 1))
return data.reshape((1,) + data.shape)
class CUB200Iter(mx.io.DataIter):
"""Iterator for the CUB200-2011 dataset.
Parameters
----------
data_path : str,
The path to dataset directory.
batch_k : int,
Number of images per class in a batch.
batch_size : int,
Batch size.
    data_shape : tuple,
Data shape. E.g. (3, 224, 224).
is_train : bool,
        Training data or testing data. Training batches are randomly sampled.
Testing batches are loaded sequentially until reaching the end.
"""
def __init__(self, data_path, batch_k, batch_size, data_shape, is_train):
super(CUB200Iter, self).__init__(batch_size)
self.data_shape = (batch_size,) + data_shape
self.batch_size = batch_size
self.provide_data = [('data', self.data_shape)]
self.batch_k = batch_k
self.is_train = is_train
self.train_image_files = [[] for _ in range(100)]
self.test_image_files = []
self.test_labels = []
self.boxes = {}
self.test_count = 0
with open(os.path.join(data_path, 'images.txt'), 'r') as f_img, \
open(os.path.join(data_path, 'image_class_labels.txt'), 'r') as f_label, \
open(os.path.join(data_path, 'bounding_boxes.txt'), 'r') as f_box:
for line_img, line_label, line_box in zip(f_img, f_label, f_box):
fname = os.path.join(data_path, 'images', line_img.strip().split()[-1])
label = int(line_label.strip().split()[-1]) - 1
box = [int(float(v)) for v in line_box.split()[-4:]]
self.boxes[fname] = box
# Following "Deep Metric Learning via Lifted Structured Feature Embedding" paper,
# we use the first 100 classes for training, and the remaining for testing.
if label < 100:
self.train_image_files[label].append(fname)
else:
self.test_labels.append(label)
self.test_image_files.append(fname)
self.n_test = len(self.test_image_files)
def get_image(self, img, is_train):
"""Load and transform an image."""
img_arr = mx.image.imread(img)
img_arr = transform(img_arr, 256, 256, is_train, self.boxes[img])
return img_arr
def sample_train_batch(self):
"""Sample a training batch (data and label)."""
batch = []
labels = []
num_groups = self.batch_size // self.batch_k
# For CUB200, we use the first 100 classes for training.
sampled_classes = np.random.choice(100, num_groups, replace=False)
for i in range(num_groups):
img_fnames = np.random.choice(self.train_image_files[sampled_classes[i]],
self.batch_k, replace=False)
batch += [self.get_image(img_fname, is_train=True) for img_fname in img_fnames]
labels += [sampled_classes[i] for _ in range(self.batch_k)]
return nd.concatenate(batch, axis=0), labels
def get_test_batch(self):
"""Sample a testing batch (data and label)."""
batch_size = self.batch_size
batch = [self.get_image(self.test_image_files[(self.test_count*batch_size + i)
% len(self.test_image_files)],
is_train=False) for i in range(batch_size)]
labels = [self.test_labels[(self.test_count*batch_size + i)
% len(self.test_image_files)] for i in range(batch_size)]
return nd.concatenate(batch, axis=0), labels
def reset(self):
"""Reset an iterator."""
self.test_count = 0
def next(self):
"""Return a batch."""
if self.is_train:
data, labels = self.sample_train_batch()
else:
if self.test_count * self.batch_size < len(self.test_image_files):
data, labels = self.get_test_batch()
self.test_count += 1
else:
self.test_count = 0
raise StopIteration
return mx.io.DataBatch(data=[data], label=[labels])
def cub200_iterator(data_path, batch_k, batch_size, data_shape):
"""Return training and testing iterator for the CUB200-2011 dataset."""
return (CUB200Iter(data_path, batch_k, batch_size, data_shape, is_train=True),
CUB200Iter(data_path, batch_k, batch_size, data_shape, is_train=False))
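# Illustrative usage (not part of the original example). The dataset path is
# an assumption: it should point at an extracted CUB_200_2011 directory that
# contains images.txt, image_class_labels.txt and bounding_boxes.txt.
if __name__ == '__main__':
    train_iter, test_iter = cub200_iterator(
        data_path='CUB_200_2011', batch_k=5, batch_size=70,
        data_shape=(3, 224, 224))
    # One randomly sampled training batch: 70 images drawn as 14 classes x 5
    # images per class, already normalized and cropped to 224 x 224.
    batch = train_iter.next()
    print(batch.data[0].shape, len(batch.label[0]))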
| apache-2.0 | 6,590,031,944,408,372,000 | -1,872,427,379,725,320,700 | 39.449367 | 97 | 0.592865 | false |
rogeriofalcone/treeio | core/migrations/0008_auto__add_field_attachment_filename.py | 1 | 21062 | # encoding: utf-8
# Copyright 2011 Tree.io Limited
# This file is part of Treeio.
# License www.tree.io/license
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Attachment.filename'
db.add_column('core_attachment', 'filename', self.gf('django.db.models.fields.CharField')(default='', max_length=64), keep_default=False)
def backwards(self, orm):
# Deleting field 'Attachment.filename'
db.delete_column('core_attachment', 'filename')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'core.accessentity': {
'Meta': {'object_name': 'AccessEntity'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.attachment': {
'Meta': {'object_name': 'Attachment'},
'attached_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'attached_object': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Object']", 'null': 'True', 'blank': 'True'}),
'attached_record': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.UpdateRecord']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mimetype': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'uploaded_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.User']"})
},
'core.comment': {
'Meta': {'object_name': 'Comment'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.User']", 'null': 'True', 'blank': 'True'}),
'body': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'dislikes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'comments_disliked'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'likes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'comments_liked'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.User']"})
},
'core.configsetting': {
'Meta': {'object_name': 'ConfigSetting'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'core.group': {
'Meta': {'ordering': "['name']", 'object_name': 'Group', '_ormbases': ['core.AccessEntity']},
'accessentity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.AccessEntity']", 'unique': 'True', 'primary_key': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'child_set'", 'null': 'True', 'to': "orm['core.Group']"})
},
'core.invitation': {
'Meta': {'object_name': 'Invitation'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Group']", 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'sender': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.User']", 'null': 'True', 'blank': 'True'})
},
'core.location': {
'Meta': {'object_name': 'Location', '_ormbases': ['core.Object']},
'name': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'object_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.Object']", 'unique': 'True', 'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'child_set'", 'null': 'True', 'to': "orm['core.Location']"})
},
'core.module': {
'Meta': {'ordering': "['name']", 'object_name': 'Module', '_ormbases': ['core.Object']},
'details': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'display': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'object_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.Object']", 'unique': 'True', 'primary_key': 'True'}),
'system': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '512'})
},
'core.modulesetting': {
'Meta': {'object_name': 'ModuleSetting'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Group']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'module': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Module']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'perspective': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Perspective']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.User']", 'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'core.object': {
'Meta': {'object_name': 'Object'},
'comments': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Comment']"}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'objects_created'", 'null': 'True', 'to': "orm['core.User']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'dislikes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'objects_disliked'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.User']"}),
'full_access': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'objects_full_access'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.AccessEntity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'likes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'objects_liked'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.User']"}),
'links': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'links_rel_+'", 'null': 'True', 'to': "orm['core.Object']"}),
'nuvius_resource': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'object_name': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'object_type': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'read_access': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'objects_read_access'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.AccessEntity']"}),
'subscribers': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'subscriptions'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.User']"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['core.Tag']", 'null': 'True', 'blank': 'True'}),
'trash': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'core.page': {
'Meta': {'ordering': "['name']", 'object_name': 'Page', '_ormbases': ['core.Object']},
'body': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.PageFolder']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'object_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.Object']", 'unique': 'True', 'primary_key': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'core.pagefolder': {
'Meta': {'object_name': 'PageFolder', '_ormbases': ['core.Object']},
'details': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'object_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.Object']", 'unique': 'True', 'primary_key': 'True'})
},
'core.perspective': {
'Meta': {'object_name': 'Perspective', '_ormbases': ['core.Object']},
'details': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'modules': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['core.Module']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'object_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.Object']", 'unique': 'True', 'primary_key': 'True'})
},
'core.revision': {
'Meta': {'object_name': 'Revision'},
'change_type': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Object']"}),
'previous': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'next'", 'unique': 'True', 'null': 'True', 'to': "orm['core.Revision']"})
},
'core.revisionfield': {
'Meta': {'object_name': 'RevisionField'},
'field': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'field_type': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'revision': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Revision']"}),
'value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'value_key': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'revisionfield_key'", 'null': 'True', 'to': "orm['core.Object']"}),
'value_key_acc': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'revisionfield_key_acc'", 'null': 'True', 'to': "orm['core.AccessEntity']"}),
'value_m2m': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'revisionfield_m2m'", 'symmetrical': 'False', 'to': "orm['core.Object']"}),
'value_m2m_acc': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'revisionfield_m2m_acc'", 'symmetrical': 'False', 'to': "orm['core.AccessEntity']"})
},
'core.tag': {
'Meta': {'ordering': "['name']", 'object_name': 'Tag'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '512'})
},
'core.updaterecord': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'UpdateRecord'},
'about': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'updates'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Object']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sent_updates'", 'null': 'True', 'to': "orm['core.User']"}),
'body': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'comments': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'comments_on_updates'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Comment']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'dislikes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'updates_disliked'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.User']"}),
'format_message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'format_strings': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'likes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'updates_liked'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.User']"}),
'recipients': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'received_updates'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.AccessEntity']"}),
'record_type': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'sender': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sent_updates'", 'null': 'True', 'to': "orm['core.Object']"}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'})
},
'core.user': {
'Meta': {'ordering': "['name']", 'object_name': 'User', '_ormbases': ['core.AccessEntity']},
'accessentity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.AccessEntity']", 'unique': 'True', 'primary_key': 'True'}),
'default_group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'default_user_set'", 'null': 'True', 'to': "orm['core.Group']"}),
'disabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_access': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'other_groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['core.Group']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'core.widget': {
'Meta': {'ordering': "['weight']", 'object_name': 'Widget'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'module_name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'perspective': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Perspective']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.User']"}),
'weight': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'widget_name': ('django.db.models.fields.CharField', [], {'max_length': '256'})
}
}
complete_apps = ['core']
| mit | 8,102,447,978,209,555,000 | 9,155,010,907,880,094,000 | 85.319672 | 217 | 0.552891 | false |
vlukes/sfepy | tests/test_normals.py | 4 | 2742 | from __future__ import absolute_import
import numpy as nm
from sfepy.base.testing import TestCommon
expected_normals = { # Need to be normalized!
'2_3' : nm.array([[ 0, -1],
[ 1, 1],
[-1, 0]], dtype=nm.float64),
'2_4' : nm.array([[ 0, -1],
[ 1, 0],
[ 0, 1],
[-1, 0]], dtype=nm.float64),
'3_4' : nm.array([[ 0, 0, -1],
[-1, 0, 0],
[ 0, -1, 0],
[ 1, 1, 1]], dtype=nm.float64),
'3_8' : nm.array([[ 0, 0, -1],
[-1, 0, 0],
[ 0, -1, 0],
[ 0, 0, 1],
[ 1, 0, 0],
[ 0, 1, 0]], dtype=nm.float64),
}
class Test(TestCommon):
@staticmethod
def from_conf(conf, options):
return Test(conf=conf, options=options)
def test_normals(self):
"""
Check orientations of surface normals on the reference elements.
"""
import sfepy
from sfepy.discrete import Integral, PolySpace
from sfepy.discrete.fem import Mesh, FEDomain
from sfepy.discrete.fem.mappings import SurfaceMapping
from sfepy.linalg import normalize_vectors
ok = True
for geom in ['2_3', '2_4', '3_4', '3_8']:
mesh = Mesh.from_file('meshes/elements/%s_1.mesh' % geom,
prefix_dir=sfepy.data_dir)
domain = FEDomain('domain', mesh)
surface = domain.create_region('Surface', 'vertices of surface',
'facet')
domain.create_surface_group(surface)
sd = domain.surface_groups[surface.name]
coors = domain.get_mesh_coors()
gel = domain.geom_els[geom].surface_facet
ps = PolySpace.any_from_args('aux', gel, 1)
mapping = SurfaceMapping(coors, sd.get_connectivity(), ps)
integral = Integral('i', order=1)
vals, weights = integral.get_qp(gel.name)
# Evaluate just in the first quadrature point...
geo = mapping.get_mapping(vals[:1], weights[:1])
expected = expected_normals[geom].copy()
normalize_vectors(expected)
_ok = nm.allclose(expected, geo.normal[:, 0, :, 0],
rtol=0.0, atol=1e-14)
self.report('%s: %s' % (geom, _ok))
if not _ok:
self.report('expected:')
self.report(expected)
self.report('actual:')
self.report(geo.normal[:, 0, :, 0])
ok = ok and _ok
return ok
| bsd-3-clause | -8,956,580,434,140,165,000 | 3,167,822,239,352,147,000 | 32.851852 | 76 | 0.464989 | false |
goliveirab/odoo | addons/mail/mail_followers.py | 168 | 12482 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import threading
from openerp.osv import osv, fields
from openerp import tools, SUPERUSER_ID
from openerp.tools.translate import _
from openerp.tools.mail import plaintext2html
class mail_followers(osv.Model):
""" mail_followers holds the data related to the follow mechanism inside
OpenERP. Partners can choose to follow documents (records) of any kind
that inherits from mail.thread. Following documents allow to receive
notifications for new messages.
A subscription is characterized by:
:param: res_model: model of the followed objects
:param: res_id: ID of resource (may be 0 for every objects)
"""
_name = 'mail.followers'
_rec_name = 'partner_id'
_log_access = False
_description = 'Document Followers'
_columns = {
'res_model': fields.char('Related Document Model',
required=True, select=1,
help='Model of the followed resource'),
'res_id': fields.integer('Related Document ID', select=1,
help='Id of the followed resource'),
'partner_id': fields.many2one('res.partner', string='Related Partner',
ondelete='cascade', required=True, select=1),
'subtype_ids': fields.many2many('mail.message.subtype', string='Subtype',
help="Message subtypes followed, meaning subtypes that will be pushed onto the user's Wall."),
}
#
# Modifying followers change access rights to individual documents. As the
# cache may contain accessible/inaccessible data, one has to refresh it.
#
def create(self, cr, uid, vals, context=None):
res = super(mail_followers, self).create(cr, uid, vals, context=context)
self.invalidate_cache(cr, uid, context=context)
return res
def write(self, cr, uid, ids, vals, context=None):
res = super(mail_followers, self).write(cr, uid, ids, vals, context=context)
self.invalidate_cache(cr, uid, context=context)
return res
def unlink(self, cr, uid, ids, context=None):
res = super(mail_followers, self).unlink(cr, uid, ids, context=context)
self.invalidate_cache(cr, uid, context=context)
return res
_sql_constraints = [('mail_followers_res_partner_res_model_id_uniq','unique(res_model,res_id,partner_id)','Error, a partner cannot follow twice the same object.')]
class mail_notification(osv.Model):
""" Class holding notifications pushed to partners. Followers and partners
added in 'contacts to notify' receive notifications. """
_name = 'mail.notification'
_rec_name = 'partner_id'
_log_access = False
_description = 'Notifications'
_columns = {
'partner_id': fields.many2one('res.partner', string='Contact',
ondelete='cascade', required=True, select=1),
'is_read': fields.boolean('Read', select=1, oldname='read'),
'starred': fields.boolean('Starred', select=1,
help='Starred message that goes into the todo mailbox'),
'message_id': fields.many2one('mail.message', string='Message',
ondelete='cascade', required=True, select=1),
}
_defaults = {
'is_read': False,
'starred': False,
}
def init(self, cr):
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('mail_notification_partner_id_read_starred_message_id',))
if not cr.fetchone():
cr.execute('CREATE INDEX mail_notification_partner_id_read_starred_message_id ON mail_notification (partner_id, is_read, starred, message_id)')
def get_partners_to_email(self, cr, uid, ids, message, context=None):
""" Return the list of partners to notify, based on their preferences.
:param browse_record message: mail.message to notify
:param list partners_to_notify: optional list of partner ids restricting
the notifications to process
"""
notify_pids = []
for notification in self.browse(cr, uid, ids, context=context):
if notification.is_read:
continue
partner = notification.partner_id
# Do not send to partners without email address defined
if not partner.email:
continue
            # Do not send to partners having the same email address as the author (can cause loops or bounce effects due to a messy database)
if message.author_id and message.author_id.email == partner.email:
continue
# Partner does not want to receive any emails or is opt-out
if partner.notify_email == 'none':
continue
notify_pids.append(partner.id)
return notify_pids
def get_signature_footer(self, cr, uid, user_id, res_model=None, res_id=None, context=None, user_signature=True):
""" Format a standard footer for notification emails (such as pushed messages
notification or invite emails).
Format:
<p>--<br />
Administrator
</p>
<div>
<small>Sent from <a ...>Your Company</a> using <a ...>OpenERP</a>.</small>
</div>
"""
footer = ""
if not user_id:
return footer
# add user signature
user = self.pool.get("res.users").browse(cr, SUPERUSER_ID, [user_id], context=context)[0]
if user_signature:
if user.signature:
signature = user.signature
else:
signature = "--<br />%s" % user.name
footer = tools.append_content_to_html(footer, signature, plaintext=False)
# add company signature
if user.company_id.website:
website_url = ('http://%s' % user.company_id.website) if not user.company_id.website.lower().startswith(('http:', 'https:')) \
else user.company_id.website
company = "<a style='color:inherit' href='%s'>%s</a>" % (website_url, user.company_id.name)
else:
company = user.company_id.name
sent_by = _('Sent by %(company)s using %(odoo)s')
signature_company = '<br /><small>%s</small>' % (sent_by % {
'company': company,
'odoo': "<a style='color:inherit' href='https://www.odoo.com/'>Odoo</a>"
})
footer = tools.append_content_to_html(footer, signature_company, plaintext=False, container_tag='div')
return footer
def update_message_notification(self, cr, uid, ids, message_id, partner_ids, context=None):
existing_pids = set()
new_pids = set()
new_notif_ids = []
for notification in self.browse(cr, uid, ids, context=context):
existing_pids.add(notification.partner_id.id)
# update existing notifications
self.write(cr, uid, ids, {'is_read': False}, context=context)
# create new notifications
new_pids = set(partner_ids) - existing_pids
for new_pid in new_pids:
new_notif_ids.append(self.create(cr, uid, {'message_id': message_id, 'partner_id': new_pid, 'is_read': False}, context=context))
return new_notif_ids
def _notify_email(self, cr, uid, ids, message_id, force_send=False, user_signature=True, context=None):
message = self.pool['mail.message'].browse(cr, SUPERUSER_ID, message_id, context=context)
# compute partners
email_pids = self.get_partners_to_email(cr, uid, ids, message, context=None)
if not email_pids:
return True
# compute email body (signature, company data)
body_html = message.body
# add user signature except for mail groups, where users are usually adding their own signatures already
user_id = message.author_id and message.author_id.user_ids and message.author_id.user_ids[0] and message.author_id.user_ids[0].id or None
signature_company = self.get_signature_footer(cr, uid, user_id, res_model=message.model, res_id=message.res_id, context=context, user_signature=(user_signature and message.model != 'mail.group'))
if signature_company:
body_html = tools.append_content_to_html(body_html, signature_company, plaintext=False, container_tag='div')
# compute email references
references = message.parent_id.message_id if message.parent_id else False
# custom values
custom_values = dict()
if message.model and message.res_id and self.pool.get(message.model) and hasattr(self.pool[message.model], 'message_get_email_values'):
custom_values = self.pool[message.model].message_get_email_values(cr, uid, message.res_id, message, context=context)
# create email values
max_recipients = 50
chunks = [email_pids[x:x + max_recipients] for x in xrange(0, len(email_pids), max_recipients)]
email_ids = []
for chunk in chunks:
mail_values = {
'mail_message_id': message.id,
'auto_delete': (context or {}).get('mail_auto_delete', True),
'mail_server_id': (context or {}).get('mail_server_id', False),
'body_html': body_html,
'recipient_ids': [(4, id) for id in chunk],
'references': references,
}
mail_values.update(custom_values)
email_ids.append(self.pool.get('mail.mail').create(cr, uid, mail_values, context=context))
# NOTE:
# 1. for more than 50 followers, use the queue system
# 2. do not send emails immediately if the registry is not loaded,
# to prevent sending email during a simple update of the database
# using the command-line.
if force_send and len(chunks) < 2 and \
(not self.pool._init or
getattr(threading.currentThread(), 'testing', False)):
self.pool.get('mail.mail').send(cr, uid, email_ids, context=context)
return True
def _notify(self, cr, uid, message_id, partners_to_notify=None, context=None,
force_send=False, user_signature=True):
""" Send by email the notification depending on the user preferences
:param list partners_to_notify: optional list of partner ids restricting
the notifications to process
:param bool force_send: if True, the generated mail.mail is
immediately sent after being created, as if the scheduler
was executed for this message only.
:param bool user_signature: if True, the generated mail.mail body is
the body of the related mail.message with the author's signature
"""
notif_ids = self.search(cr, SUPERUSER_ID, [('message_id', '=', message_id), ('partner_id', 'in', partners_to_notify)], context=context)
# update or create notifications
new_notif_ids = self.update_message_notification(cr, SUPERUSER_ID, notif_ids, message_id, partners_to_notify, context=context)
# mail_notify_noemail (do not send email) or no partner_ids: do not send, return
if context and context.get('mail_notify_noemail'):
return True
# browse as SUPERUSER_ID because of access to res_partner not necessarily allowed
self._notify_email(cr, SUPERUSER_ID, new_notif_ids, message_id, force_send, user_signature, context=context)
| agpl-3.0 | 4,829,122,742,478,622,000 | 186,250,208,553,808,160 | 47.568093 | 203 | 0.61825 | false |
poojavade/Genomics_Docker | Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/statsmodels-0.5.0-py2.7-linux-x86_64.egg/statsmodels/datasets/statecrime/data.py | 3 | 2985 | #! /usr/bin/env python
"""Statewide Crime Data"""
__docformat__ = 'restructuredtext'
COPYRIGHT = """Public domain."""
TITLE = """Statewide Crime Data 2009"""
SOURCE = """
All data is for 2009 and was obtained from the American Statistical Abstracts except as indicated below.
"""
DESCRSHORT = """State crime data 2009"""
DESCRLONG = DESCRSHORT
#suggested notes
NOTE = """
Number of observations: 51
Number of variables: 8
Variable name definitions:
state
All 50 states plus DC.
violent
Rate of violent crimes / 100,000 population. Includes murder, forcible
rape, robbery, and aggravated assault. Numbers for Illinois and Minnesota
do not include forcible rapes. Footnote included with the American
Statistical Abstract table reads:
"The data collection methodology for the offense of forcible
rape used by the Illinois and the Minnesota state Uniform Crime Reporting
(UCR) Programs (with the exception of Rockford, Illinois, and Minneapolis
and St. Paul, Minnesota) does not comply with national UCR guidelines.
Consequently, their state figures for forcible rape and violent crime (of
which forcible rape is a part) are not published in this table."
murder
Rate of murders / 100,000 population.
hs_grad
    Percent of population having graduated from high school or higher.
poverty
% of individuals below the poverty line
white
Percent of population that is one race - white only. From 2009 American
Community Survey
single
    Calculated from 2009 1-year American Community Survey obtained
    from Census. Variable is Male householder, no wife present, family
    household combined with Female householder, no husband present, family
    household, divided by the total number of Family households.
urban
% of population in Urbanized Areas as of 2010 Census. Urbanized Areas are
    areas of 50,000 or more people."""
import numpy as np
from statsmodels.datasets import utils as du
from os.path import dirname, abspath
def load():
"""
Load the statecrime data and return a Dataset class instance.
Returns
-------
Dataset instance:
See DATASET_PROPOSAL.txt for more information.
"""
data = _get_data()
##### SET THE INDICES #####
#NOTE: None for exog_idx is the complement of endog_idx
return du.process_recarray(data, endog_idx=2, exog_idx=[7, 4, 3, 5],
dtype=float)
def load_pandas():
data = _get_data()
##### SET THE INDICES #####
#NOTE: None for exog_idx is the complement of endog_idx
return du.process_recarray_pandas(data, endog_idx=2, exog_idx=[7,4,3,5],
dtype=float, index_idx=0)
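# Illustrative usage (assumed, not part of the original module):
#   from statsmodels.datasets import statecrime
#   dataset = statecrime.load_pandas()
#   y, X = dataset.endog, dataset.exog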
def _get_data():
filepath = dirname(abspath(__file__))
##### EDIT THE FOLLOWING TO POINT TO DatasetName.csv #####
data = np.recfromtxt(open(filepath + '/statecrime.csv', 'rb'),
delimiter=",", names=True, dtype=None)
return data
| apache-2.0 | -542,683,169,620,933,400 | 5,910,932,320,504,143,000 | 34.117647 | 104 | 0.690117 | false |
jimsimon/sky_engine | testing/legion/tools/legion.py | 16 | 5772 | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A helper module to run Legion multi-machine tests.
Example usage with 1 task machine:
$ testing/legion/tools/legion.py run \
--controller-isolated out/Release/example_test_controller.isolated \
--dimension os Ubuntu-14.04 \
--task-name test-task-name \
--task task_machine out/Release/example_task_machine.isolated
Example usage with 2 task machines with the same isolated file:
$ testing/legion/tools/legion.py run \
--controller-isolated out/Release/example_test_controller.isolated \
--dimension os Ubuntu-14.04 \
--task-name test-task-name \
--task task_machine_1 out/Release/example_task_machine.isolated \
--task task_machine_2 out/Release/example_task_machine.isolated
Example usage with 2 task machines with different isolated file:
$ testing/legion/tools/legion.py run \
--controller-isolated out/Release/example_test_controller.isolated \
--dimension os Ubuntu-14.04 \
--task-name test-task-name \
--task task_machine_1 out/Release/example_task_machine_1.isolated \
--task task_machine_2 out/Release/example_task_machine_2.isolated
"""
import argparse
import logging
import os
import subprocess
import sys
THIS_DIR = os.path.split(__file__)[0]
SWARMING_DIR = os.path.join(THIS_DIR, '..', '..', '..', 'tools',
'swarming_client')
ISOLATE_PY = os.path.join(SWARMING_DIR, 'isolate.py')
SWARMING_PY = os.path.join(SWARMING_DIR, 'swarming.py')
LOGGING_LEVELS = ['DEBUG', 'INFO', 'WARNING', 'ERROR']
class Error(Exception):
pass
def GetArgs():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('action', choices=['run', 'trigger'],
help='The swarming action to perform.')
parser.add_argument('-f', '--format-only', action='store_true',
help='If true the .isolated files are archived but '
'swarming is not called, only the command line is built.')
parser.add_argument('--controller-isolated', required=True,
help='The isolated file for the test controller.')
parser.add_argument('--isolate-server', help='Optional. The isolated server '
'to use.')
parser.add_argument('--swarming-server', help='Optional. The swarming server '
'to use.')
parser.add_argument('--task-name', help='Optional. The swarming task name '
'to use.')
parser.add_argument('--dimension', action='append', dest='dimensions',
nargs=2, default=[], help='Dimensions to pass to '
'swarming.py. This is in the form of --dimension key '
'value. The minimum required is --dimension os <OS>')
parser.add_argument('--task', action='append', dest='tasks',
nargs=2, default=[], help='List of task names used in '
'the test controller. This is in the form of --task name '
'.isolated and is passed to the controller as --name '
'<ISOLATED HASH>.')
parser.add_argument('--controller-var', action='append',
dest='controller_vars', nargs=2, default=[],
help='Command line vars to pass to the controller. These '
'are in the form of --controller-var name value and are '
'passed to the controller as --name value.')
parser.add_argument('-v', '--verbosity', default=0, action='count')
return parser.parse_args()
def RunCommand(cmd, stream_stdout=False):
"""Runs the command line and streams stdout if requested."""
kwargs = {
'args': cmd,
'stderr': subprocess.PIPE,
}
if not stream_stdout:
kwargs['stdout'] = subprocess.PIPE
p = subprocess.Popen(**kwargs)
stdout, stderr = p.communicate()
if p.returncode:
raise Error(stderr)
if not stream_stdout:
logging.debug(stdout)
return stdout
def Archive(isolated, isolate_server=None):
"""Calls isolate.py archive with the given args."""
cmd = [
sys.executable,
ISOLATE_PY,
'archive',
'--isolated', isolated,
]
if isolate_server:
cmd.extend(['--isolate-server', isolate_server])
print ' '.join(cmd)
return RunCommand(cmd).split()[0] # The isolated hash
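# Illustrative call (the .isolated path is an example only):
#   isolated_hash = Archive('out/Release/example_task_machine.isolated')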
def GetSwarmingCommandLine(args):
"""Builds and returns the command line for swarming.py run|trigger."""
cmd = [
sys.executable,
SWARMING_PY,
args.action,
args.controller_isolated,
]
if args.isolate_server:
cmd.extend(['--isolate-server', args.isolate_server])
if args.swarming_server:
cmd.extend(['--swarming', args.swarming_server])
if args.task_name:
cmd.extend(['--task-name', args.task_name])
# swarming.py dimensions
for name, value in args.dimensions:
cmd.extend(['--dimension', name, value])
cmd.append('--')
# Specify the output dir
cmd.extend(['--output-dir', '${ISOLATED_OUTDIR}'])
# Task name/hash values
for name, isolated in args.tasks:
cmd.extend(['--' + name, Archive(isolated, args.isolate_server)])
# Test controller args
for name, value in args.controller_vars:
cmd.extend(['--' + name, value])
print ' '.join(cmd)
return cmd
def main():
args = GetArgs()
logging.basicConfig(
format='%(asctime)s %(filename)s:%(lineno)s %(levelname)s] %(message)s',
datefmt='%H:%M:%S',
level=LOGGING_LEVELS[len(LOGGING_LEVELS)-args.verbosity-1])
cmd = GetSwarmingCommandLine(args)
if not args.format_only:
RunCommand(cmd, True)
return 0
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | -1,864,788,379,506,770,200 | 5,048,768,242,900,927,000 | 34.850932 | 80 | 0.638947 | false |
DPaaS-Raksha/horizon | openstack_dashboard/dashboards/project/loadbalancers/tabs.py | 9 | 5153 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013, Big Switch Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from django.utils.translation import ugettext as _
from horizon import exceptions
from horizon import tabs
from horizon import tables
from openstack_dashboard import api
from .tables import PoolsTable, MembersTable, MonitorsTable
class PoolsTab(tabs.TableTab):
table_classes = (PoolsTable,)
name = _("Pools")
slug = "pools"
template_name = "horizon/common/_detail_table.html"
def get_poolstable_data(self):
try:
pools = api.lbaas.pools_get(self.tab_group.request)
poolsFormatted = [p.readable(self.tab_group.request) for
p in pools]
except:
poolsFormatted = []
exceptions.handle(self.tab_group.request,
_('Unable to retrieve pools list.'))
return poolsFormatted
class MembersTab(tabs.TableTab):
table_classes = (MembersTable,)
name = _("Members")
slug = "members"
template_name = "horizon/common/_detail_table.html"
def get_memberstable_data(self):
try:
members = api.lbaas.members_get(self.tab_group.request)
membersFormatted = [m.readable(self.tab_group.request) for
m in members]
except:
membersFormatted = []
exceptions.handle(self.tab_group.request,
_('Unable to retrieve member list.'))
return membersFormatted
class MonitorsTab(tabs.TableTab):
table_classes = (MonitorsTable,)
name = _("Monitors")
slug = "monitors"
template_name = "horizon/common/_detail_table.html"
def get_monitorstable_data(self):
try:
monitors = api.lbaas.pool_health_monitors_get(
self.tab_group.request)
except:
monitors = []
exceptions.handle(self.tab_group.request,
_('Unable to retrieve monitor list.'))
return monitors
class LoadBalancerTabs(tabs.TabGroup):
slug = "lbtabs"
tabs = (PoolsTab, MembersTab, MonitorsTab)
sticky = True
class PoolDetailsTab(tabs.Tab):
name = _("Pool Details")
slug = "pooldetails"
template_name = "project/loadbalancers/_pool_details.html"
def get_context_data(self, request):
pid = self.tab_group.kwargs['pool_id']
try:
pool = api.lbaas.pool_get(request, pid)
except:
pool = []
exceptions.handle(request,
_('Unable to retrieve pool details.'))
return {'pool': pool}
class VipDetailsTab(tabs.Tab):
name = _("Vip Details")
slug = "vipdetails"
template_name = "project/loadbalancers/_vip_details.html"
def get_context_data(self, request):
vid = self.tab_group.kwargs['vip_id']
try:
vip = api.lbaas.vip_get(request, vid)
except:
vip = []
exceptions.handle(self.tab_group.request,
_('Unable to retrieve vip details.'))
return {'vip': vip}
class MemberDetailsTab(tabs.Tab):
name = _("Member Details")
slug = "memberdetails"
template_name = "project/loadbalancers/_member_details.html"
def get_context_data(self, request):
mid = self.tab_group.kwargs['member_id']
try:
member = api.lbaas.member_get(request, mid)
except:
member = []
exceptions.handle(self.tab_group.request,
_('Unable to retrieve member details.'))
return {'member': member}
class MonitorDetailsTab(tabs.Tab):
name = _("Monitor Details")
slug = "monitordetails"
template_name = "project/loadbalancers/_monitor_details.html"
def get_context_data(self, request):
mid = self.tab_group.kwargs['monitor_id']
try:
monitor = api.lbaas.pool_health_monitor_get(request, mid)
except:
monitor = []
exceptions.handle(self.tab_group.request,
_('Unable to retrieve monitor details.'))
return {'monitor': monitor}
class PoolDetailsTabs(tabs.TabGroup):
slug = "pooltabs"
tabs = (PoolDetailsTab,)
class VipDetailsTabs(tabs.TabGroup):
slug = "viptabs"
tabs = (VipDetailsTab,)
class MemberDetailsTabs(tabs.TabGroup):
slug = "membertabs"
tabs = (MemberDetailsTab,)
class MonitorDetailsTabs(tabs.TabGroup):
slug = "monitortabs"
tabs = (MonitorDetailsTab,)
| apache-2.0 | 5,372,452,471,159,941,000 | 6,509,256,602,446,043,000 | 29.311765 | 78 | 0.61013 | false |
edevil/django | django/contrib/auth/middleware.py | 172 | 5116 | from django.contrib import auth
from django.contrib.auth import load_backend
from django.contrib.auth.backends import RemoteUserBackend
from django.core.exceptions import ImproperlyConfigured
from django.utils.functional import SimpleLazyObject
def get_user(request):
if not hasattr(request, '_cached_user'):
request._cached_user = auth.get_user(request)
return request._cached_user
class AuthenticationMiddleware(object):
def process_request(self, request):
assert hasattr(request, 'session'), (
"The Django authentication middleware requires session middleware "
"to be installed. Edit your MIDDLEWARE_CLASSES setting to insert "
"'django.contrib.sessions.middleware.SessionMiddleware' before "
"'django.contrib.auth.middleware.AuthenticationMiddleware'."
)
request.user = SimpleLazyObject(lambda: get_user(request))
class SessionAuthenticationMiddleware(object):
"""
Formerly, a middleware for invalidating a user's sessions that don't
correspond to the user's current session authentication hash. However, it
caused the "Vary: Cookie" header on all responses.
Now a backwards compatibility shim that enables session verification in
auth.get_user() if this middleware is in MIDDLEWARE_CLASSES.
"""
def process_request(self, request):
pass
class RemoteUserMiddleware(object):
"""
Middleware for utilizing Web-server-provided authentication.
If request.user is not authenticated, then this middleware attempts to
authenticate the username passed in the ``REMOTE_USER`` request header.
If authentication is successful, the user is automatically logged in to
persist the user in the session.
The header used is configurable and defaults to ``REMOTE_USER``. Subclass
this class and change the ``header`` attribute if you need to use a
different header.
"""
# Name of request header to grab username from. This will be the key as
# used in the request.META dictionary, i.e. the normalization of headers to
# all uppercase and the addition of "HTTP_" prefix apply.
header = "REMOTE_USER"
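    # Illustrative subclass using a different header (the header name is an
    # example only):
    #
    #     class CustomHeaderMiddleware(RemoteUserMiddleware):
    #         header = 'HTTP_AUTHUSER'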
def process_request(self, request):
# AuthenticationMiddleware is required so that request.user exists.
if not hasattr(request, 'user'):
raise ImproperlyConfigured(
"The Django remote user auth middleware requires the"
" authentication middleware to be installed. Edit your"
" MIDDLEWARE_CLASSES setting to insert"
" 'django.contrib.auth.middleware.AuthenticationMiddleware'"
" before the RemoteUserMiddleware class.")
try:
username = request.META[self.header]
except KeyError:
# If specified header doesn't exist then remove any existing
# authenticated remote-user, or return (leaving request.user set to
# AnonymousUser by the AuthenticationMiddleware).
if request.user.is_authenticated():
self._remove_invalid_user(request)
return
# If the user is already authenticated and that user is the user we are
# getting passed in the headers, then the correct user is already
# persisted in the session and we don't need to continue.
if request.user.is_authenticated():
if request.user.get_username() == self.clean_username(username, request):
return
else:
# An authenticated user is associated with the request, but
# it does not match the authorized user in the header.
self._remove_invalid_user(request)
# We are seeing this user for the first time in this session, attempt
# to authenticate the user.
user = auth.authenticate(remote_user=username)
if user:
# User is valid. Set request.user and persist user in the session
# by logging the user in.
request.user = user
auth.login(request, user)
def clean_username(self, username, request):
"""
Allows the backend to clean the username, if the backend defines a
clean_username method.
"""
backend_str = request.session[auth.BACKEND_SESSION_KEY]
backend = auth.load_backend(backend_str)
try:
username = backend.clean_username(username)
except AttributeError: # Backend has no clean_username method.
pass
return username
def _remove_invalid_user(self, request):
"""
Removes the current authenticated user in the request which is invalid
but only if the user is authenticated via the RemoteUserBackend.
"""
try:
stored_backend = load_backend(request.session.get(auth.BACKEND_SESSION_KEY, ''))
except ImportError:
# backend failed to load
auth.logout(request)
else:
if isinstance(stored_backend, RemoteUserBackend):
auth.logout(request)
| bsd-3-clause | -435,194,016,689,135,600 | 2,708,625,090,878,712,000 | 41.633333 | 92 | 0.663018 | false |
m7thon/tom | python/tests/sequence_test.py | 1 | 12007 | import unittest
import tom
class PlainPySequence:
"""Simple list of symbols: [o_0, ..., o_{N-1}]."""
def __init__(self, data):
if type(data) is list:
self.data = data
else:
self.data = list(range(data))
def __repr__(self):
return str(self.data)
def length(self):
return len(self.data)
def raw(self):
return self
def equals(self, tom_seq):
if tom_seq.rawSize() != len(self.data):
return False
for idx in range(len(self.data)):
if self.data[idx] != tom_seq.rawAt(idx):
return False
return True
def o(self, idx):
return self.data[idx]
def u(self, idx):
return 0
def sub(self, idx, length):
return PlainPySequence(self.data[idx:idx + length:1 if length > 0 else -1])
rawSub = sub
def slice(self, begin, end, forwards):
if begin == tom.NoIndex:
begin = None
if end == tom.NoIndex:
end = None
return PlainPySequence(self.data[begin:end:1 if forwards else -1])
rawSlice = slice
def reverse(self):
return PlainPySequence(list(reversed(self.data)))
def io(self, rev):
if self.length() == 0:
ret = IoPySequence([])
ret.reversed = rev
return ret
dat = self.data[:]
if (not rev and dat[0] > 0) or (rev and dat[0] < 0):
dat = [None] + dat
if len(dat) % 2 != 0:
dat = dat + [None]
i = dat[::2]
o = dat[1::2]
if rev:
i, o = o, i
ret = IoPySequence(list(zip(i, o)))
ret.reversed = rev
return ret
class IoPySequence:
""" A list of io-pairs: [(u_0, o_0), ..., (u_{N-1}, o_{N-1})]. Note that for testing inputs will be negative
and outputs positive!"""
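    # Illustrative example (assumed): IoPySequence(2).data == [(-1, 1), (-2, 2)]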
def __init__(self, data):
if type(data) is list:
self.data = data
else:
self.data = list(zip(range(-1, -(data + 1), -1), range(1, data + 1, 1)))
self.reversed = False
def __repr__(self):
return str(self.data)
def raw(self):
if not self.reversed:
return PlainPySequence([i for pair in self.data for i in pair if i is not None])
else:
return PlainPySequence([i for pair in self.data for i in reversed(pair) if i is not None])
def length(self):
return len(self.data)
def equals(self, tom_seq):
if not tom_seq.isIO():
return False
if tom_seq.length() != len(self.data):
return False
if len(self.data) > 0 and tom_seq.isReversed() != self.reversed:
return False
return self.raw().equals(tom_seq)
def o(self, idx):
return self.data[idx][1]
def u(self, idx):
return self.data[idx][0]
def reverse(self):
rev = IoPySequence(list(reversed(self.data)))
rev.reversed = not self.reversed
return rev
def sub(self, idx, length):
ret = IoPySequence(self.data[idx:idx + length:1 if length > 0 else -1])
if length < 0:
ret.reversed = not self.reversed
else:
ret.reversed = self.reversed
return ret
def slice(self, begin, end, forwards):
if begin == tom.NoIndex:
begin = None
if end == tom.NoIndex:
end = None
ret = IoPySequence(self.data[begin:end:1 if forwards else -1])
if not forwards:
ret.reversed = not self.reversed
else:
ret.reversed = self.reversed
return ret
def rawSub(self, idx, length):
return self.raw().sub(idx, length).io((self.reversed) == (length >= 0))
def rawSlice(self, begin, end, forwards):
return self.raw().slice(begin, end, forwards).io(self.reversed == forwards)
class TestSequence(unittest.TestCase):
def create_subs(self):
pass
def cases(self):
size = 5
l = list(range(1, size + 1))
py_base = PlainPySequence(l)
tom_base = tom.Sequence(l, 10, 0)
for b in range(size):
for e in range(b, size + 1):
try:
tom_seq = tom_base.rawSlice(b, e, True)
py_seq = py_base.slice(b, e, True)
except:
self.assertTrue(False, "Error creating test cases by rawSlice by " + str(tom_base) + " [%d:%d:%d]" % (b, e, True))
self.assertTrue(py_seq.equals(tom_seq), "Error creating test cases by rawSlice for " + str(tom_seq) + " and " + str(py_seq))
yield (tom_seq, py_seq)
py_seq = py_seq.reverse()
tom_seq = tom_seq.reverse()
self.assertTrue(py_seq.equals(tom_seq), "Error creating test cases by rawSlice for " + str(tom_seq) + " and " + str(py_seq))
yield (tom_seq, py_seq)
i = list(range(-1, -size - 1, -1))
o = list(range(1, size + 1, 1))
l = list(zip(i, o))
x = []
for p in l:
x.extend(p)
py_base = IoPySequence(l)
tom_base = tom.Sequence(x, 10, 1)
for b in range(2 * size):
for e in range(b, 2 * size + 1):
tom_seq = tom_base.rawSlice(b, e, True)
py_seq = py_base.rawSlice(b, e, True)
self.assertTrue(py_seq.equals(tom_seq), "Error creating test cases by rawSlice for " + str(tom_seq) + " and " + str(py_seq))
yield (tom_seq, py_seq)
py_seq = py_seq.reverse()
tom_seq = tom_seq.reverse()
self.assertTrue(py_seq.equals(tom_seq), "Error creating test cases by rawSlice for " + str(tom_seq) + " and " + str(py_seq))
yield (tom_seq, py_seq)
def test_json_io(self):
for tom_seq, py_seq in self.cases():
seq = tom.Sequence(tom_seq.toJSON())
self.assertTrue(seq == tom_seq, "to and from json gives non-equal sequence for" + str(tom_seq) + " and " + str(seq))
self.assertTrue(seq.nInputSymbols() == tom_seq.nInputSymbols(), "alphabet changed: " + str(tom_seq) + " and " + str(seq))
self.assertTrue(seq.nOutputSymbols() == tom_seq.nOutputSymbols(), "alphabet changed: " + str(tom_seq) + " and " + str(seq))
json = """{"Type":"Sequence","nU":1,"nO":10,"data":[-1,1,-2,2],"size":4}"""
tom_seq = tom.Sequence(json)
py_seq = IoPySequence([(-1, 1), (-2, 2)])
self.assertTrue(tom_seq.nInputSymbols() == 1 and tom_seq.nOutputSymbols() == 10 and py_seq.equals(tom_seq), "Error reading simple json-string")
def test_copy(self):
for tom_seq, py_seq in self.cases():
seq = tom_seq.copy()
self.assertTrue(seq == tom_seq, ".copy() not equal:" + str(tom_seq) + " and " + str(seq))
self.assertTrue(seq.nInputSymbols() == tom_seq.nInputSymbols(), "alphabet changed: " + str(tom_seq) + " and " + str(seq))
self.assertTrue(seq.nOutputSymbols() == tom_seq.nOutputSymbols(), "alphabet changed: " + str(tom_seq) + " and " + str(seq))
def test_accessors(self):
for tom_seq, py_seq in self.cases():
for idx in range(-py_seq.length(), py_seq.length()):
try:
self.assertTrue(tom_seq.o(idx) == py_seq.o(idx), ".o(%d) not correct: " % idx + str(tom_seq) + " and " + str(py_seq))
except:
if py_seq.o(idx) is not None:
self.assertTrue(False, ".o(%d) should be %d: " % (idx, py_seq.o(idx)) + str(tom_seq) + " and " + str(py_seq))
try:
self.assertTrue(tom_seq.u(idx) == py_seq.u(idx), ".u(%d) not correct: " % idx + str(tom_seq) + " and " + str(py_seq))
except:
if py_seq.u(idx) is not None:
self.assertTrue(False, ".u(%d) should be %d: " % (idx, py_seq.u(idx)) + str(tom_seq) + " and " + str(py_seq))
for idx in range(-py_seq.raw().length(), py_seq.raw().length()):
self.assertTrue(tom_seq.rawAt(idx) == py_seq.raw().o(idx), "Error with rawAt: " + str(tom_seq))
self.assertTrue(tom_seq.rawAt(idx) == tom_seq[idx], "Error with python []: " + str(tom_seq))
self.assertTrue(list(tom_seq) == py_seq.raw().data, "Error with python iterator access: " + str(tom_seq))
def test_rawSub(self):
for tom_seq, py_seq in self.cases():
for idx in list(range(py_seq.raw().length())):
for l in list(range(py_seq.raw().length()-idx)):
self.assertTrue(py_seq.rawSub(idx, l).equals(tom_seq.rawSub(idx, l)), "Sub error: " + str(tom_seq) + " [%d:%d]" % (idx, l))
for l in list(range(-1, -idx-1, -1)):
self.assertTrue(py_seq.rawSub(idx, l).equals(tom_seq.rawSub(idx, l)), "Sub error: " + str(tom_seq) + " [%d:%d]" % (idx, l))
def test_sub(self):
for tom_seq, py_seq in self.cases():
for idx in list(range(py_seq.length())):
for l in list(range(py_seq.length()-idx)):
self.assertTrue(py_seq.sub(idx, l).equals(tom_seq.sub(idx, l)), "Sub error: " + str(tom_seq) + " [%d:%d]" % (idx, l))
for l in list(range(-1, -idx-1, -1)):
self.assertTrue(py_seq.sub(idx, l).equals(tom_seq.sub(idx, l)), "Sub error: " + str(tom_seq) + " [%d:%d]" % (idx, l))
def test_rawSlice(self):
for tom_seq, py_seq in self.cases():
for b in [tom.NoIndex] + list(range(py_seq.raw().length())):
if b == tom.NoIndex:
es = [tom.NoIndex] + list(range(py_seq.raw().length() + 1))
else:
es = [tom.NoIndex] + list(range(b, py_seq.raw().length()+1))
for e in es:
self.assertTrue(py_seq.rawSlice(b, e, True).equals(tom_seq.rawSlice(b, e, True)), "Slicing error: " + str(tom_seq) + " [%d:%d:%d]" % (b, e, True))
self.assertTrue(tom_seq.rawSlice(b, e, True) == tom_seq[None if b == tom.NoIndex else b:None if e == tom.NoIndex else e],
"Python-slicing error: " + str(tom_seq) + " [%d:%d:%d]" % (b, e, True))
for b in [tom.NoIndex] + list(range(py_seq.raw().length())):
if b == tom.NoIndex:
es = [tom.NoIndex] + list(range(py_seq.raw().length()))
else:
es = [tom.NoIndex] + list(range(0, b+1))
for e in es:
self.assertTrue(py_seq.rawSlice(b, e, False).equals(tom_seq.rawSlice(b, e, False)), "Slicing error: " + str(tom_seq) + " [%d:%d:%d]" % (b, e, False))
self.assertTrue(tom_seq.rawSlice(b, e, False) == tom_seq[None if b == tom.NoIndex else b:None if e == tom.NoIndex else e:-1],
"Python-slicing error: " + str(tom_seq) + " [%d:%d:%d]" % (b, e, False))
def test_slice(self):
for tom_seq, py_seq in self.cases():
for b in [tom.NoIndex] + list(range(py_seq.length())):
if b == tom.NoIndex:
es = [tom.NoIndex] + list(range(py_seq.length() + 1))
else:
es = [tom.NoIndex] + list(range(b, py_seq.length()+1))
for e in es:
self.assertTrue(py_seq.slice(b, e, True).equals(tom_seq.slice(b, e, True)), "Slicing error: " + str(tom_seq) + " [%d:%d:%d]" % (b, e, True))
for b in [tom.NoIndex] + list(range(py_seq.length())):
if b == tom.NoIndex:
es = [tom.NoIndex] + list(range(py_seq.length()))
else:
es = [tom.NoIndex] + list(range(0, b+1))
for e in es:
self.assertTrue(py_seq.slice(b, e, False).equals(tom_seq.slice(b, e, False)), "Slicing error: " + str(tom_seq) + " [%d:%d:%d]" % (b, e, False))
| mit | 862,190,530,162,204,200 | 257,491,586,637,695,000 | 42.981685 | 169 | 0.513034 | false |
alabarga/SocialLearning | SocialLearning/apps/learningobjects/management/commands/describe.py | 1 | 6166 | #-*- coding: UTF-8 -*-
from learningobjects.utils.search import *
from learningobjects.utils.parsers import *
from django.core.management.base import BaseCommand, CommandError
from learningobjects.models import *
from optparse import make_option
from learningobjects.utils import feedfinder
from learningobjects.management.commands import add
from django.core.management import call_command
import feedparser
from learningobjects.utils.parsers import ReadibilityParser
from learningobjects.utils.alchemyapi import AlchemyAPI
from learningobjects.utils import slideshare
from textblob import TextBlob
# pip install -U textblob
# python -m textblob.download_corpora
import sys, traceback
import re
import hashlib
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('-q','--query',
dest='query',
help='Palabra clave a buscar'),
)+(
make_option('-u','--url',
dest='URL',
help='URL del recurso'),
)
def add_arguments(self, parser):
# Positional arguments
parser.add_argument('-u','--url',
dest='URL',
help='URL del recurso')
#parser.add_argument('poll_id', nargs='+', type=int)
# Named (optional) arguments
parser.add_argument('--verbose',
action='store_true',
dest='verbose',
default=False,
help='Log actions to console')
def handle(self, *args, **options):
results=[]
alchemyapi = AlchemyAPI()
if options['URL'] == None:
inp=raw_input("This will describe EVERY Resource with ´Added´ status on the database. Are you sure(y/n)?:")
inp=inp.lower()
if inp=="y" or inp=="yes":
topics = Topic.objects.all()
#resources=Resource.objects.filter(status=Resource.ADDED)
col = Collection.objects.get(pk=23)
resources = col.resources.all()
slides = slideshare.SlideshareAPI()
for res in resources:
try:
url = res.url
res.identifier = hashlib.md5(url).hexdigest()
u = URLObject(url)
print "%s (%s)" % (u.url, u.content_type)
if 'application/pdf' in u.content_type:
pd = PDFParser(url).describe()
res.fulltext = pd.fulltext
res.content_type = 'PDF'
# slideshare
elif bool(re.match('^(http(s|):\/\/|)(www.|)slideshare.net',u.url)):
                            slide = slides.get_slideshow(slideshow_url=u.url)
res.title = slide['Slideshow']['Title']
res.description = slide['Slideshow']['Description']
res.author = slide['Slideshow']['Username']
res.fulltext = slide['Slideshow']['Embed']
res.interest = int(slide['Slideshow']['NumViews']) + int(slide['Slideshow']['NumFavorites']) + int(slide['Slideshow']['NumDownloads'])
rc_url = 'https://www.slideshare.net/' + slide['Slideshow']['Username'] + '/presentations'
rc_rss = 'http://es.slideshare.net/rss/user/' + slide['Slideshow']['Username']
rc, created = ResourceContainer.objects.get_or_create(url=rc_url, rss=rc_rss, name=slide['Slideshow']['Username'] )
rc.resources.add(res)
# youtube
elif bool(re.match('^(http(s|):\/\/|)(www.|)youtube.com',u.url)):
yt_desc = YoutubeParser(url).describe()
res.title = yt_desc.title
res.description = yt_desc.description
res.interest = yt_desc.viewcount
res.content_type = 'VIDEO'
res.author = yt_desc.username
rc_url = 'https://www.youtube.com/user/' + yt_desc.username
rc_rss = 'http://gdata.youtube.com/feeds/api/users/' + yt_desc.username + '/uploads'
                            rc, created = ResourceContainer.objects.get_or_create(url=rc_url, rss=rc_rss, name=yt_desc.username)
rc.resources.add(res)
elif 'text/html' in u.content_type:
rp_desc = ReadibilityParser(url).describe()
gp_desc = GooseParser(url).describe()
sm_desc = SummaryParser(url).describe()
res.title = rp_desc.title
res.description = sm_desc.summary
res.fulltext = gp_desc.text
np = TextBlob(gp_desc.text)
res.language = np.detect_language()
res.author = rp_desc.author
res.content_type = 'WEB'
else:
continue
#for t in topics:
# rel, created = Relevance.objects.get_or_create(resource=res, topic=t)
# rel.score = random.random()
res.status=Resource.DESCRIBED
res.save()
except:
traceback.print_exc(file=sys.stdout)
else:
url=options['URL']
resource=Resource.objects.filter(url=url,status=Resource.ADDED)
if len(resource)>0:
                data = ReadibilityParser(url).describe()
resource.update(status=Resource.DESCRIBED)
else:
print "That link is not in the database or is not with ´Added´ status. Add it first (python manage.py add -u "+url+")"
| gpl-3.0 | 6,507,754,186,784,852,000 | -554,196,459,109,482,600 | 41.496552 | 162 | 0.493833 | false |
SoteriousIdaofevil/xmlstar | mingw/libxml2-2.9.1/python/tests/xpathleak.py | 30 | 1563 | #!/usr/bin/python
import sys, libxml2
libxml2.debugMemory(True)
expect="""--> Invalid expression
--> xmlXPathEval: evaluation failed
--> Invalid expression
--> xmlXPathEval: evaluation failed
--> Invalid expression
--> xmlXPathEval: evaluation failed
--> Invalid expression
--> xmlXPathEval: evaluation failed
--> Invalid expression
--> xmlXPathEval: evaluation failed
--> Invalid expression
--> xmlXPathEval: evaluation failed
--> Invalid expression
--> xmlXPathEval: evaluation failed
--> Invalid expression
--> xmlXPathEval: evaluation failed
--> Invalid expression
--> xmlXPathEval: evaluation failed
--> Invalid expression
--> xmlXPathEval: evaluation failed
"""
err=""
def callback(ctx, str):
global err
err = err + "%s %s" % (ctx, str)
libxml2.registerErrorHandler(callback, "-->")
doc = libxml2.parseDoc("<fish/>")
ctxt = doc.xpathNewContext()
ctxt.setContextNode(doc)
badexprs = (
":false()", "bad:()", "bad(:)", ":bad(:)", "bad:(:)", "bad:bad(:)",
"a:/b", "/c:/d", "//e:/f", "g://h"
)
for expr in badexprs:
try:
ctxt.xpathEval(expr)
except libxml2.xpathError:
pass
else:
print("Unexpectedly legal expression:", expr)
ctxt.xpathFreeContext()
doc.freeDoc()
if err != expect:
print("error")
print("received %s" %(err))
print("expected %s" %(expect))
sys.exit(1)
libxml2.cleanupParser()
leakedbytes = libxml2.debugMemory(True)
if leakedbytes == 0:
print("OK")
else:
print("Memory leak", leakedbytes, "bytes")
# drop file to .memdump file in cwd, but won't work if not compiled in
libxml2.dumpMemory()
| mit | -5,058,315,396,696,640,000 | 4,745,587,779,916,686,000 | 23.046154 | 71 | 0.682022 | false |
ModdedPA/android_external_chromium_org | third_party/closure_linter/closure_linter/error_fixer.py | 135 | 17305 | #!/usr/bin/env python
#
# Copyright 2007 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Main class responsible for automatically fixing simple style violations."""
__author__ = '[email protected] (Robert Walker)'
import re
import gflags as flags
from closure_linter import errors
from closure_linter import javascriptstatetracker
from closure_linter import javascripttokens
from closure_linter import requireprovidesorter
from closure_linter import tokenutil
from closure_linter.common import errorhandler
# Shorthand
Token = javascripttokens.JavaScriptToken
Type = javascripttokens.JavaScriptTokenType
END_OF_FLAG_TYPE = re.compile(r'(}?\s*)$')
# Regex to represent common mistake inverting author name and email as
# @author User Name (user@company)
INVERTED_AUTHOR_SPEC = re.compile(r'(?P<leading_whitespace>\s*)'
'(?P<name>[^(]+)'
'(?P<whitespace_after_name>\s+)'
'\('
'(?P<email>[^\s]+@[^)\s]+)'
'\)'
'(?P<trailing_characters>.*)')
FLAGS = flags.FLAGS
flags.DEFINE_boolean('disable_indentation_fixing', False,
'Whether to disable automatic fixing of indentation.')
class ErrorFixer(errorhandler.ErrorHandler):
"""Object that fixes simple style errors."""
def __init__(self, external_file=None):
"""Initialize the error fixer.
Args:
external_file: If included, all output will be directed to this file
instead of overwriting the files the errors are found in.
"""
errorhandler.ErrorHandler.__init__(self)
self._file_name = None
self._file_token = None
self._external_file = external_file
def HandleFile(self, filename, first_token):
"""Notifies this ErrorPrinter that subsequent errors are in filename.
Args:
filename: The name of the file about to be checked.
first_token: The first token in the file.
"""
self._file_name = filename
self._file_token = first_token
self._file_fix_count = 0
self._file_changed_lines = set()
def _AddFix(self, tokens):
"""Adds the fix to the internal count.
Args:
tokens: The token or sequence of tokens changed to fix an error.
"""
self._file_fix_count += 1
if hasattr(tokens, 'line_number'):
self._file_changed_lines.add(tokens.line_number)
else:
for token in tokens:
self._file_changed_lines.add(token.line_number)
def HandleError(self, error):
"""Attempts to fix the error.
Args:
error: The error object
"""
code = error.code
token = error.token
if code == errors.JSDOC_PREFER_QUESTION_TO_PIPE_NULL:
iterator = token.attached_object.type_start_token
if iterator.type == Type.DOC_START_BRACE or iterator.string.isspace():
iterator = iterator.next
leading_space = len(iterator.string) - len(iterator.string.lstrip())
iterator.string = '%s?%s' % (' ' * leading_space,
iterator.string.lstrip())
# Cover the no outer brace case where the end token is part of the type.
while iterator and iterator != token.attached_object.type_end_token.next:
iterator.string = iterator.string.replace(
'null|', '').replace('|null', '')
iterator = iterator.next
# Create a new flag object with updated type info.
token.attached_object = javascriptstatetracker.JsDocFlag(token)
self._AddFix(token)
elif code == errors.JSDOC_MISSING_OPTIONAL_TYPE:
iterator = token.attached_object.type_end_token
if iterator.type == Type.DOC_END_BRACE or iterator.string.isspace():
iterator = iterator.previous
ending_space = len(iterator.string) - len(iterator.string.rstrip())
iterator.string = '%s=%s' % (iterator.string.rstrip(),
' ' * ending_space)
# Create a new flag object with updated type info.
token.attached_object = javascriptstatetracker.JsDocFlag(token)
self._AddFix(token)
elif code in (errors.MISSING_SEMICOLON_AFTER_FUNCTION,
errors.MISSING_SEMICOLON):
semicolon_token = Token(';', Type.SEMICOLON, token.line,
token.line_number)
tokenutil.InsertTokenAfter(semicolon_token, token)
token.metadata.is_implied_semicolon = False
semicolon_token.metadata.is_implied_semicolon = False
self._AddFix(token)
elif code in (errors.ILLEGAL_SEMICOLON_AFTER_FUNCTION,
errors.REDUNDANT_SEMICOLON,
errors.COMMA_AT_END_OF_LITERAL):
tokenutil.DeleteToken(token)
self._AddFix(token)
elif code == errors.INVALID_JSDOC_TAG:
if token.string == '@returns':
token.string = '@return'
self._AddFix(token)
elif code == errors.FILE_MISSING_NEWLINE:
# This error is fixed implicitly by the way we restore the file
self._AddFix(token)
elif code == errors.MISSING_SPACE:
if error.position:
if error.position.IsAtBeginning():
tokenutil.InsertSpaceTokenAfter(token.previous)
elif error.position.IsAtEnd(token.string):
tokenutil.InsertSpaceTokenAfter(token)
else:
token.string = error.position.Set(token.string, ' ')
self._AddFix(token)
elif code == errors.EXTRA_SPACE:
if error.position:
token.string = error.position.Set(token.string, '')
self._AddFix(token)
elif code == errors.JSDOC_TAG_DESCRIPTION_ENDS_WITH_INVALID_CHARACTER:
token.string = error.position.Set(token.string, '.')
self._AddFix(token)
elif code == errors.MISSING_LINE:
if error.position.IsAtBeginning():
tokenutil.InsertBlankLineAfter(token.previous)
else:
tokenutil.InsertBlankLineAfter(token)
self._AddFix(token)
elif code == errors.EXTRA_LINE:
tokenutil.DeleteToken(token)
self._AddFix(token)
elif code == errors.WRONG_BLANK_LINE_COUNT:
if not token.previous:
# TODO(user): Add an insertBefore method to tokenutil.
return
num_lines = error.fix_data
should_delete = False
if num_lines < 0:
num_lines *= -1
should_delete = True
for i in xrange(1, num_lines + 1):
if should_delete:
# TODO(user): DeleteToken should update line numbers.
tokenutil.DeleteToken(token.previous)
else:
tokenutil.InsertBlankLineAfter(token.previous)
self._AddFix(token)
elif code == errors.UNNECESSARY_DOUBLE_QUOTED_STRING:
end_quote = tokenutil.Search(token, Type.DOUBLE_QUOTE_STRING_END)
if end_quote:
single_quote_start = Token(
"'", Type.SINGLE_QUOTE_STRING_START, token.line, token.line_number)
single_quote_end = Token(
"'", Type.SINGLE_QUOTE_STRING_START, end_quote.line,
token.line_number)
tokenutil.InsertTokenAfter(single_quote_start, token)
tokenutil.InsertTokenAfter(single_quote_end, end_quote)
tokenutil.DeleteToken(token)
tokenutil.DeleteToken(end_quote)
self._AddFix([token, end_quote])
elif code == errors.MISSING_BRACES_AROUND_TYPE:
fixed_tokens = []
start_token = token.attached_object.type_start_token
if start_token.type != Type.DOC_START_BRACE:
leading_space = (
len(start_token.string) - len(start_token.string.lstrip()))
if leading_space:
start_token = tokenutil.SplitToken(start_token, leading_space)
# Fix case where start and end token were the same.
if token.attached_object.type_end_token == start_token.previous:
token.attached_object.type_end_token = start_token
new_token = Token('{', Type.DOC_START_BRACE, start_token.line,
start_token.line_number)
tokenutil.InsertTokenAfter(new_token, start_token.previous)
token.attached_object.type_start_token = new_token
fixed_tokens.append(new_token)
end_token = token.attached_object.type_end_token
if end_token.type != Type.DOC_END_BRACE:
# If the start token was a brace, the end token will be a
# FLAG_ENDING_TYPE token, if there wasn't a starting brace then
# the end token is the last token of the actual type.
last_type = end_token
if not fixed_tokens:
last_type = end_token.previous
while last_type.string.isspace():
last_type = last_type.previous
# If there was no starting brace then a lone end brace wouldn't have
# been type end token. Now that we've added any missing start brace,
# see if the last effective type token was an end brace.
if last_type.type != Type.DOC_END_BRACE:
trailing_space = (len(last_type.string) -
len(last_type.string.rstrip()))
if trailing_space:
tokenutil.SplitToken(last_type,
len(last_type.string) - trailing_space)
new_token = Token('}', Type.DOC_END_BRACE, last_type.line,
last_type.line_number)
tokenutil.InsertTokenAfter(new_token, last_type)
token.attached_object.type_end_token = new_token
fixed_tokens.append(new_token)
self._AddFix(fixed_tokens)
elif code == errors.GOOG_REQUIRES_NOT_ALPHABETIZED:
require_start_token = error.fix_data
sorter = requireprovidesorter.RequireProvideSorter()
sorter.FixRequires(require_start_token)
self._AddFix(require_start_token)
elif code == errors.GOOG_PROVIDES_NOT_ALPHABETIZED:
provide_start_token = error.fix_data
sorter = requireprovidesorter.RequireProvideSorter()
sorter.FixProvides(provide_start_token)
self._AddFix(provide_start_token)
elif code == errors.UNNECESSARY_BRACES_AROUND_INHERIT_DOC:
if token.previous.string == '{' and token.next.string == '}':
tokenutil.DeleteToken(token.previous)
tokenutil.DeleteToken(token.next)
self._AddFix([token])
elif code == errors.INVALID_AUTHOR_TAG_DESCRIPTION:
match = INVERTED_AUTHOR_SPEC.match(token.string)
if match:
token.string = '%s%s%s(%s)%s' % (match.group('leading_whitespace'),
match.group('email'),
match.group('whitespace_after_name'),
match.group('name'),
match.group('trailing_characters'))
self._AddFix(token)
elif (code == errors.WRONG_INDENTATION and
not FLAGS.disable_indentation_fixing):
token = tokenutil.GetFirstTokenInSameLine(token)
actual = error.position.start
expected = error.position.length
if token.type in (Type.WHITESPACE, Type.PARAMETERS) and actual != 0:
token.string = token.string.lstrip() + (' ' * expected)
self._AddFix([token])
else:
# We need to add indentation.
new_token = Token(' ' * expected, Type.WHITESPACE,
token.line, token.line_number)
# Note that we'll never need to add indentation at the first line,
# since it will always not be indented. Therefore it's safe to assume
# token.previous exists.
tokenutil.InsertTokenAfter(new_token, token.previous)
self._AddFix([token])
elif code in [errors.MALFORMED_END_OF_SCOPE_COMMENT,
errors.MISSING_END_OF_SCOPE_COMMENT]:
# Only fix cases where }); is found with no trailing content on the line
# other than a comment. Value of 'token' is set to } for this error.
if (token.type == Type.END_BLOCK and
token.next.type == Type.END_PAREN and
token.next.next.type == Type.SEMICOLON):
current_token = token.next.next.next
removed_tokens = []
while current_token and current_token.line_number == token.line_number:
if current_token.IsAnyType(Type.WHITESPACE,
Type.START_SINGLE_LINE_COMMENT,
Type.COMMENT):
removed_tokens.append(current_token)
current_token = current_token.next
else:
return
if removed_tokens:
tokenutil.DeleteTokens(removed_tokens[0], len(removed_tokens))
whitespace_token = Token(' ', Type.WHITESPACE, token.line,
token.line_number)
start_comment_token = Token('//', Type.START_SINGLE_LINE_COMMENT,
token.line, token.line_number)
comment_token = Token(' goog.scope', Type.COMMENT, token.line,
token.line_number)
insertion_tokens = [whitespace_token, start_comment_token,
comment_token]
tokenutil.InsertTokensAfter(insertion_tokens, token.next.next)
self._AddFix(removed_tokens + insertion_tokens)
elif code in [errors.EXTRA_GOOG_PROVIDE, errors.EXTRA_GOOG_REQUIRE]:
tokens_in_line = tokenutil.GetAllTokensInSameLine(token)
tokenutil.DeleteTokens(tokens_in_line[0], len(tokens_in_line))
self._AddFix(tokens_in_line)
elif code in [errors.MISSING_GOOG_PROVIDE, errors.MISSING_GOOG_REQUIRE]:
is_provide = code == errors.MISSING_GOOG_PROVIDE
is_require = code == errors.MISSING_GOOG_REQUIRE
missing_namespaces = error.fix_data[0]
need_blank_line = error.fix_data[1]
if need_blank_line is None:
# TODO(user): This happens when there are no existing
# goog.provide or goog.require statements to position new statements
# relative to. Consider handling this case with a heuristic.
return
insert_location = token.previous
# If inserting a missing require with no existing requires, insert a
# blank line first.
if need_blank_line and is_require:
tokenutil.InsertBlankLineAfter(insert_location)
insert_location = insert_location.next
for missing_namespace in missing_namespaces:
new_tokens = self._GetNewRequireOrProvideTokens(
is_provide, missing_namespace, insert_location.line_number + 1)
tokenutil.InsertLineAfter(insert_location, new_tokens)
insert_location = new_tokens[-1]
self._AddFix(new_tokens)
# If inserting a missing provide with no existing provides, insert a
# blank line after.
if need_blank_line and is_provide:
tokenutil.InsertBlankLineAfter(insert_location)
def _GetNewRequireOrProvideTokens(self, is_provide, namespace, line_number):
"""Returns a list of tokens to create a goog.require/provide statement.
Args:
is_provide: True if getting tokens for a provide, False for require.
namespace: The required or provided namespaces to get tokens for.
line_number: The line number the new require or provide statement will be
on.
Returns:
Tokens to create a new goog.require or goog.provide statement.
"""
string = 'goog.require'
if is_provide:
string = 'goog.provide'
line_text = string + '(\'' + namespace + '\');\n'
return [
Token(string, Type.IDENTIFIER, line_text, line_number),
Token('(', Type.START_PAREN, line_text, line_number),
Token('\'', Type.SINGLE_QUOTE_STRING_START, line_text, line_number),
Token(namespace, Type.STRING_TEXT, line_text, line_number),
Token('\'', Type.SINGLE_QUOTE_STRING_END, line_text, line_number),
Token(')', Type.END_PAREN, line_text, line_number),
Token(';', Type.SEMICOLON, line_text, line_number)
]
def FinishFile(self):
"""Called when the current file has finished style checking.
Used to go back and fix any errors in the file.
"""
if self._file_fix_count:
f = self._external_file
if not f:
print 'Fixed %d errors in %s' % (self._file_fix_count, self._file_name)
f = open(self._file_name, 'w')
token = self._file_token
char_count = 0
while token:
f.write(token.string)
char_count += len(token.string)
if token.IsLastInLine():
f.write('\n')
if char_count > 80 and token.line_number in self._file_changed_lines:
print 'WARNING: Line %d of %s is now longer than 80 characters.' % (
token.line_number, self._file_name)
char_count = 0
token = token.next
if not self._external_file:
# Close the file if we created it
f.close()
| bsd-3-clause | 5,617,445,518,697,959,000 | -201,948,209,508,810,460 | 37.713647 | 80 | 0.629587 | false |
OpenTechFund/WebApp | addressfield/fields.py | 1 | 1809 | import json
from os import path
from django import forms
from django.core.exceptions import ValidationError
from .widgets import AddressWidget
basepath = path.dirname(__file__)
filepath = path.abspath(path.join(basepath, "static", "addressfield.min.json"))
with open(filepath, encoding='utf8') as address_data:
countries = json.load(address_data)['options']
VALIDATION_DATA = {country['iso']: country for country in countries}
def flatten_data(data):
flattened = dict()
for d in data:
for k, v in d.items():
if isinstance(v, list):
value = flatten_data(v)
else:
value = {k: v}
flattened.update(value)
return flattened
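# Illustrative sketch of flatten_data (field names here are hypothetical, not taken
# from addressfield.min.json): nested locality lists are lifted to the top level.
#   flatten_data([{"locality": [{"postalCode": {"label": "ZIP"}}]}, {"premise": {"label": "House"}}])
#   -> {"postalCode": {"label": "ZIP"}, "premise": {"label": "House"}}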
class AddressField(forms.CharField):
"""
The field stores the address in a flattened form,
so the locality components are on the same level as country or premise
"""
widget = AddressWidget
data = VALIDATION_DATA
def clean(self, value, **kwargs):
country = value['country']
try:
country_data = self.data[country]
except KeyError:
raise ValidationError('Invalid country selected')
fields = flatten_data(country_data['fields'])
missing_fields = set(country_data['required']) - set(field for field, value in value.items() if value)
if missing_fields:
missing_field_name = [fields[field]['label'] for field in missing_fields]
raise ValidationError('Please provide data for: {}'.format(', '.join(missing_field_name)))
return super().clean(value, **kwargs)
def to_python(self, value):
return json.dumps(value)
def prepare_value(self, value):
try:
return json.loads(value)
except TypeError:
return value
| gpl-2.0 | -7,876,983,167,601,791,000 | 1,153,497,964,357,142,800 | 28.655738 | 110 | 0.632946 | false |
40023256/W17test | wsgi.py | 1 | 27073 | # coding=utf-8
# the coding declaration above only takes effect on the first or second line of the file
################# (1) Module import section
# import the cherrypy module; to use it on the OpenShift platform it must be installed through setup.py
import cherrypy
# import the built-in os module; being part of Python it needs no setup.py entry
import os
# import the random module
import random
import math
from cherrypy.lib.static import serve_file
# import the gear module
#import gear
import man
import man2
################# (2) Global variable section
# determine the directory of this program file; on Windows it ends with a trailing backslash
_curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
# set the data storage directories for cloud and local execution
if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
    # the program is running in the cloud
    download_root_dir = os.environ['OPENSHIFT_DATA_DIR']
    data_dir = os.environ['OPENSHIFT_DATA_DIR']
else:
    # the program is running locally
    download_root_dir = _curdir + "/local_data/"
    data_dir = _curdir + "/local_data/"
def downloadlist_access_list(files, starti, endi):
# different extension files, associated links were provided
# popup window to view images, video or STL files, other files can be downloaded directly
# files are all the data to list, from starti to endi
# add file size
outstring = ""
for index in range(int(starti)-1, int(endi)):
fileName, fileExtension = os.path.splitext(files[index])
fileExtension = fileExtension.lower()
fileSize = sizeof_fmt(os.path.getsize(download_root_dir+"downloads/"+files[index]))
# images files
if fileExtension == ".png" or fileExtension == ".jpg" or fileExtension == ".gif":
outstring += '<input type="checkbox" name="filename" value="'+files[index]+'"><a href="javascript:;" onClick="window.open(\'/downloads/'+ \
files[index]+'\',\'images\', \'catalogmode\',\'scrollbars\')">'+files[index]+'</a> ('+str(fileSize)+')<br />'
# stl files
elif fileExtension == ".stl":
outstring += '<input type="checkbox" name="filename" value="'+files[index]+'"><a href="javascript:;" onClick="window.open(\'/static/viewstl.html?src=/downloads/'+ \
files[index]+'\',\'images\', \'catalogmode\',\'scrollbars\')">'+files[index]+'</a> ('+str(fileSize)+')<br />'
# flv files
elif fileExtension == ".flv":
outstring += '<input type="checkbox" name="filename" value="'+files[index]+'"><a href="javascript:;" onClick="window.open(\'/flvplayer?filepath=/downloads/'+ \
files[index]+'\',\'images\', \'catalogmode\',\'scrollbars\')">'+files[index]+'</a> ('+str(fileSize)+')<br />'
# direct download files
else:
outstring += "<input type='checkbox' name='filename' value='"+files[index]+"'><a href='/download/?filepath="+download_root_dir.replace('\\', '/')+ \
"downloads/"+files[index]+"'>"+files[index]+"</a> ("+str(fileSize)+")<br />"
return outstring
def sizeof_fmt(num):
for x in ['bytes','KB','MB','GB']:
if num < 1024.0:
return "%3.1f%s" % (num, x)
num /= 1024.0
return "%3.1f%s" % (num, 'TB')
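# Illustrative values: sizeof_fmt(2048) -> "2.0KB", sizeof_fmt(5*1024**2) -> "5.0MB"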
################# (3) Class definition section
# the CherryPy web framework structure is used from here on
# the Midterm class below inherits from object, so it picks up all of object's methods and attributes
class Midterm(object):
    # startup configuration of the Midterm class
    _cp_config = {
    'tools.encode.encoding': 'utf-8',
    'tools.sessions.on' : True,
    'tools.sessions.storage_type' : 'file',
    #'tools.sessions.locking' : 'explicit',
    # sessions are stored as files in the tmp directory under data_dir
    'tools.sessions.storage_path' : data_dir+'/tmp',
    # session lifetime is set to 60 minutes
    'tools.sessions.timeout' : 60
    }
def __init__(self):
# hope to create downloads and images directories
if not os.path.isdir(download_root_dir+"downloads"):
try:
os.makedirs(download_root_dir+"downloads")
except:
print("mkdir error")
if not os.path.isdir(download_root_dir+"images"):
try:
os.makedirs(download_root_dir+"images")
except:
print("mkdir error")
if not os.path.isdir(download_root_dir+"tmp"):
try:
os.makedirs(download_root_dir+"tmp")
except:
print("mkdir error")
    # @cherrypy.expose is a decorator marking the member method that follows as callable directly through a URL
    @cherrypy.expose
    # index is the default method of a CherryPy class: it runs when the user's URL names no method
    # methods that take self are member methods; Python passes the object between them through self
    def index(self):
outstring = '''
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<a href="a_40023256">a_40023256</a><br />
<a href="drawspur1">drawspur1</a><br />
<a href="drawspuraction1">drawspuraction1</a><br />
</body>
</html>
'''
return outstring
@cherrypy.expose
def a_40023256(self):
outstring = '''
<!DOCTYPE html>
<html>
<head>
40023256
<head>
<body>
<br /><a href="index">index</a><br />
</body>
</html>
'''
return outstring
@cherrypy.expose
    # N1, N2 are the tooth counts, M the module, P the pressure angle
def spur1(self, N1=15, N2=24, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<form method=POST action=spuraction1>
齒數1:<input type=text name=N1 value='''+str(N1)+'''><br />
齒數2:<input type=text name=N2 value='''+str(N2)+'''><br />
模數:<input type=text name=M value = '''+str(M)+'''><br />
壓力角:<input type=text name=P value = '''+str(P)+'''><br />
<input type=submit value=send>
</form>
<br /><a href="index">index</a><br />
</body>
</html>
'''
return outstring
@cherrypy.expose
    # N1, N2 are the tooth counts, M the module, P the pressure angle
def spuraction1(self, N1=15, N2=24, M=5, P=15):
output = '''
<!doctype html><html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<title>2015CD Midterm</title>
</head>
<body>
'''
output += "齒數1為"+str(N1)+"<br />"
output += "齒數2為"+str(N2)+"<br />"
output += "模數為"+str(M)+"<br />"
output += "壓力角為"+str(P)+"<br />"
output +='''<br /><a href="/spur1">spur1</a>(按下後再輸入)<br />'''
output +='''<br /><a href="index">index</a><br />
</body>
</html>
'''
return output
@cherrypy.expose
    # N1, N2 are the tooth counts, M the module, P the pressure angle
def drawspur1(self, N1=15, N2=24, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
</head>
<body>
<form method=POST action=drawspuraction1>
齒數1:<input type=text name=N1 value='''+str(N1)+'''><br />
齒數2:<input type=text name=N2 value='''+str(N2)+'''><br />
<input type=submit value=畫出正齒輪輪廓><br />
(範圍:15~80)
</form>
<br /><a href="index">index</a><br />
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script>
window.onload=function(){
brython();
}
</script>
</body>
</html>
'''
return outstring
@cherrypy.expose
    # N1, N2 are the tooth counts, M the module, P the pressure angle
def drawspuraction1(self, N1=15, N2=24, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<a href="index">index</a><br />
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# import document from browser
from browser import document
from math import *
# note that this imports the spur.py module located under Lib/site-packages
import spur
# prepare to draw on the canvas with id="plotarea"
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# the drawing below uses spur.py; the collaborative design computation must follow the user's
# requirements, and similar drawing modules for related parts are delegated to other team members
# midx, midy are the gear centre coordinates, rp the pitch radius, n the number of teeth,
# pa the pressure angle and color the line colour
# Gear(midx, midy, rp, n=20, pa=20, color="black"):
# the module fixes the tooth size; meshing gears must share the same module and pressure angle
# module M
M = '''+str(M)+'''
# pressure angle pa in degrees
pa = '''+str(P)+'''
# tooth counts of the two gears
n_g1 = '''+str(N1)+'''
n_g2 = '''+str(N2)+'''
# pitch radii of the two gears
rp_g1 = M*n_g1/2
rp_g2 = M*n_g2/2
# centre coordinates of the first gear
x_g1 = 400
y_g1 = 400
# centre of the second gear: same x, placed one centre distance (rp_g1 + rp_g2) below the first gear
x_g2 = x_g1
y_g2 = y_g1+ rp_g1 + rp_g2
# rotate the first gear by 180 degrees
# ctx.save() and ctx.restore() keep each gear's rotation in its own local coordinates
ctx.save()
# translate to the origin of the first gear
ctx.translate(x_g1, y_g1)
# rotate to engage
ctx.rotate(pi)
# put it back
ctx.translate(-x_g1, -y_g1)
spur.Spur(ctx).Gear(x_g1, y_g1, rp_g1, n_g1, pa, "blue")
ctx.restore()
# rotate the second gear by half a tooth pitch (pi/n_g2) so its teeth mesh with the first gear
ctx.save()
# translate to the origin of second gear
ctx.translate(x_g2, y_g2)
# rotate to engage
ctx.rotate(-pi/n_g2)
# put it back
ctx.translate(-x_g2, -y_g2)
spur.Spur(ctx).Gear(x_g2, y_g2, rp_g2, n_g2, pa, "green")
ctx.restore()
</script>
<canvas id="plotarea" width="3000" height="3000"></canvas>
</body>
</html>
'''
return outstring
@cherrypy.expose
    # N is the number of teeth, M the module, P the pressure angle
def spur(self, N=20, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.3-20150514-095342/brython.js"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<form method=POST action=spuraction>
齒數:<input type=text name=N value='''+str(N)+'''><br />
模數:<input type=text name=M value = '''+str(M)+'''><br />
壓力角:<input type=text name=P value = '''+str(P)+'''><br />
<input type=submit value=send>
</form>
<br /><a href="index">index</a><br />
</body>
</html>
'''
return outstring
@cherrypy.expose
    # N is the number of teeth, M the module, P the pressure angle
def spuraction(self, N=20, M=5, P=15):
output = '''
<!doctype html><html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<title>2015CD Midterm</title>
</head>
<body>
'''
output += "齒數為"+str(N)+"<br />"
output += "模數為"+str(M)+"<br />"
output += "壓力角為"+str(P)+"<br />"
output +='''<br /><a href="/spur">spur</a>(按下後再輸入)<br />'''
output +='''<br /><a href="index">index</a><br />
</body>
</html>
'''
return output
@cherrypy.expose
    # N is the number of teeth, M the module, P the pressure angle
def drawspur(self, N=20, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
</head>
<body>
<form method=POST action=drawspuraction>
齒數:<input type=text name=N value='''+str(N)+'''><br />
模數:<input type=text name=M value = '''+str(M)+'''><br />
壓力角:<input type=text name=P value = '''+str(P)+'''><br />
<input type=submit value=畫出正齒輪輪廓>
</form>
<br /><a href="index">index</a><br />
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.3-20150514-095342/brython.js"></script>
<script>
window.onload=function(){
brython();
}
</script>
</body>
</html>
'''
return outstring
@cherrypy.expose
    # N is the number of teeth, M the module, P the pressure angle
def drawspuraction(self, N=20, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
</head>
<body>
<a href="index">index</a><br />
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# import document from browser
from browser import document
from math import *
# note that this imports the spur.py module located under Lib/site-packages
import spur
# prepare to draw on the canvas with id="plotarea"
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# the drawing below uses spur.py
# N is the number of teeth
N = '''+str(N)+'''
# M is the module
M = '''+str(M)+'''
# pressure angle P in degrees
P = '''+str(P)+'''
# pitch radius of the gear
rp = N*M/2
spur.Spur(ctx).Gear(600, 600, rp, N, P, "blue")
</script>
<canvas id="plotarea" width="1200" height="1200"></canvas>
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.3-20150514-095342/brython.js"></script>
<script>
window.onload=function(){
brython();
}
</script>
</body>
</html>
'''
return outstring
@cherrypy.expose
    # W is the edge length of the cube
def cube(self, W=10):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
</head>
<body>
<!-- 使用者輸入表單的參數交由 cubeaction 方法處理 -->
<form method=POST action=cubeaction>
正方體邊長:<input type=text name=W value='''+str(W)+'''><br />
<input type=submit value=送出>
</form>
<br /><a href="index">index</a><br />
</body>
</html>
'''
return outstring
@cherrypy.expose
    # W is the edge length of the cube, default 10
def cubeaction(self, W=10):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 先載入 pfcUtils.js 與 wl_header.js -->
<script type="text/javascript" src="/static/weblink/pfcUtils.js"></script>
<script type="text/javascript" src="/static/weblink/wl_header.js">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.3-20150514-095342/brython.js"></script>
document.writeln ("Error loading Pro/Web.Link header!");
</script>
<script>
window.onload=function(){
brython();
}
</script>
</head>
<!-- 不要使用 body 啟動 brython() 改為 window level 啟動 -->
<body onload="">
<h1>Creo 參數化零件</h1>
<a href="index">index</a><br />
<!-- 以下為 Creo Pro/Web.Link 程式, 將 JavaScrip 改為 Brython 程式 -->
<script type="text/python">
from browser import document, window
from math import *
# this region is Brython code, so comments must use Python syntax
# pfcIsWindows() is a native JavaScript function, so it is reached through the window object
if not window.pfcIsWindows():
    window.netscape.security.PrivilegeManager.enablePrivilege("UniversalXPConnect")
# if the third argument is False the model is only loaded into the session, not displayed
# ret is the return value of the model open call
ret = document.pwl.pwlMdlOpen("cube.prt", "v:/tmp", False)
if not ret.Status:
    window.alert("pwlMdlOpen failed (" + str(ret.ErrorCode) + ")")
# keep the running Pro/ENGINEER session in the variable session
session = window.pfcGetProESession()
# open the part file in a window so that it is displayed
pro_window = session.OpenFile(window.pfcCreate("pfcModelDescriptor").CreateFromFileName("cube.prt"))
solid = session.GetModel("cube.prt", window.pfcCreate("pfcModelType").MDL_PART)
# Brython follows normal Python rules here: variables need no declaration, only initial values
# length, width, myf, myn, i, j, volume, count, d1Value, d2Value
# bind the model parameter a1 to the variable length
length = solid.GetParam("a1")
# bind the model parameter a2 to the variable width
width = solid.GetParam("a2")
# change the part dimensions
# myf=20
# myn=20
volume = 0
count = 0
try:
    # the values below could also be taken from URL arguments
    # createParametersFromArguments ();
    # here the part parameters are changed directly from the script
    for i in range(5):
        myf ='''+str(W)+'''
        myn ='''+str(W)+''' + i*2.0
        # convert the values with CreateDoubleParamValue into the floating point type Pro/Web.Link expects
        d1Value = window.pfcCreate ("MpfcModelItem").CreateDoubleParamValue(myf)
        d2Value = window.pfcCreate ("MpfcModelItem").CreateDoubleParamValue(myn)
        # assign the prepared values to the corresponding part parameters
        length.Value = d1Value
        width.Value = d2Value
        # after resetting the dimensions, call Regenerate to update the model
        # null in JavaScript corresponds to None in Brython
        solid.Regenerate(None)
        # GetMassProperty returns the mass property object of the model
        properties = solid.GetMassProperty(None)
        # volume = volume + properties.Volume
        volume = properties.Volume
        count = count + 1
        window.alert("執行第"+str(count)+"次,零件總體積:"+str(volume))
        # save the part as a new file
        newfile = document.pwl.pwlMdlSaveAs("cube.prt", "v:/tmp", "cube"+str(count)+".prt")
        if not newfile.Status:
            window.alert("pwlMdlSaveAs failed (" + str(newfile.ErrorCode) + ")")
        # window.alert("共執行:"+count+"次,零件總體積:"+volume)
        # window.alert("零件體積:"+properties.Volume)
        # window.alert("零件體積取整數:"+Math.round(properties.Volume));
except Exception as err:
    window.alert ("Exception occurred: "+window.pfcGetExceptionType (err))
</script>
'''
return outstring
@cherrypy.expose
def fileuploadform(self):
return '''<h1>file upload</h1>
<script src="/static/jquery.js" type="text/javascript"></script>
<script src="/static/axuploader.js" type="text/javascript"></script>
<script>
$(document).ready(function(){
$('.prova').axuploader({url:'fileaxupload', allowExt:['jpg','png','gif','7z','pdf','zip','flv','stl','swf'],
finish:function(x,files)
{
alert('All files have been uploaded: '+files);
},
enable:true,
remotePath:function(){
return 'downloads/';
}
});
});
</script>
<div class="prova"></div>
<input type="button" onclick="$('.prova').axuploader('disable')" value="asd" />
<input type="button" onclick="$('.prova').axuploader('enable')" value="ok" />
</section></body></html>
'''
@cherrypy.expose
def fileaxupload(self, *args, **kwargs):
filename = kwargs["ax-file-name"]
flag = kwargs["start"]
if flag == "0":
file = open(download_root_dir+"downloads/"+filename, "wb")
else:
file = open(download_root_dir+"downloads/"+filename, "ab")
file.write(cherrypy.request.body.read())
header= cherrypy.request.body.read(80)
file.close()
return "files uploaded!"+header.decode("UTF-8")
@cherrypy.expose
def download_list(self, item_per_page=5, page=1, keyword=None, *args, **kwargs):
files = os.listdir(download_root_dir+"downloads/")
total_rows = len(files)
totalpage = math.ceil(total_rows/int(item_per_page))
starti = int(item_per_page) * (int(page) - 1) + 1
endi = starti + int(item_per_page) - 1
outstring = "<form method='post' action='delete_file'>"
notlast = False
if total_rows > 0:
outstring += "<br />"
if (int(page) * int(item_per_page)) < total_rows:
notlast = True
if int(page) > 1:
outstring += "<a href='"
outstring += "download_list?&page=1&item_per_page="+str(item_per_page)+"&keyword="+str(cherrypy.session.get('download_keyword'))
outstring += "'><<</a> "
page_num = int(page) - 1
outstring += "<a href='"
outstring += "download_list?&page="+str(page_num)+"&item_per_page="+str(item_per_page)+"&keyword="+str(cherrypy.session.get('download_keyword'))
outstring += "'>Previous</a> "
span = 10
for index in range(int(page)-span, int(page)+span):
if index>= 0 and index< totalpage:
page_now = index + 1
if page_now == int(page):
outstring += "<font size='+1' color='red'>"+str(page)+" </font>"
else:
outstring += "<a href='"
outstring += "download_list?&page="+str(page_now)+"&item_per_page="+str(item_per_page)+"&keyword="+str(cherrypy.session.get('download_keyword'))
outstring += "'>"+str(page_now)+"</a> "
if notlast == True:
nextpage = int(page) + 1
outstring += " <a href='"
outstring += "download_list?&page="+str(nextpage)+"&item_per_page="+str(item_per_page)+"&keyword="+str(cherrypy.session.get('download_keyword'))
outstring += "'>Next</a>"
outstring += " <a href='"
outstring += "download_list?&page="+str(totalpage)+"&item_per_page="+str(item_per_page)+"&keyword="+str(cherrypy.session.get('download_keyword'))
outstring += "'>>></a><br /><br />"
if (int(page) * int(item_per_page)) < total_rows:
notlast = True
outstring += downloadlist_access_list(files, starti, endi)+"<br />"
else:
outstring += "<br /><br />"
outstring += downloadlist_access_list(files, starti, total_rows)+"<br />"
if int(page) > 1:
outstring += "<a href='"
outstring += "download_list?&page=1&item_per_page="+str(item_per_page)+"&keyword="+str(cherrypy.session.get('download_keyword'))
outstring += "'><<</a> "
page_num = int(page) - 1
outstring += "<a href='"
outstring += "download_list?&page="+str(page_num)+"&item_per_page="+str(item_per_page)+"&keyword="+str(cherrypy.session.get('download_keyword'))
outstring += "'>Previous</a> "
span = 10
for index in range(int(page)-span, int(page)+span):
#for ($j=$page-$range;$j<$page+$range;$j++)
if index >=0 and index < totalpage:
page_now = index + 1
if page_now == int(page):
outstring += "<font size='+1' color='red'>"+str(page)+" </font>"
else:
outstring += "<a href='"
outstring += "download_list?&page="+str(page_now)+"&item_per_page="+str(item_per_page)+"&keyword="+str(cherrypy.session.get('download_keyword'))
outstring += "'>"+str(page_now)+"</a> "
if notlast == True:
nextpage = int(page) + 1
outstring += " <a href='"
outstring += "download_list?&page="+str(nextpage)+"&item_per_page="+str(item_per_page)+"&keyword="+str(cherrypy.session.get('download_keyword'))
outstring += "'>Next</a>"
outstring += " <a href='"
outstring += "download_list?&page="+str(totalpage)+"&item_per_page="+str(item_per_page)+"&keyword="+str(cherrypy.session.get('download_keyword'))
outstring += "'>>></a>"
else:
outstring += "no data!"
outstring += "<br /><br /><input type='submit' value='delete'><input type='reset' value='reset'></form>"
return "<div class='container'><nav>"+ \
"</nav><section><h1>Download List</h1>"+outstring+"<br/><br /></body></html>"
class Download:
@cherrypy.expose
def index(self, filepath):
return serve_file(filepath, "application/x-download", "attachment")
################# (4) Application startup section
# configure the static directories and files relative to the program directory
application_conf = {'/static':{
'tools.staticdir.on': True,
        # the static directory must be created manually under the program directory
'tools.staticdir.dir': _curdir+"/static"},
'/downloads':{
'tools.staticdir.on': True,
'tools.staticdir.dir': data_dir+"/downloads"},
'/images':{
'tools.staticdir.on': True,
'tools.staticdir.dir': data_dir+"/images"}
}
root = Midterm()
root.download = Download()
root.man = man.MAN()
root.man2 = man2.MAN()
#root.gear = gear.Gear()
if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
    # running on OpenShift
application = cherrypy.Application(root, config=application_conf)
else:
    # running locally
cherrypy.quickstart(root, config=application_conf)
| agpl-3.0 | -1,107,365,164,329,020,000 | 721,129,142,110,271,100 | 34.536797 | 180 | 0.552605 | false |
RoboAvatar65/ChessRobot | algorithm.py | 1 | 5095 | ############################################################
###algorithm.py Chess Algorithm Engine ###
###Written by Nicholas Maselli ###
### ###
###Purpose: The Algorithm class creates a minimax tree ###
###that utilizies alpha beta pruning and iterative ###
###deepening to search through the tree quickly. Piece ###
###square tables are used to obtain good value functions ###
###for chess board evaluation. ###
### ###
###Version: 1.0 ###
###Date: 6-30-17 ###
############################################################
from chess import Chess
import random
import collections
import time
###############################
#####MinimaxGameTree Class#####
###############################
class MinimaxGameTree():
def __init__(self, chess, color, depth):
self.chess = chess
self.player = color
self.depth = depth
#Time
self.fulltime = 0
#Continuously Iterate search depth to obtain better move ordering
def iterativeDeepening(self):
alpha = -40000
beta = 40000
pv = []
for depth in range(1, self.depth+1):
data = self.dfsMax(alpha, beta, depth, pv)
pv = data[1]
best_value = data[0]
move_list = data[1]
best_move = move_list[self.depth-1]
return(best_move)
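    # Rough usage sketch of iterativeDeepening (assumes a Chess() position where it
    # is this player's turn):
    #   tree = MinimaxGameTree(chess, 'white', depth=3)
    #   best_move = tree.iterativeDeepening()
    # Each pass searches one ply deeper and feeds the previous pass's principal
    # variation (pv) back in as the first line tried, which keeps alpha-beta
    # pruning effective through better move ordering.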
#Minimax algorithm with alpha-beta pruning, max function
def dfsMax(self, alpha, beta, depth, pv):
if (depth == 0):
value = self.evaluate_board(self.player)
return((value, []))
#Start with the principal value move
move_list = []
best_move = None
if (pv != []):
move = pv.pop()
self.next_position(move)
data = self.dfsMin(alpha, beta, depth-1, pv)
self.previous_position()
value = data[0]
if (value >= beta):
move_list = data[1]
move_list.append(best_move)
return((beta, move_list))
if (value > alpha):
alpha = value
best_move = move
move_list = data[1]
for move in self.chess.legal_moves():
self.next_position(move)
data = self.dfsMin(alpha, beta, depth-1, pv)
self.previous_position()
value = data[0]
if (value >= beta):
move_list = data[1]
move_list.append(best_move)
return((beta, move_list))
if (value > alpha):
alpha = value
best_move = move
move_list = data[1]
#If you are in checkmate
if (best_move == None):
alpha = -20000
move_list.append(best_move)
return((alpha, move_list))
#Minimax algorithm with alpha-beta pruning, min function
def dfsMin(self, alpha, beta, depth, pv):
if (depth == 0):
value = self.evaluate_board(self.player)
return((value, []))
#Start with the principal value move
move_list = []
best_move = None
if (pv != []):
move = pv.pop()
self.next_position(move)
data = self.dfsMax(alpha, beta, depth-1, pv)
self.previous_position()
value = data[0]
if (value <= alpha):
move_list = data[1]
move_list.append(best_move)
return((alpha, move_list))
if (value < beta):
beta = value
best_move = move
move_list = data[1]
for move in self.chess.legal_moves():
self.next_position(move)
data = self.dfsMax(alpha, beta, depth-1, pv)
self.previous_position()
value = data[0]
if (value <= alpha):
move_list = data[1]
move_list.append(best_move)
return((alpha, move_list))
if (value < beta):
beta = value
best_move = move
move_list = data[1]
#If opponent is in checkmate
if (best_move == None):
beta = 20000
move_list.append(best_move)
return((beta, move_list))
#Evaluate the current board and state from color's perspective
def evaluate_board(self, color):
if (color == 'white'):
value = self.chess.state.value
if (color == 'black'):
value = -self.chess.state.value
return(value)
#Move to the next position in the chess board
def next_position(self, move):
self.chess.move_piece(move)
#Move to previous position in the chessboard
def previous_position(self):
self.chess.undo_move()
#########################
#####Algorithm Class#####
#########################
class Algorithm():
#Initialize values
def __init__(self, chess, player, depth):
self.chess = chess
self.player = player
self.depth = depth
self.fulltime = 0
#Choose next move using algorithm
def best_move(self):
self.tree = MinimaxGameTree(self.chess, self.player, self.depth)
#Comments here for timing purposes
#start_time = time.time()
move = self.tree.iterativeDeepening()
#end_time = time.time()
#print("Searching the tree: {}".format(end_time - start_time))
notation = self.chess.coordinate_to_notation(move)
return(notation) | mit | -3,002,454,787,740,023,000 | 943,866,492,980,330,900 | 26.160221 | 66 | 0.547203 | false |
tpazderka/pysaml2 | src/saml2/server.py | 1 | 27849 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
"""Contains classes and functions that a SAML2.0 Identity provider (IdP)
or attribute authority (AA) may use to conclude its tasks.
"""
import logging
import os
import importlib
import shelve
import threading
from saml2.eptid import EptidShelve, Eptid
from saml2.saml import EncryptedAssertion
from saml2.sdb import SessionStorage
from saml2.schema import soapenv
from saml2.samlp import NameIDMappingResponse
from saml2.entity import Entity
from saml2 import saml, element_to_extension_element
from saml2 import class_name
from saml2 import BINDING_HTTP_REDIRECT
from saml2.request import AuthnRequest
from saml2.request import AssertionIDRequest
from saml2.request import AttributeQuery
from saml2.request import NameIDMappingRequest
from saml2.request import AuthzDecisionQuery
from saml2.request import AuthnQuery
from saml2.s_utils import MissingValue, Unknown, rndstr
from saml2.sigver import pre_signature_part, signed_instance_factory, CertificateError, CryptoBackendXmlSec1
from saml2.assertion import Assertion
from saml2.assertion import Policy
from saml2.assertion import restriction_from_attribute_spec
from saml2.assertion import filter_attribute_value_assertions
from saml2.ident import IdentDB
from saml2.profile import ecp
logger = logging.getLogger(__name__)
AUTHN_DICT_MAP = {
"decl": "authn_decl",
"authn_auth": "authn_auth",
"class_ref": "authn_class",
"authn_instant": "authn_instant",
"subject_locality": "subject_locality"
}
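# Illustrative shape of the ``authn`` dict accepted by create_authn_response()
# (a sketch; the values are made up and only keys listed in AUTHN_DICT_MAP are
# forwarded into the assertion):
#   authn = {"class_ref": "urn:oasis:names:tc:SAML:2.0:ac:classes:Password",
#            "authn_instant": "2015-06-30T12:00:00Z"}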
class Server(Entity):
""" A class that does things that IdPs or AAs do """
def __init__(self, config_file="", config=None, cache=None, stype="idp",
symkey=""):
Entity.__init__(self, stype, config, config_file)
self.eptid = None
self.init_config(stype)
self.cache = cache
self.ticket = {}
#
self.session_db = self.choose_session_storage()
# Needed for
self.symkey = symkey
self.seed = rndstr()
self.iv = os.urandom(16)
self.lock = threading.Lock()
def getvalid_certificate_str(self):
if self.sec.cert_handler is not None:
return self.sec.cert_handler._last_validated_cert
return None
def support_AssertionIDRequest(self):
return True
def support_AuthnQuery(self):
return True
def choose_session_storage(self):
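        # Accepted "session_storage" configuration values (sketch):
        #   "memory"                  -> in-memory SessionStorage
        #   ("mongodb", "<database>") -> SessionStorageMDB stored in MongoDB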
_spec = self.config.getattr("session_storage", "idp")
if not _spec:
return SessionStorage()
elif isinstance(_spec, basestring):
if _spec.lower() == "memory":
return SessionStorage()
else: # Should be tuple
typ, data = _spec
if typ.lower() == "mongodb":
from saml2.mongo_store import SessionStorageMDB
return SessionStorageMDB(database=data, collection="session")
raise NotImplementedError("No such storage type implemented")
def init_config(self, stype="idp"):
""" Remaining init of the server configuration
:param stype: The type of Server ("idp"/"aa")
"""
if stype == "aa":
return
# subject information is stored in a database
# default database is in memory which is OK in some setups
dbspec = self.config.getattr("subject_data", "idp")
idb = None
typ = ""
if not dbspec:
idb = {}
elif isinstance(dbspec, basestring):
idb = shelve.open(dbspec, writeback=True)
else: # database spec is a a 2-tuple (type, address)
#print >> sys.stderr, "DBSPEC: %s" % (dbspec,)
(typ, addr) = dbspec
if typ == "shelve":
idb = shelve.open(addr, writeback=True)
elif typ == "memcached":
import memcache
idb = memcache.Client(addr)
elif typ == "dict": # in-memory dictionary
idb = {}
elif typ == "mongodb":
from saml2.mongo_store import IdentMDB
self.ident = IdentMDB(database=addr, collection="ident")
elif typ == "identdb":
mod, clas = addr.rsplit('.', 1)
mod = importlib.import_module(mod)
self.ident = getattr(mod, clas)()
if typ == "mongodb" or typ == "identdb":
pass
elif idb is not None:
self.ident = IdentDB(idb)
elif dbspec:
raise Exception("Couldn't open identity database: %s" %
(dbspec,))
_domain = self.config.getattr("domain", "idp")
if _domain:
self.ident.domain = _domain
self.ident.name_qualifier = self.config.entityid
dbspec = self.config.getattr("edu_person_targeted_id", "idp")
if not dbspec:
pass
else:
typ = dbspec[0]
addr = dbspec[1]
secret = dbspec[2]
if typ == "shelve":
self.eptid = EptidShelve(secret, addr)
elif typ == "mongodb":
from saml2.mongo_store import EptidMDB
self.eptid = EptidMDB(secret, database=addr,
collection="eptid")
else:
self.eptid = Eptid(secret)
def wants(self, sp_entity_id, index=None):
""" Returns what attributes the SP requires and which are optional
if any such demands are registered in the Metadata.
:param sp_entity_id: The entity id of the SP
:param index: which of the attribute consumer services its all about
if index == None then all attribute consumer services are clumped
together.
:return: 2-tuple, list of required and list of optional attributes
"""
return self.metadata.attribute_requirement(sp_entity_id, index)
def verify_assertion_consumer_service(self, request):
_acs = request.assertion_consumer_service_url
_aci = request.assertion_consumer_service_index
_binding = request.protocol_binding
_eid = request.issuer.text
if _acs:
            # look up the acs for that binding in the metadata given the issuer
# Assuming the format is entity
for acs in self.metadata.assertion_consumer_service(_eid, _binding):
if _acs == acs.text:
return True
elif _aci:
for acs in self.metadata.assertion_consumer_service(_eid, _binding):
if _aci == acs.index:
return True
return False
# -------------------------------------------------------------------------
def parse_authn_request(self, enc_request, binding=BINDING_HTTP_REDIRECT):
"""Parse a Authentication Request
:param enc_request: The request in its transport format
:param binding: Which binding that was used to transport the message
to this entity.
:return: A dictionary with keys:
consumer_url - as gotten from the SPs entity_id and the metadata
id - the id of the request
sp_entity_id - the entity id of the SP
request - The verified request
"""
return self._parse_request(enc_request, AuthnRequest,
"single_sign_on_service", binding)
def parse_attribute_query(self, xml_string, binding):
""" Parse an attribute query
:param xml_string: The Attribute Query as an XML string
:param binding: Which binding that was used for the request
:return: A query instance
"""
return self._parse_request(xml_string, AttributeQuery,
"attribute_service", binding)
def parse_authz_decision_query(self, xml_string, binding):
""" Parse an authorization decision query
:param xml_string: The Authz decision Query as an XML string
:param binding: Which binding that was used when receiving this query
:return: Query instance
"""
return self._parse_request(xml_string, AuthzDecisionQuery,
"authz_service", binding)
def parse_assertion_id_request(self, xml_string, binding):
""" Parse an assertion id query
:param xml_string: The AssertionIDRequest as an XML string
:param binding: Which binding that was used when receiving this request
:return: Query instance
"""
return self._parse_request(xml_string, AssertionIDRequest,
"assertion_id_request_service", binding)
def parse_authn_query(self, xml_string, binding):
""" Parse an authn query
:param xml_string: The AuthnQuery as an XML string
:param binding: Which binding that was used when receiving this query
:return: Query instance
"""
return self._parse_request(xml_string, AuthnQuery,
"authn_query_service", binding)
def parse_name_id_mapping_request(self, xml_string, binding):
""" Parse a nameid mapping request
:param xml_string: The NameIDMappingRequest as an XML string
:param binding: Which binding that was used when receiving this request
:return: Query instance
"""
return self._parse_request(xml_string, NameIDMappingRequest,
"name_id_mapping_service", binding)
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
def _authn_response(self, in_response_to, consumer_url,
sp_entity_id, identity=None, name_id=None,
status=None, authn=None, issuer=None, policy=None,
sign_assertion=False, sign_response=False,
best_effort=False, encrypt_assertion=False, encrypt_cert=None):
""" Create a response. A layer of indirection.
:param in_response_to: The session identifier of the request
:param consumer_url: The URL which should receive the response
:param sp_entity_id: The entity identifier of the SP
:param identity: A dictionary with attributes and values that are
expected to be the bases for the assertion in the response.
:param name_id: The identifier of the subject
:param status: The status of the response
:param authn: A dictionary containing information about the
authn context.
:param issuer: The issuer of the response
:param sign_assertion: Whether the assertion should be signed or not
:param sign_response: Whether the response should be signed or not
:param best_effort: Even if not the SPs demands can be met send a
response.
:return: A response instance
"""
to_sign = []
args = {}
#if identity:
_issuer = self._issuer(issuer)
ast = Assertion(identity)
ast.acs = self.config.getattr("attribute_converters", "idp")
if policy is None:
policy = Policy()
try:
ast.apply_policy(sp_entity_id, policy, self.metadata)
except MissingValue, exc:
if not best_effort:
return self.create_error_response(in_response_to, consumer_url,
exc, sign_response)
if authn: # expected to be a dictionary
# Would like to use dict comprehension but ...
authn_args = dict([
(AUTHN_DICT_MAP[k], v) for k, v in authn.items()
if k in AUTHN_DICT_MAP])
assertion = ast.construct(sp_entity_id, in_response_to,
consumer_url, name_id,
self.config.attribute_converters,
policy, issuer=_issuer,
**authn_args)
else:
assertion = ast.construct(sp_entity_id, in_response_to,
consumer_url, name_id,
self.config.attribute_converters,
policy, issuer=_issuer)
if sign_assertion is not None and sign_assertion:
assertion.signature = pre_signature_part(assertion.id,
self.sec.my_cert, 1)
# Just the assertion or the response and the assertion ?
to_sign = [(class_name(assertion), assertion.id)]
# Store which assertion that has been sent to which SP about which
# subject.
# self.cache.set(assertion.subject.name_id.text,
# sp_entity_id, {"ava": identity, "authn": authn},
# assertion.conditions.not_on_or_after)
args["assertion"] = assertion
if self.support_AssertionIDRequest() or self.support_AuthnQuery():
self.session_db.store_assertion(assertion, to_sign)
return self._response(in_response_to, consumer_url, status, issuer,
sign_response, to_sign, encrypt_assertion=encrypt_assertion,
encrypt_cert=encrypt_cert, **args)
# ------------------------------------------------------------------------
#noinspection PyUnusedLocal
def create_attribute_response(self, identity, in_response_to, destination,
sp_entity_id, userid="", name_id=None,
status=None, issuer=None,
sign_assertion=False, sign_response=False,
attributes=None, **kwargs):
""" Create an attribute assertion response.
:param identity: A dictionary with attributes and values that are
expected to be the bases for the assertion in the response.
:param in_response_to: The session identifier of the request
:param destination: The URL which should receive the response
:param sp_entity_id: The entity identifier of the SP
:param userid: A identifier of the user
:param name_id: The identifier of the subject
:param status: The status of the response
:param issuer: The issuer of the response
:param sign_assertion: Whether the assertion should be signed or not
:param sign_response: Whether the whole response should be signed
:param attributes:
:param kwargs: To catch extra keyword arguments
:return: A response instance
"""
policy = self.config.getattr("policy", "aa")
if not name_id and userid:
try:
name_id = self.ident.construct_nameid(userid, policy,
sp_entity_id)
logger.warning("Unspecified NameID format")
except Exception:
pass
to_sign = []
args = {}
if identity:
_issuer = self._issuer(issuer)
ast = Assertion(identity)
if policy:
ast.apply_policy(sp_entity_id, policy, self.metadata)
else:
policy = Policy()
if attributes:
restr = restriction_from_attribute_spec(attributes)
ast = filter_attribute_value_assertions(ast)
assertion = ast.construct(sp_entity_id, in_response_to,
destination, name_id,
self.config.attribute_converters,
policy, issuer=_issuer)
if sign_assertion:
assertion.signature = pre_signature_part(assertion.id,
self.sec.my_cert, 1)
# Just the assertion or the response and the assertion ?
to_sign = [(class_name(assertion), assertion.id)]
args["assertion"] = assertion
return self._response(in_response_to, destination, status, issuer,
sign_response, to_sign, **args)
# ------------------------------------------------------------------------
def create_authn_response(self, identity, in_response_to, destination,
sp_entity_id, name_id_policy=None, userid=None,
name_id=None, authn=None, issuer=None,
sign_response=None, sign_assertion=None, encrypt_cert=None, encrypt_assertion=None,
**kwargs):
""" Constructs an AuthenticationResponse
:param identity: Information about a user
:param in_response_to: The identifier of the authentication request
this response is an answer to.
:param destination: Where the response should be sent
:param sp_entity_id: The entity identifier of the Service Provider
:param name_id_policy: How the NameID should be constructed
:param userid: The subject identifier
:param authn: Dictionary with information about the authentication
context
:param issuer: Issuer of the response
:param sign_assertion: Whether the assertion should be signed or not.
:param sign_response: Whether the response should be signed or not.
:return: A response instance
"""
try:
policy = kwargs["release_policy"]
except KeyError:
policy = self.config.getattr("policy", "idp")
try:
best_effort = kwargs["best_effort"]
except KeyError:
best_effort = False
if sign_assertion is None:
sign_assertion = self.config.getattr("sign_assertion", "idp")
if sign_assertion is None:
sign_assertion = False
if sign_response is None:
sign_response = self.config.getattr("sign_response", "idp")
if sign_response is None:
sign_response = False
if encrypt_assertion is None:
encrypt_assertion = self.config.getattr("encrypt_assertion", "idp")
if encrypt_assertion is None:
encrypt_assertion = False
if encrypt_assertion:
if encrypt_cert is not None:
verify_encrypt_cert = self.config.getattr("verify_encrypt_cert", "idp")
if verify_encrypt_cert is not None:
if not verify_encrypt_cert(encrypt_cert):
raise CertificateError("Invalid certificate for encryption!")
else:
raise CertificateError("No SPCertEncType certificate for encryption contained in authentication "
"request.")
else:
encrypt_assertion = False
if not name_id:
try:
nid_formats = []
for _sp in self.metadata[sp_entity_id]["spsso_descriptor"]:
if "name_id_format" in _sp:
nid_formats.extend([n["text"] for n in
_sp["name_id_format"]])
try:
snq = name_id_policy.sp_name_qualifier
except AttributeError:
snq = sp_entity_id
if not snq:
snq = sp_entity_id
kwa = {"sp_name_qualifier": snq}
try:
kwa["format"] = name_id_policy.format
except AttributeError:
pass
_nids = self.ident.find_nameid(userid, **kwa)
# either none or one
if _nids:
name_id = _nids[0]
else:
name_id = self.ident.construct_nameid(userid, policy,
sp_entity_id,
name_id_policy)
except IOError, exc:
response = self.create_error_response(in_response_to,
destination,
sp_entity_id,
exc, name_id)
return ("%s" % response).split("\n")
try:
_authn = authn
if (sign_assertion or sign_response) and self.sec.cert_handler.generate_cert():
with self.lock:
self.sec.cert_handler.update_cert(True)
return self._authn_response(in_response_to, # in_response_to
destination, # consumer_url
sp_entity_id, # sp_entity_id
identity, # identity as dictionary
name_id,
authn=_authn,
issuer=issuer,
policy=policy,
sign_assertion=sign_assertion,
sign_response=sign_response,
best_effort=best_effort,
encrypt_assertion=encrypt_assertion,
encrypt_cert=encrypt_cert)
return self._authn_response(in_response_to, # in_response_to
destination, # consumer_url
sp_entity_id, # sp_entity_id
identity, # identity as dictionary
name_id,
authn=_authn,
issuer=issuer,
policy=policy,
sign_assertion=sign_assertion,
sign_response=sign_response,
best_effort=best_effort,
encrypt_assertion=encrypt_assertion,
encrypt_cert=encrypt_cert)
except MissingValue, exc:
return self.create_error_response(in_response_to, destination,
sp_entity_id, exc, name_id)
def create_authn_request_response(self, identity, in_response_to,
destination, sp_entity_id,
name_id_policy=None, userid=None,
name_id=None, authn=None, authn_decl=None,
issuer=None, sign_response=False,
sign_assertion=False, **kwargs):
return self.create_authn_response(identity, in_response_to, destination,
sp_entity_id, name_id_policy, userid,
name_id, authn, issuer,
sign_response, sign_assertion,
authn_decl=authn_decl)
#noinspection PyUnusedLocal
def create_assertion_id_request_response(self, assertion_id, sign=False,
**kwargs):
"""
:param assertion_id:
:param sign:
:return:
"""
try:
(assertion, to_sign) = self.session_db.get_assertion(assertion_id)
except KeyError:
raise Unknown
if to_sign:
if assertion.signature is None:
assertion.signature = pre_signature_part(assertion.id,
self.sec.my_cert, 1)
return signed_instance_factory(assertion, self.sec, to_sign)
else:
return assertion
#noinspection PyUnusedLocal
def create_name_id_mapping_response(self, name_id=None, encrypted_id=None,
in_response_to=None,
issuer=None, sign_response=False,
status=None, **kwargs):
"""
Implements the protocol for mapping a principal's name identifier into
a different name identifier for the same principal.
Done over SOAP.
:param name_id:
:param encrypted_id:
:param in_response_to:
:param issuer:
:param sign_response:
:param status:
:return:
"""
# Done over SOAP
ms_args = self.message_args()
_resp = NameIDMappingResponse(name_id, encrypted_id,
in_response_to=in_response_to, **ms_args)
if sign_response:
return self.sign(_resp)
else:
logger.info("Message: %s" % _resp)
return _resp
def create_authn_query_response(self, subject, session_index=None,
requested_context=None, in_response_to=None,
issuer=None, sign_response=False,
status=None, **kwargs):
"""
A successful <Response> will contain one or more assertions containing
authentication statements.
:return:
"""
margs = self.message_args()
asserts = []
for statement in self.session_db.get_authn_statements(
subject.name_id, session_index, requested_context):
asserts.append(saml.Assertion(authn_statement=statement,
subject=subject, **margs))
if asserts:
args = {"assertion": asserts}
else:
args = {}
return self._response(in_response_to, "", status, issuer,
sign_response, to_sign=[], **args)
# ---------
def parse_ecp_authn_request(self):
pass
def create_ecp_authn_request_response(self, acs_url, identity,
in_response_to, destination,
sp_entity_id, name_id_policy=None,
userid=None, name_id=None, authn=None,
issuer=None, sign_response=False,
sign_assertion=False, **kwargs):
# ----------------------------------------
# <ecp:Response
# ----------------------------------------
ecp_response = ecp.Response(assertion_consumer_service_url=acs_url)
header = soapenv.Header()
header.extension_elements = [element_to_extension_element(ecp_response)]
# ----------------------------------------
# <samlp:Response
# ----------------------------------------
response = self.create_authn_response(identity, in_response_to,
destination, sp_entity_id,
name_id_policy, userid, name_id,
authn, issuer,
sign_response, sign_assertion)
body = soapenv.Body()
body.extension_elements = [element_to_extension_element(response)]
soap_envelope = soapenv.Envelope(header=header, body=body)
return "%s" % soap_envelope
| bsd-2-clause | -8,351,801,593,633,922,000 | -4,916,676,574,907,547,000 | 39.774524 | 113 | 0.521132 | false |
okwasi/googlemock | scripts/generator/cpp/gmock_class.py | 82 | 7454 | #!/usr/bin/env python
#
# Copyright 2008 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate Google Mock classes from base classes.
This program will read in a C++ source file and output the Google Mock
classes for the specified classes. If no class is specified, all
classes in the source file are emitted.
Usage:
gmock_class.py header-file.h [ClassName]...
Output is sent to stdout.
"""
__author__ = '[email protected] (Neal Norwitz)'
import os
import re
import sys
from cpp import ast
from cpp import utils
# Preserve compatibility with Python 2.3.
try:
_dummy = set
except NameError:
import sets
set = sets.Set
_VERSION = (1, 0, 1) # The version of this script.
# How many spaces to indent. Can set me with the INDENT environment variable.
_INDENT = 2
def _GenerateMethods(output_lines, source, class_node):
function_type = ast.FUNCTION_VIRTUAL | ast.FUNCTION_PURE_VIRTUAL
ctor_or_dtor = ast.FUNCTION_CTOR | ast.FUNCTION_DTOR
indent = ' ' * _INDENT
for node in class_node.body:
# We only care about virtual functions.
if (isinstance(node, ast.Function) and
node.modifiers & function_type and
not node.modifiers & ctor_or_dtor):
# Pick out all the elements we need from the original function.
const = ''
if node.modifiers & ast.FUNCTION_CONST:
const = 'CONST_'
return_type = 'void'
if node.return_type:
# Add modifiers like 'const'.
modifiers = ''
if node.return_type.modifiers:
modifiers = ' '.join(node.return_type.modifiers) + ' '
return_type = modifiers + node.return_type.name
template_args = [arg.name for arg in node.return_type.templated_types]
if template_args:
return_type += '<' + ', '.join(template_args) + '>'
if len(template_args) > 1:
for line in [
'// The following line won\'t really compile, as the return',
'// type has multiple template arguments. To fix it, use a',
'// typedef for the return type.']:
output_lines.append(indent + line)
if node.return_type.pointer:
return_type += '*'
if node.return_type.reference:
return_type += '&'
num_parameters = len(node.parameters)
if len(node.parameters) == 1:
first_param = node.parameters[0]
if source[first_param.start:first_param.end].strip() == 'void':
# We must treat T(void) as a function with no parameters.
num_parameters = 0
mock_method_macro = 'MOCK_%sMETHOD%d' % (const, num_parameters)
args = ''
if node.parameters:
# Due to the parser limitations, it is impossible to keep comments
# while stripping the default parameters. When defaults are
# present, we choose to strip them and comments (and produce
# compilable code).
# TODO([email protected]): Investigate whether it is possible to
# preserve parameter name when reconstructing parameter text from
# the AST.
if len([param for param in node.parameters if param.default]) > 0:
args = ', '.join(param.type.name for param in node.parameters)
else:
# Get the full text of the parameters from the start
# of the first parameter to the end of the last parameter.
start = node.parameters[0].start
end = node.parameters[-1].end
# Remove // comments.
args_strings = re.sub(r'//.*', '', source[start:end])
# Condense multiple spaces and eliminate newlines putting the
# parameters together on a single line. Ensure there is a
# space in an argument which is split by a newline without
# intervening whitespace, e.g.: int\nBar
args = re.sub(' +', ' ', args_strings.replace('\n', ' '))
# Create the mock method definition.
output_lines.extend(['%s%s(%s,' % (indent, mock_method_macro, node.name),
'%s%s(%s));' % (indent*3, return_type, args)])
def _GenerateMocks(filename, source, ast_list, desired_class_names):
processed_class_names = set()
lines = []
for node in ast_list:
if (isinstance(node, ast.Class) and node.body and
# desired_class_names being None means that all classes are selected.
(not desired_class_names or node.name in desired_class_names)):
class_name = node.name
processed_class_names.add(class_name)
class_node = node
# Add namespace before the class.
if class_node.namespace:
lines.extend(['namespace %s {' % n for n in class_node.namespace]) # }
lines.append('')
# Add the class prolog.
lines.append('class Mock%s : public %s {' % (class_name, class_name)) # }
lines.append('%spublic:' % (' ' * (_INDENT // 2)))
# Add all the methods.
_GenerateMethods(lines, source, class_node)
# Close the class.
if lines:
# If there are no virtual methods, no need for a public label.
if len(lines) == 2:
del lines[-1]
# Only close the class if there really is a class.
lines.append('};')
lines.append('') # Add an extra newline.
# Close the namespace.
if class_node.namespace:
for i in range(len(class_node.namespace)-1, -1, -1):
lines.append('} // namespace %s' % class_node.namespace[i])
lines.append('') # Add an extra newline.
if desired_class_names:
missing_class_name_list = list(desired_class_names - processed_class_names)
if missing_class_name_list:
missing_class_name_list.sort()
sys.stderr.write('Class(es) not found in %s: %s\n' %
(filename, ', '.join(missing_class_name_list)))
elif not processed_class_names:
sys.stderr.write('No class found in %s\n' % filename)
return lines
def main(argv=sys.argv):
if len(argv) < 2:
sys.stderr.write('Google Mock Class Generator v%s\n\n' %
'.'.join(map(str, _VERSION)))
sys.stderr.write(__doc__)
return 1
global _INDENT
try:
_INDENT = int(os.environ['INDENT'])
except KeyError:
pass
except:
sys.stderr.write('Unable to use indent of %s\n' % os.environ.get('INDENT'))
filename = argv[1]
desired_class_names = None # None means all classes in the source file.
if len(argv) >= 3:
desired_class_names = set(argv[2:])
source = utils.ReadFile(filename)
if source is None:
return 1
builder = ast.BuilderFromSource(source, filename)
try:
entire_ast = filter(None, builder.Generate())
except KeyboardInterrupt:
return
except:
# An error message was already printed since we couldn't parse.
pass
else:
lines = _GenerateMocks(filename, source, entire_ast, desired_class_names)
sys.stdout.write('\n'.join(lines))
if __name__ == '__main__':
main(sys.argv)
| bsd-3-clause | -2,047,057,136,528,697,900 | -3,172,017,476,170,970,600 | 34.836538 | 80 | 0.631473 | false |
zeha/multiapt | extlib/paramiko-1.7.3/paramiko/common.py | 1 | 4059 | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
Common constants and global variables.
"""
MSG_DISCONNECT, MSG_IGNORE, MSG_UNIMPLEMENTED, MSG_DEBUG, MSG_SERVICE_REQUEST, \
MSG_SERVICE_ACCEPT = range(1, 7)
MSG_KEXINIT, MSG_NEWKEYS = range(20, 22)
MSG_USERAUTH_REQUEST, MSG_USERAUTH_FAILURE, MSG_USERAUTH_SUCCESS, \
MSG_USERAUTH_BANNER = range(50, 54)
MSG_USERAUTH_PK_OK = 60
MSG_USERAUTH_INFO_REQUEST, MSG_USERAUTH_INFO_RESPONSE = range(60, 62)
MSG_GLOBAL_REQUEST, MSG_REQUEST_SUCCESS, MSG_REQUEST_FAILURE = range(80, 83)
MSG_CHANNEL_OPEN, MSG_CHANNEL_OPEN_SUCCESS, MSG_CHANNEL_OPEN_FAILURE, \
MSG_CHANNEL_WINDOW_ADJUST, MSG_CHANNEL_DATA, MSG_CHANNEL_EXTENDED_DATA, \
MSG_CHANNEL_EOF, MSG_CHANNEL_CLOSE, MSG_CHANNEL_REQUEST, \
MSG_CHANNEL_SUCCESS, MSG_CHANNEL_FAILURE = range(90, 101)
# for debugging:
MSG_NAMES = {
MSG_DISCONNECT: 'disconnect',
MSG_IGNORE: 'ignore',
MSG_UNIMPLEMENTED: 'unimplemented',
MSG_DEBUG: 'debug',
MSG_SERVICE_REQUEST: 'service-request',
MSG_SERVICE_ACCEPT: 'service-accept',
MSG_KEXINIT: 'kexinit',
MSG_NEWKEYS: 'newkeys',
30: 'kex30',
31: 'kex31',
32: 'kex32',
33: 'kex33',
34: 'kex34',
MSG_USERAUTH_REQUEST: 'userauth-request',
MSG_USERAUTH_FAILURE: 'userauth-failure',
MSG_USERAUTH_SUCCESS: 'userauth-success',
MSG_USERAUTH_BANNER: 'userauth-banner',
MSG_USERAUTH_PK_OK: 'userauth-60(pk-ok/info-request)',
MSG_USERAUTH_INFO_RESPONSE: 'userauth-info-response',
MSG_GLOBAL_REQUEST: 'global-request',
MSG_REQUEST_SUCCESS: 'request-success',
MSG_REQUEST_FAILURE: 'request-failure',
MSG_CHANNEL_OPEN: 'channel-open',
MSG_CHANNEL_OPEN_SUCCESS: 'channel-open-success',
MSG_CHANNEL_OPEN_FAILURE: 'channel-open-failure',
MSG_CHANNEL_WINDOW_ADJUST: 'channel-window-adjust',
MSG_CHANNEL_DATA: 'channel-data',
MSG_CHANNEL_EXTENDED_DATA: 'channel-extended-data',
MSG_CHANNEL_EOF: 'channel-eof',
MSG_CHANNEL_CLOSE: 'channel-close',
MSG_CHANNEL_REQUEST: 'channel-request',
MSG_CHANNEL_SUCCESS: 'channel-success',
MSG_CHANNEL_FAILURE: 'channel-failure'
}
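# Illustrative helper (not part of the original module): map a message type
# number to a readable label for debug output, falling back to the raw number
# for types missing from MSG_NAMES.
def _msg_name(ptype):
    return MSG_NAMES.get(ptype, 'unknown packet type %d' % ptype)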
# authentication request return codes:
AUTH_SUCCESSFUL, AUTH_PARTIALLY_SUCCESSFUL, AUTH_FAILED = range(3)
# channel request failed reasons:
(OPEN_SUCCEEDED,
OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED,
OPEN_FAILED_CONNECT_FAILED,
OPEN_FAILED_UNKNOWN_CHANNEL_TYPE,
OPEN_FAILED_RESOURCE_SHORTAGE) = range(0, 5)
CONNECTION_FAILED_CODE = {
1: 'Administratively prohibited',
2: 'Connect failed',
3: 'Unknown channel type',
4: 'Resource shortage'
}
DISCONNECT_SERVICE_NOT_AVAILABLE, DISCONNECT_AUTH_CANCELLED_BY_USER, \
DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE = 7, 13, 14
from osrandom import OSRandomPool
# keep a crypto-strong PRNG nearby
randpool = OSRandomPool()
import sys
if sys.version_info < (2, 3):
try:
import logging
except:
import logging22 as logging
import select
PY22 = True
import socket
if not hasattr(socket, 'timeout'):
class timeout(socket.error): pass
socket.timeout = timeout
del timeout
else:
import logging
PY22 = False
DEBUG = logging.DEBUG
INFO = logging.INFO
WARNING = logging.WARNING
ERROR = logging.ERROR
CRITICAL = logging.CRITICAL
| mit | -7,531,929,824,132,645,000 | -4,619,936,015,470,390,000 | 31.214286 | 80 | 0.71052 | false |
netzkolchose/django-cms | cms/migrations/0002_auto_20140816_1918.py | 45 | 8472 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import cms.models.static_placeholder
import cms.models.fields
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import models, migrations
import django.utils.timezone
User = get_user_model()
user_model_label = '%s.%s' % (User._meta.app_label, User._meta.model_name)
user_ptr_name = '%s_ptr' % User._meta.object_name.lower()
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='PageUser',
fields=[
(user_ptr_name, models.OneToOneField(primary_key=True, to=settings.AUTH_USER_MODEL, auto_created=True, parent_link=True, serialize=False)),
('created_by', models.ForeignKey(to=settings.AUTH_USER_MODEL, related_name='created_users')),
],
options={
'verbose_name': 'User (page)',
'verbose_name_plural': 'Users (page)',
},
bases=(user_model_label,),
),
migrations.CreateModel(
name='PageUserGroup',
fields=[
('group_ptr', models.OneToOneField(primary_key=True, to='auth.Group', auto_created=True, parent_link=True, serialize=False)),
('created_by', models.ForeignKey(to=settings.AUTH_USER_MODEL, related_name='created_usergroups')),
],
options={
'verbose_name': 'User group (page)',
'verbose_name_plural': 'User groups (page)',
},
bases=('auth.group',),
),
migrations.CreateModel(
name='Placeholder',
fields=[
('id', models.AutoField(primary_key=True, verbose_name='ID', auto_created=True, serialize=False)),
('slot', models.CharField(db_index=True, max_length=50, verbose_name='slot', editable=False)),
('default_width', models.PositiveSmallIntegerField(null=True, verbose_name='width', editable=False)),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='page',
name='placeholders',
field=models.ManyToManyField(to='cms.Placeholder', editable=False),
preserve_default=True,
),
migrations.AlterUniqueTogether(
name='page',
unique_together=set([('publisher_is_draft', 'application_namespace'), ('reverse_id', 'site', 'publisher_is_draft')]),
),
migrations.AddField(
model_name='cmsplugin',
name='placeholder',
field=models.ForeignKey(null=True, to='cms.Placeholder', editable=False),
preserve_default=True,
),
migrations.AddField(
model_name='aliaspluginmodel',
name='alias_placeholder',
field=models.ForeignKey(null=True, to='cms.Placeholder', related_name='alias_placeholder', editable=False),
preserve_default=True,
),
migrations.CreateModel(
name='PlaceholderReference',
fields=[
('cmsplugin_ptr', models.OneToOneField(primary_key=True, to='cms.CMSPlugin', auto_created=True, parent_link=True, serialize=False)),
('name', models.CharField(max_length=255)),
('placeholder_ref', cms.models.fields.PlaceholderField(null=True, to='cms.Placeholder', slotname='clipboard', editable=False)),
],
options={
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='StaticPlaceholder',
fields=[
('id', models.AutoField(primary_key=True, verbose_name='ID', auto_created=True, serialize=False)),
('name', models.CharField(max_length=255, default='', help_text='Descriptive name to identify this static placeholder. Not displayed to users.', blank=True, verbose_name='static placeholder name')),
('code', models.CharField(max_length=255, verbose_name='placeholder code', help_text='To render the static placeholder in templates.', blank=True)),
('dirty', models.BooleanField(default=False, editable=False)),
('creation_method', models.CharField(max_length=20, default='code', blank=True, verbose_name='creation_method', choices=cms.models.static_placeholder.StaticPlaceholder.CREATION_METHODS)),
('draft', cms.models.fields.PlaceholderField(null=True, to='cms.Placeholder', verbose_name='placeholder content', related_name='static_draft', slotname=cms.models.static_placeholder.static_slotname, editable=False)),
('public', cms.models.fields.PlaceholderField(null=True, to='cms.Placeholder', slotname=cms.models.static_placeholder.static_slotname, related_name='static_public', editable=False)),
('site', models.ForeignKey(null=True, to='sites.Site', blank=True)),
],
options={
'verbose_name': 'static placeholder',
'verbose_name_plural': 'static placeholders',
},
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name='staticplaceholder',
unique_together=set([('code', 'site')]),
),
migrations.CreateModel(
name='Title',
fields=[
('id', models.AutoField(primary_key=True, verbose_name='ID', auto_created=True, serialize=False)),
('language', models.CharField(db_index=True, max_length=15, verbose_name='language')),
('title', models.CharField(max_length=255, verbose_name='title')),
('page_title', models.CharField(max_length=255, null=True, help_text='overwrite the title (html title tag)', blank=True, verbose_name='title')),
('menu_title', models.CharField(max_length=255, null=True, help_text='overwrite the title in the menu', blank=True, verbose_name='title')),
('meta_description', models.TextField(max_length=155, null=True, help_text='The text displayed in search engines.', blank=True, verbose_name='description')),
('slug', models.SlugField(max_length=255, verbose_name='slug')),
('path', models.CharField(db_index=True, max_length=255, verbose_name='Path')),
('has_url_overwrite', models.BooleanField(db_index=True, default=False, editable=False, verbose_name='has url overwrite')),
('redirect', models.CharField(max_length=255, null=True, blank=True, verbose_name='redirect')),
('creation_date', models.DateTimeField(default=django.utils.timezone.now, verbose_name='creation date', editable=False)),
('published', models.BooleanField(default=False, verbose_name='is published')),
('publisher_is_draft', models.BooleanField(db_index=True, default=True, editable=False)),
('publisher_state', models.SmallIntegerField(db_index=True, default=0, editable=False)),
('page', models.ForeignKey(to='cms.Page', verbose_name='page', related_name='title_set')),
('publisher_public', models.OneToOneField(null=True, to='cms.Title', related_name='publisher_draft', editable=False)),
],
options={
},
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name='title',
unique_together=set([('language', 'page')]),
),
migrations.CreateModel(
name='UserSettings',
fields=[
('id', models.AutoField(primary_key=True, verbose_name='ID', auto_created=True, serialize=False)),
('language', models.CharField(max_length=10, choices=settings.LANGUAGES, help_text='The language for the admin interface and toolbar', verbose_name='Language')),
('clipboard', models.ForeignKey(null=True, to='cms.Placeholder', blank=True, editable=False)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, unique=True, related_name='djangocms_usersettings', editable=False)),
],
options={
'verbose_name': 'user setting',
'verbose_name_plural': 'user settings',
},
bases=(models.Model,),
),
]
| bsd-3-clause | 4,753,989,109,736,192,000 | -8,826,463,272,443,657,000 | 53.307692 | 232 | 0.596671 | false |
sankhesh/VTK | Interaction/Widgets/Testing/Python/TestBoxWidget.py | 26 | 3843 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
=========================================================================
Program: Visualization Toolkit
Module: TestNamedColorsIntegration.py
Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
All rights reserved.
See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================
'''
import vtk
import vtk.test.Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
class TestBoxWidget(vtk.test.Testing.vtkTest):
def testBoxWidget(self):
# Demonstrate how to use the vtkBoxWidget.
# This script uses a 3D box widget to define a "clipping box" to clip some
# simple geometry (a mace). Make sure that you hit the "W" key to activate the widget.
# create a sphere source
#
sphere = vtk.vtkSphereSource()
cone = vtk.vtkConeSource()
glyph = vtk.vtkGlyph3D()
glyph.SetInputConnection(sphere.GetOutputPort())
glyph.SetSourceConnection(cone.GetOutputPort())
glyph.SetVectorModeToUseNormal()
glyph.SetScaleModeToScaleByVector()
glyph.SetScaleFactor(0.25)
apd = vtk.vtkAppendPolyData()
apd.AddInputConnection(glyph.GetOutputPort())
apd.AddInputConnection(sphere.GetOutputPort())
maceMapper = vtk.vtkPolyDataMapper()
maceMapper.SetInputConnection(apd.GetOutputPort())
maceActor = vtk.vtkLODActor()
maceActor.SetMapper(maceMapper)
maceActor.VisibilityOn()
planes = vtk.vtkPlanes()
clipper = vtk.vtkClipPolyData()
clipper.SetInputConnection(apd.GetOutputPort())
clipper.SetClipFunction(planes)
clipper.InsideOutOn()
selectMapper = vtk.vtkPolyDataMapper()
selectMapper.SetInputConnection(clipper.GetOutputPort())
selectActor = vtk.vtkLODActor()
selectActor.SetMapper(selectMapper)
selectActor.GetProperty().SetColor(0, 1, 0)
selectActor.VisibilityOff()
selectActor.SetScale(1.01, 1.01, 1.01)
# Create the RenderWindow, Renderer and both Actors
#
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
iRen = vtk.vtkRenderWindowInteractor()
iRen.SetRenderWindow(renWin)
boxWidget = vtk.vtkBoxWidget()
boxWidget.SetInteractor(iRen)
ren.AddActor(maceActor)
ren.AddActor(selectActor)
# Add the actors to the renderer, set the background and size
#
ren.SetBackground(0.1, 0.2, 0.4)
renWin.SetSize(300, 300)
def SelectPolygons(widget, event_string):
'''
The callback takes two parameters.
Parameters:
widget - the object that generates the event.
event_string - the event name (which is a string).
'''
# boxWidget, selectActor and planes are taken from the enclosing scope
boxWidget.GetPlanes(planes)
selectActor.VisibilityOn()
# place the interactor initially
boxWidget.SetInputConnection(glyph.GetOutputPort())
boxWidget.PlaceWidget()
boxWidget.AddObserver("EndInteractionEvent", SelectPolygons)
# render and interact with data
renWin.Render()
img_file = "TestBoxWidget.png"
vtk.test.Testing.compareImage(iRen.GetRenderWindow(), vtk.test.Testing.getAbsImagePath(img_file), threshold=25)
vtk.test.Testing.interact()
if __name__ == "__main__":
vtk.test.Testing.main([(TestBoxWidget, 'test')])
| bsd-3-clause | 4,930,200,867,461,844,000 | 1,763,026,737,184,519,700 | 33.936364 | 119 | 0.633359 | false |
Elettronik/SickRage | lib/requests/packages/urllib3/poolmanager.py | 137 | 16345 | from __future__ import absolute_import
import collections
import functools
import logging
from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import port_by_scheme
from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown
from .packages.six.moves.urllib.parse import urljoin
from .request import RequestMethods
from .util.url import parse_url
from .util.retry import Retry
__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']
log = logging.getLogger(__name__)
SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
'ssl_version', 'ca_cert_dir', 'ssl_context')
# All known keyword arguments that could be provided to the pool manager, its
# pools, or the underlying connections. This is used to construct a pool key.
_key_fields = (
'key_scheme', # str
'key_host', # str
'key_port', # int
'key_timeout', # int or float or Timeout
'key_retries', # int or Retry
'key_strict', # bool
'key_block', # bool
'key_source_address', # str
'key_key_file', # str
'key_cert_file', # str
'key_cert_reqs', # str
'key_ca_certs', # str
'key_ssl_version', # str
'key_ca_cert_dir', # str
'key_ssl_context', # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext
'key_maxsize', # int
'key_headers', # dict
'key__proxy', # parsed proxy url
'key__proxy_headers', # dict
'key_socket_options', # list of (level (int), optname (int), value (int or str)) tuples
'key__socks_options', # dict
'key_assert_hostname', # bool or string
'key_assert_fingerprint', # str
)
#: The namedtuple class used to construct keys for the connection pool.
#: All custom key schemes should include the fields in this key at a minimum.
PoolKey = collections.namedtuple('PoolKey', _key_fields)
def _default_key_normalizer(key_class, request_context):
"""
Create a pool key out of a request context dictionary.
According to RFC 3986, both the scheme and host are case-insensitive.
Therefore, this function normalizes both before constructing the pool
key for an HTTPS request. If you wish to change this behaviour, provide
alternate callables to ``key_fn_by_scheme``.
:param key_class:
The class to use when constructing the key. This should be a namedtuple
with the ``scheme`` and ``host`` keys at a minimum.
:type key_class: namedtuple
:param request_context:
A dictionary-like object that contain the context for a request.
:type request_context: dict
:return: A namedtuple that can be used as a connection pool key.
:rtype: PoolKey
"""
# Since we mutate the dictionary, make a copy first
context = request_context.copy()
context['scheme'] = context['scheme'].lower()
context['host'] = context['host'].lower()
# These are both dictionaries and need to be transformed into frozensets
for key in ('headers', '_proxy_headers', '_socks_options'):
if key in context and context[key] is not None:
context[key] = frozenset(context[key].items())
# The socket_options key may be a list and needs to be transformed into a
# tuple.
socket_opts = context.get('socket_options')
if socket_opts is not None:
context['socket_options'] = tuple(socket_opts)
# Map the kwargs to the names in the namedtuple - this is necessary since
# namedtuples can't have fields starting with '_'.
for key in list(context.keys()):
context['key_' + key] = context.pop(key)
# Default to ``None`` for keys missing from the context
for field in key_class._fields:
if field not in context:
context[field] = None
return key_class(**context)
#: A dictionary that maps a scheme to a callable that creates a pool key.
#: This can be used to alter the way pool keys are constructed, if desired.
#: Each PoolManager makes a copy of this dictionary so they can be configured
#: globally here, or individually on the instance.
key_fn_by_scheme = {
'http': functools.partial(_default_key_normalizer, PoolKey),
'https': functools.partial(_default_key_normalizer, PoolKey),
}
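# Illustrative sketch (not part of the original module): building a customised
# key mapping in which HTTP pool keys carry an extra 'key_my_field' entry.
# 'MyPoolKey' and 'my_field' are made-up names; an instance would opt in with
# ``manager.key_fn_by_scheme = _example_custom_key_fns()``.
def _example_custom_key_fns():
    my_pool_key = collections.namedtuple(
        'MyPoolKey', PoolKey._fields + ('key_my_field',))
    custom = key_fn_by_scheme.copy()
    custom['http'] = functools.partial(_default_key_normalizer, my_pool_key)
    return custom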
pool_classes_by_scheme = {
'http': HTTPConnectionPool,
'https': HTTPSConnectionPool,
}
class PoolManager(RequestMethods):
"""
Allows for arbitrary requests while transparently keeping track of
necessary connection pools for you.
:param num_pools:
Number of connection pools to cache before discarding the least
recently used pool.
:param headers:
Headers to include with all requests, unless other headers are given
explicitly.
:param \\**connection_pool_kw:
Additional parameters are used to create fresh
:class:`urllib3.connectionpool.ConnectionPool` instances.
Example::
>>> manager = PoolManager(num_pools=2)
>>> r = manager.request('GET', 'http://google.com/')
>>> r = manager.request('GET', 'http://google.com/mail')
>>> r = manager.request('GET', 'http://yahoo.com/')
>>> len(manager.pools)
2
"""
proxy = None
def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
RequestMethods.__init__(self, headers)
self.connection_pool_kw = connection_pool_kw
self.pools = RecentlyUsedContainer(num_pools,
dispose_func=lambda p: p.close())
# Locally set the pool classes and keys so other PoolManagers can
# override them.
self.pool_classes_by_scheme = pool_classes_by_scheme
self.key_fn_by_scheme = key_fn_by_scheme.copy()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.clear()
# Return False to re-raise any potential exceptions
return False
def _new_pool(self, scheme, host, port, request_context=None):
"""
Create a new :class:`ConnectionPool` based on host, port, scheme, and
any additional pool keyword arguments.
If ``request_context`` is provided, it is provided as keyword arguments
to the pool class used. This method is used to actually create the
connection pools handed out by :meth:`connection_from_url` and
companion methods. It is intended to be overridden for customization.
"""
pool_cls = self.pool_classes_by_scheme[scheme]
if request_context is None:
request_context = self.connection_pool_kw.copy()
# Although the context has everything necessary to create the pool,
# this function has historically only used the scheme, host, and port
# in the positional args. When an API change is acceptable these can
# be removed.
for key in ('scheme', 'host', 'port'):
request_context.pop(key, None)
if scheme == 'http':
for kw in SSL_KEYWORDS:
request_context.pop(kw, None)
return pool_cls(host, port, **request_context)
def clear(self):
"""
Empty our store of pools and direct them all to close.
This will not affect in-flight connections, but they will not be
re-used after completion.
"""
self.pools.clear()
def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None):
"""
Get a :class:`ConnectionPool` based on the host, port, and scheme.
If ``port`` isn't given, it will be derived from the ``scheme`` using
``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
provided, it is merged with the instance's ``connection_pool_kw``
variable and used to create the new connection pool, if one is
needed.
"""
if not host:
raise LocationValueError("No host specified.")
request_context = self._merge_pool_kwargs(pool_kwargs)
request_context['scheme'] = scheme or 'http'
if not port:
port = port_by_scheme.get(request_context['scheme'].lower(), 80)
request_context['port'] = port
request_context['host'] = host
return self.connection_from_context(request_context)
def connection_from_context(self, request_context):
"""
Get a :class:`ConnectionPool` based on the request context.
``request_context`` must at least contain the ``scheme`` key and its
value must be a key in ``key_fn_by_scheme`` instance variable.
"""
scheme = request_context['scheme'].lower()
pool_key_constructor = self.key_fn_by_scheme[scheme]
pool_key = pool_key_constructor(request_context)
return self.connection_from_pool_key(pool_key, request_context=request_context)
def connection_from_pool_key(self, pool_key, request_context=None):
"""
Get a :class:`ConnectionPool` based on the provided pool key.
``pool_key`` should be a namedtuple that only contains immutable
objects. At a minimum it must have the ``scheme``, ``host``, and
``port`` fields.
"""
with self.pools.lock:
# If the scheme, host, or port doesn't match existing open
# connections, open a new ConnectionPool.
pool = self.pools.get(pool_key)
if pool:
return pool
# Make a fresh ConnectionPool of the desired type
scheme = request_context['scheme']
host = request_context['host']
port = request_context['port']
pool = self._new_pool(scheme, host, port, request_context=request_context)
self.pools[pool_key] = pool
return pool
def connection_from_url(self, url, pool_kwargs=None):
"""
Similar to :func:`urllib3.connectionpool.connection_from_url`.
If ``pool_kwargs`` is not provided and a new pool needs to be
constructed, ``self.connection_pool_kw`` is used to initialize
the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
is provided, it is used instead. Note that if a new pool does not
need to be created for the request, the provided ``pool_kwargs`` are
not used.
"""
u = parse_url(url)
return self.connection_from_host(u.host, port=u.port, scheme=u.scheme,
pool_kwargs=pool_kwargs)
def _merge_pool_kwargs(self, override):
"""
Merge a dictionary of override values for self.connection_pool_kw.
This does not modify self.connection_pool_kw and returns a new dict.
Any keys in the override dictionary with a value of ``None`` are
removed from the merged dictionary.
"""
base_pool_kwargs = self.connection_pool_kw.copy()
if override:
for key, value in override.items():
if value is None:
try:
del base_pool_kwargs[key]
except KeyError:
pass
else:
base_pool_kwargs[key] = value
return base_pool_kwargs
def urlopen(self, method, url, redirect=True, **kw):
"""
Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
with custom cross-host redirect logic and only sends the request-uri
portion of the ``url``.
The given ``url`` parameter must be absolute, such that an appropriate
:class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
"""
u = parse_url(url)
conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
kw['assert_same_host'] = False
kw['redirect'] = False
if 'headers' not in kw:
kw['headers'] = self.headers
if self.proxy is not None and u.scheme == "http":
response = conn.urlopen(method, url, **kw)
else:
response = conn.urlopen(method, u.request_uri, **kw)
redirect_location = redirect and response.get_redirect_location()
if not redirect_location:
return response
# Support relative URLs for redirecting.
redirect_location = urljoin(url, redirect_location)
# RFC 7231, Section 6.4.4
if response.status == 303:
method = 'GET'
retries = kw.get('retries')
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect)
try:
retries = retries.increment(method, url, response=response, _pool=conn)
except MaxRetryError:
if retries.raise_on_redirect:
raise
return response
kw['retries'] = retries
kw['redirect'] = redirect
log.info("Redirecting %s -> %s", url, redirect_location)
return self.urlopen(method, redirect_location, **kw)
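# Illustrative sketch (not part of the original module): a request issued
# through the manager, letting urlopen() follow cross-host redirects with an
# explicit retry budget. The URL and pool sizes are placeholder values.
def _example_poolmanager_request():
    manager = PoolManager(num_pools=2, maxsize=4)
    response = manager.urlopen('GET', 'http://example.com/',
                               retries=Retry(3, redirect=2))
    return response.status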
class ProxyManager(PoolManager):
"""
Behaves just like :class:`PoolManager`, but sends all requests through
the defined proxy, using the CONNECT method for HTTPS URLs.
:param proxy_url:
The URL of the proxy to be used.
:param proxy_headers:
A dictionary containing headers that will be sent to the proxy. In the
HTTP case they are sent with each request, while in the HTTPS/CONNECT
case they are sent only once. Could be used for proxy authentication.
Example:
>>> proxy = urllib3.ProxyManager('http://localhost:3128/')
>>> r1 = proxy.request('GET', 'http://google.com/')
>>> r2 = proxy.request('GET', 'http://httpbin.org/')
>>> len(proxy.pools)
1
>>> r3 = proxy.request('GET', 'https://httpbin.org/')
>>> r4 = proxy.request('GET', 'https://twitter.com/')
>>> len(proxy.pools)
3
"""
def __init__(self, proxy_url, num_pools=10, headers=None,
proxy_headers=None, **connection_pool_kw):
if isinstance(proxy_url, HTTPConnectionPool):
proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host,
proxy_url.port)
proxy = parse_url(proxy_url)
if not proxy.port:
port = port_by_scheme.get(proxy.scheme, 80)
proxy = proxy._replace(port=port)
if proxy.scheme not in ("http", "https"):
raise ProxySchemeUnknown(proxy.scheme)
self.proxy = proxy
self.proxy_headers = proxy_headers or {}
connection_pool_kw['_proxy'] = self.proxy
connection_pool_kw['_proxy_headers'] = self.proxy_headers
super(ProxyManager, self).__init__(
num_pools, headers, **connection_pool_kw)
def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None):
if scheme == "https":
return super(ProxyManager, self).connection_from_host(
host, port, scheme, pool_kwargs=pool_kwargs)
return super(ProxyManager, self).connection_from_host(
self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs)
def _set_proxy_headers(self, url, headers=None):
"""
Sets headers needed by proxies: specifically, the Accept and Host
headers. Only sets headers not provided by the user.
"""
headers_ = {'Accept': '*/*'}
netloc = parse_url(url).netloc
if netloc:
headers_['Host'] = netloc
if headers:
headers_.update(headers)
return headers_
def urlopen(self, method, url, redirect=True, **kw):
"Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
u = parse_url(url)
if u.scheme == "http":
# For proxied HTTPS requests, httplib sets the necessary headers
# on the CONNECT to the proxy. For HTTP, we'll definitely
# need to set 'Host' at the very least.
headers = kw.get('headers', self.headers)
kw['headers'] = self._set_proxy_headers(url, headers)
return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
def proxy_from_url(url, **kw):
return ProxyManager(proxy_url=url, **kw)
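# Illustrative sketch (not part of the original module): sending a request
# through a proxy. The proxy address, header value and target URL are
# placeholder values.
def _example_proxied_request():
    proxy = proxy_from_url('http://localhost:3128/',
                           proxy_headers={'Proxy-Authorization': 'Basic ...'})
    return proxy.request('GET', 'http://example.com/')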
| gpl-3.0 | -1,066,703,989,218,248,000 | 7,193,119,935,842,785,000 | 36.147727 | 92 | 0.620985 | false |
lewismc/nutchpy | setup.py | 8 | 4958 | #!/usr/bin/env python
from __future__ import absolute_import, division, print_function
import os
import sys
from fnmatch import fnmatchcase
from distutils.core import Command, setup
from distutils.util import convert_path
import glob
import subprocess
import shutil
#------------------------------------------------------------------------
# Top Level Packages
#------------------------------------------------------------------------
def find_packages(where='.', exclude=()):
out = []
stack = [(convert_path(where), '')]
while stack:
where, prefix = stack.pop(0)
for name in os.listdir(where):
fn = os.path.join(where,name)
if ('.' not in name and os.path.isdir(fn) and
os.path.isfile(os.path.join(fn, '__init__.py'))
):
out.append(prefix+name)
stack.append((fn, prefix+name+'.'))
if sys.version_info[0] == 3:
exclude = exclude + ('*py2only*', )
for pat in list(exclude) + ['ez_setup', 'distribute_setup']:
out = [item for item in out if not fnmatchcase(item, pat)]
return out
packages = find_packages()
if sys.platform == 'win32':
dir_sep = '\\'
else:
dir_sep = '/'
def get_data_files():
data_files = []
root = os.path.join("nutchpy","ex_data")
##scan catalog for files with the above extensions and add to pkg_data_dirs
for path, dirs, files in os.walk(root):
for fs in files:
#remove nutchpy from path name
install_path = dir_sep.join(path.split(dir_sep)[1:])
data_files.append(os.path.join(install_path,fs))
return data_files
package_data = dict(nutchpy=get_data_files())
#------------------------------------------------------------------------
# Commands
#------------------------------------------------------------------------
class CleanCommand(Command):
"""Custom distutils command to clean the .so and .pyc files."""
user_options = []
def initialize_options(self):
self._clean_me = []
self._clean_trees = []
for toplevel in packages:
for root, dirs, files in list(os.walk(toplevel)):
for f in files:
if os.path.splitext(f)[-1] in ('.pyc', '.so', '.o', '.pyd', '.jar'):
self._clean_me.append(os.path.join(root, f))
for d in ('build',):
if os.path.exists(d):
self._clean_trees.append(d)
def finalize_options(self):
pass
def run(self):
for clean_me in self._clean_me:
try:
print('flushing', clean_me)
os.unlink(clean_me)
except Exception:
pass
for clean_tree in self._clean_trees:
try:
print('flushing', clean_tree)
shutil.rmtree(clean_tree)
except Exception:
pass
#------------------------------------------------------------------------
# Setup
#------------------------------------------------------------------------
longdesc = open('README.md').read()
#------------------------------------------------------------------------
# Optional building with MAVEN
#------------------------------------------------------------------------
if not 'nojava' in sys.argv:
JAVA_SRC = "seqreader-app"
os.chdir(JAVA_SRC)
build_cmd = "mvn package"
os.system(build_cmd)
# subprocess.check_call(build_cmd, shell=os.name != 'nt',
# stdout=subprocess.PIPE, stderr=subprocess.PIPE)
os.chdir("..")
jar_file = os.path.join(JAVA_SRC,"target",
"seqreader-app-1.0-SNAPSHOT-jar-with-dependencies.jar")
java_lib_dir = os.path.join("nutchpy","java_libs")
if not os.path.exists(java_lib_dir):
os.mkdir(java_lib_dir)
shutil.copy(jar_file,java_lib_dir)
else:
assert 'nojava' == sys.argv.pop(2)
jar_file_list = glob.glob("nutchpy/java_libs/*")
jar_file_list = [os.path.relpath(path,start='nutchpy') for path in jar_file_list]
package_data['nutchpy'] = package_data['nutchpy']+jar_file_list
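# Illustrative note (not part of the original script): by default the Java
# sequence-file reader is built with Maven before packaging; passing the extra
# 'nojava' argument (expected as the third command-line token) skips the Maven
# build and reuses jars already present under nutchpy/java_libs, e.g.:
#
#   python setup.py install # builds seqreader-app with mvn first
#   python setup.py install nojava # reuses a previously built jar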
setup(
name='nutchpy',
version='0.1',
author='Continuum Analytics',
author_email='[email protected]',
description='nutchpy',
long_description=longdesc,
license='BSD',
platforms = ['any'],
install_requires=['py4j>=0.8.2.1'],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Intended Audience :: Education',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Big Data',
'Topic :: Java',
],
packages=packages,
package_data=package_data,
cmdclass = {
'clean': CleanCommand,
}
)
| bsd-2-clause | -4,422,126,452,387,371,000 | 4,756,134,128,287,669,000 | 28.164706 | 88 | 0.507059 | false |
ohmini/thaifoodapi | lib/django/contrib/sessions/models.py | 347 | 1298 | from __future__ import unicode_literals
from django.contrib.sessions.base_session import (
AbstractBaseSession, BaseSessionManager,
)
class SessionManager(BaseSessionManager):
use_in_migrations = True
class Session(AbstractBaseSession):
"""
Django provides full support for anonymous sessions. The session
framework lets you store and retrieve arbitrary data on a
per-site-visitor basis. It stores data on the server side and
abstracts the sending and receiving of cookies. Cookies contain a
session ID -- not the data itself.
The Django sessions framework is entirely cookie-based. It does
not fall back to putting session IDs in URLs. This is an intentional
design decision. Not only does that behavior make URLs ugly, it makes
your site vulnerable to session-ID theft via the "Referer" header.
For complete documentation on using Sessions in your code, consult
the sessions documentation that is shipped with Django (also available
on the Django Web site).
"""
objects = SessionManager()
@classmethod
def get_session_store_class(cls):
from django.contrib.sessions.backends.db import SessionStore
return SessionStore
class Meta(AbstractBaseSession.Meta):
db_table = 'django_session'
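# Illustrative sketch (not part of the original module): reading the decoded
# payload of a stored session. The session key below is a made-up value.
def _example_read_session(session_key='abc123'):
    session = Session.objects.get(pk=session_key)
    return session.get_decoded()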
| bsd-3-clause | -8,376,135,492,770,292,000 | 4,423,129,998,184,560,600 | 34.081081 | 74 | 0.74037 | false |
ehirt/odoo | addons/fetchmail/fetchmail.py | 6 | 15874 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import poplib
import time
from imaplib import IMAP4
from imaplib import IMAP4_SSL
from poplib import POP3
from poplib import POP3_SSL
try:
import cStringIO as StringIO
except ImportError:
import StringIO
import zipfile
import base64
from openerp import addons
from openerp.osv import fields, osv
from openerp import tools, api
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
MAX_POP_MESSAGES = 50
MAIL_TIMEOUT = 60
# Workaround for Python 2.7.8 bug https://bugs.python.org/issue23906
poplib._MAXLINE = 65536
class fetchmail_server(osv.osv):
"""Incoming POP/IMAP mail server account"""
_name = 'fetchmail.server'
_description = "POP/IMAP Server"
_order = 'priority'
_columns = {
'name':fields.char('Name', required=True, readonly=False),
'active':fields.boolean('Active', required=False),
'state':fields.selection([
('draft', 'Not Confirmed'),
('done', 'Confirmed'),
], 'Status', select=True, readonly=True, copy=False),
'server' : fields.char('Server Name', readonly=True, help="Hostname or IP of the mail server", states={'draft':[('readonly', False)]}),
'port' : fields.integer('Port', readonly=True, states={'draft':[('readonly', False)]}),
'type':fields.selection([
('pop', 'POP Server'),
('imap', 'IMAP Server'),
('local', 'Local Server'),
], 'Server Type', select=True, required=True, readonly=False),
'is_ssl':fields.boolean('SSL/TLS', help="Connections are encrypted with SSL/TLS through a dedicated port (default: IMAPS=993, POP3S=995)"),
'attach':fields.boolean('Keep Attachments', help="Whether attachments should be downloaded. "
"If not enabled, incoming emails will be stripped of any attachments before being processed"),
'original':fields.boolean('Keep Original', help="Whether a full original copy of each email should be kept for reference "
"and attached to each processed message. This will usually double the size of your message database."),
'date': fields.datetime('Last Fetch Date', readonly=True),
'user' : fields.char('Username', readonly=True, states={'draft':[('readonly', False)]}),
'password' : fields.char('Password', readonly=True, states={'draft':[('readonly', False)]}),
'action_id':fields.many2one('ir.actions.server', 'Server Action', help="Optional custom server action to trigger for each incoming mail, "
"on the record that was created or updated by this mail"),
'object_id': fields.many2one('ir.model', "Create a New Record", help="Process each incoming mail as part of a conversation "
"corresponding to this document type. This will create "
"new documents for new conversations, or attach follow-up "
"emails to the existing conversations (documents)."),
'priority': fields.integer('Server Priority', readonly=True, states={'draft':[('readonly', False)]}, help="Defines the order of processing, "
"lower values mean higher priority"),
'message_ids': fields.one2many('mail.mail', 'fetchmail_server_id', 'Messages', readonly=True),
'configuration' : fields.text('Configuration', readonly=True),
'script' : fields.char('Script', readonly=True),
}
_defaults = {
'state': "draft",
'type': "pop",
'active': True,
'priority': 5,
'attach': True,
'script': '/mail/static/scripts/openerp_mailgate.py',
}
def onchange_server_type(self, cr, uid, ids, server_type=False, ssl=False, object_id=False):
port = 0
values = {}
if server_type == 'pop':
port = ssl and 995 or 110
elif server_type == 'imap':
port = ssl and 993 or 143
else:
values['server'] = ''
values['port'] = port
conf = {
'dbname' : cr.dbname,
'uid' : uid,
'model' : 'MODELNAME',
}
if object_id:
m = self.pool.get('ir.model')
r = m.read(cr,uid,[object_id],['model'])
conf['model']=r[0]['model']
values['configuration'] = """Use the below script with the following command line options with your Mail Transport Agent (MTA)
openerp_mailgate.py --host=HOSTNAME --port=PORT -u %(uid)d -p PASSWORD -d %(dbname)s
Example configuration for the postfix mta running locally:
/etc/postfix/virtual_aliases:
@yourdomain openerp_mailgate@localhost
/etc/aliases:
openerp_mailgate: "|/path/to/openerp-mailgate.py --host=localhost -u %(uid)d -p PASSWORD -d %(dbname)s"
""" % conf
return {'value':values}
def set_draft(self, cr, uid, ids, context=None):
self.write(cr, uid, ids , {'state':'draft'})
return True
@api.cr_uid_ids_context
def connect(self, cr, uid, server_id, context=None):
if isinstance(server_id, (list,tuple)):
server_id = server_id[0]
server = self.browse(cr, uid, server_id, context)
if server.type == 'imap':
if server.is_ssl:
connection = IMAP4_SSL(server.server, int(server.port))
else:
connection = IMAP4(server.server, int(server.port))
connection.login(server.user, server.password)
elif server.type == 'pop':
if server.is_ssl:
connection = POP3_SSL(server.server, int(server.port))
else:
connection = POP3(server.server, int(server.port))
#TODO: use this to remove only unread messages
#connection.user("recent:"+server.user)
connection.user(server.user)
connection.pass_(server.password)
# Add timeout on socket
connection.sock.settimeout(MAIL_TIMEOUT)
return connection
def button_confirm_login(self, cr, uid, ids, context=None):
if context is None:
context = {}
for server in self.browse(cr, uid, ids, context=context):
try:
connection = server.connect()
server.write({'state':'done'})
except Exception, e:
_logger.exception("Failed to connect to %s server %s.", server.type, server.name)
raise osv.except_osv(_("Connection test failed!"), _("Here is what we got instead:\n %s.") % tools.ustr(e))
finally:
try:
if connection:
if server.type == 'imap':
connection.close()
elif server.type == 'pop':
connection.quit()
except Exception:
# ignored, just a consequence of the previous exception
pass
return True
def _fetch_mails(self, cr, uid, ids=False, context=None):
if not ids:
ids = self.search(cr, uid, [('state','=','done'),('type','in',['pop','imap'])])
return self.fetch_mail(cr, uid, ids, context=context)
def fetch_mail(self, cr, uid, ids, context=None):
"""WARNING: meant for cron usage only - will commit() after each email!"""
context = dict(context or {})
context['fetchmail_cron_running'] = True
mail_thread = self.pool.get('mail.thread')
action_pool = self.pool.get('ir.actions.server')
for server in self.browse(cr, uid, ids, context=context):
_logger.info('start checking for new emails on %s server %s', server.type, server.name)
context.update({'fetchmail_server_id': server.id, 'server_type': server.type})
count, failed = 0, 0
imap_server = False
pop_server = False
if server.type == 'imap':
try:
imap_server = server.connect()
imap_server.select()
result, data = imap_server.search(None, '(UNSEEN)')
for num in data[0].split():
res_id = None
result, data = imap_server.fetch(num, '(RFC822)')
imap_server.store(num, '-FLAGS', '\\Seen')
try:
res_id = mail_thread.message_process(cr, uid, server.object_id.model,
data[0][1],
save_original=server.original,
strip_attachments=(not server.attach),
context=context)
imap_server.store(num, '+FLAGS', '\\Seen')
except Exception:
_logger.exception('Failed to process mail from %s server %s.', server.type, server.name)
failed += 1
if res_id and server.action_id:
action_pool.run(cr, uid, [server.action_id.id], {'active_id': res_id, 'active_ids': [res_id], 'active_model': context.get("thread_model", server.object_id.model)})
cr.commit()
count += 1
_logger.info("Fetched %d email(s) on %s server %s; %d succeeded, %d failed.", count, server.type, server.name, (count - failed), failed)
except Exception:
_logger.exception("General failure when trying to fetch mail from %s server %s.", server.type, server.name)
finally:
if imap_server:
imap_server.close()
imap_server.logout()
elif server.type == 'pop':
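                # Editorial note (not in the original source): POP3 messages
                # are pulled in batches of MAX_POP_MESSAGES; the surrounding
                # while loop reconnects until fewer than a full batch remains.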
try:
while True:
pop_server = server.connect()
(numMsgs, totalSize) = pop_server.stat()
pop_server.list()
for num in range(1, min(MAX_POP_MESSAGES, numMsgs) + 1):
(header, msges, octets) = pop_server.retr(num)
msg = '\n'.join(msges)
res_id = None
try:
res_id = mail_thread.message_process(cr, uid, server.object_id.model,
msg,
save_original=server.original,
strip_attachments=(not server.attach),
context=context)
pop_server.dele(num)
except Exception:
_logger.exception('Failed to process mail from %s server %s.', server.type, server.name)
failed += 1
if res_id and server.action_id:
action_pool.run(cr, uid, [server.action_id.id], {'active_id': res_id, 'active_ids': [res_id], 'active_model': context.get("thread_model", server.object_id.model)})
cr.commit()
if numMsgs < MAX_POP_MESSAGES:
break
pop_server.quit()
_logger.info("Fetched %d email(s) on %s server %s; %d succeeded, %d failed.", numMsgs, server.type, server.name, (numMsgs - failed), failed)
except Exception:
_logger.exception("General failure when trying to fetch mail from %s server %s.", server.type, server.name)
finally:
if pop_server:
pop_server.quit()
server.write({'date': time.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT)})
return True
def _update_cron(self, cr, uid, context=None):
if context and context.get('fetchmail_cron_running'):
return
try:
cron = self.pool['ir.model.data'].get_object(
cr, uid, 'fetchmail', 'ir_cron_mail_gateway_action', context=context)
except ValueError:
# Nevermind if default cron cannot be found
return
        # Enable/disable the cron based on the number of 'done' servers of type pop or imap
cron.toggle(model=self._name, domain=[('state','=','done'), ('type','in',['pop','imap'])])
def create(self, cr, uid, values, context=None):
res = super(fetchmail_server, self).create(cr, uid, values, context=context)
self._update_cron(cr, uid, context=context)
return res
def write(self, cr, uid, ids, values, context=None):
res = super(fetchmail_server, self).write(cr, uid, ids, values, context=context)
self._update_cron(cr, uid, context=context)
return res
def unlink(self, cr, uid, ids, context=None):
res = super(fetchmail_server, self).unlink(cr, uid, ids, context=context)
self._update_cron(cr, uid, context=context)
return res
class mail_mail(osv.osv):
_inherit = "mail.mail"
_columns = {
'fetchmail_server_id': fields.many2one('fetchmail.server', "Inbound Mail Server",
readonly=True,
select=True,
oldname='server_id'),
}
def create(self, cr, uid, values, context=None):
if context is None:
context = {}
fetchmail_server_id = context.get('fetchmail_server_id')
if fetchmail_server_id:
values['fetchmail_server_id'] = fetchmail_server_id
res = super(mail_mail, self).create(cr, uid, values, context=context)
return res
def write(self, cr, uid, ids, values, context=None):
if context is None:
context = {}
fetchmail_server_id = context.get('fetchmail_server_id')
if fetchmail_server_id:
values['fetchmail_server_id'] = fetchmail_server_id
res = super(mail_mail, self).write(cr, uid, ids, values, context=context)
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 8,242,811,745,607,126,000 | 5,640,785,088,783,981,000 | 48.145511 | 195 | 0.525702 | false |
fafaman/django | django/contrib/gis/db/backends/base/models.py | 434 | 7111 | import re
from django.contrib.gis import gdal
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class SpatialRefSysMixin(object):
"""
The SpatialRefSysMixin is a class used by the database-dependent
SpatialRefSys objects to reduce redundant code.
"""
# For pulling out the spheroid from the spatial reference string. This
# regular expression is used only if the user does not have GDAL installed.
# TODO: Flattening not used in all ellipsoids, could also be a minor axis,
# or 'b' parameter.
spheroid_regex = re.compile(r'.+SPHEROID\[\"(?P<name>.+)\",(?P<major>\d+(\.\d+)?),(?P<flattening>\d{3}\.\d+),')
# For pulling out the units on platforms w/o GDAL installed.
# TODO: Figure out how to pull out angular units of projected coordinate system and
# fix for LOCAL_CS types. GDAL should be highly recommended for performing
# distance queries.
units_regex = re.compile(
r'.+UNIT ?\["(?P<unit_name>[\w \'\(\)]+)", ?(?P<unit>[\d\.]+)'
r'(,AUTHORITY\["(?P<unit_auth_name>[\w \'\(\)]+)",'
r'"(?P<unit_auth_val>\d+)"\])?\]([\w ]+)?(,'
r'AUTHORITY\["(?P<auth_name>[\w \'\(\)]+)","(?P<auth_val>\d+)"\])?\]$'
)
@property
def srs(self):
"""
Returns a GDAL SpatialReference object, if GDAL is installed.
"""
if gdal.HAS_GDAL:
# TODO: Is caching really necessary here? Is complexity worth it?
if hasattr(self, '_srs'):
# Returning a clone of the cached SpatialReference object.
return self._srs.clone()
else:
# Attempting to cache a SpatialReference object.
# Trying to get from WKT first.
try:
self._srs = gdal.SpatialReference(self.wkt)
return self.srs
except Exception as msg:
pass
try:
self._srs = gdal.SpatialReference(self.proj4text)
return self.srs
except Exception as msg:
pass
raise Exception('Could not get OSR SpatialReference from WKT: %s\nError:\n%s' % (self.wkt, msg))
else:
raise Exception('GDAL is not installed.')
@property
def ellipsoid(self):
"""
Returns a tuple of the ellipsoid parameters:
(semimajor axis, semiminor axis, and inverse flattening).
"""
if gdal.HAS_GDAL:
return self.srs.ellipsoid
else:
m = self.spheroid_regex.match(self.wkt)
if m:
return (float(m.group('major')), float(m.group('flattening')))
else:
return None
@property
def name(self):
"Returns the projection name."
return self.srs.name
@property
def spheroid(self):
"Returns the spheroid name for this spatial reference."
return self.srs['spheroid']
@property
def datum(self):
"Returns the datum for this spatial reference."
return self.srs['datum']
@property
def projected(self):
"Is this Spatial Reference projected?"
if gdal.HAS_GDAL:
return self.srs.projected
else:
return self.wkt.startswith('PROJCS')
@property
def local(self):
"Is this Spatial Reference local?"
if gdal.HAS_GDAL:
return self.srs.local
else:
return self.wkt.startswith('LOCAL_CS')
@property
def geographic(self):
"Is this Spatial Reference geographic?"
if gdal.HAS_GDAL:
return self.srs.geographic
else:
return self.wkt.startswith('GEOGCS')
@property
def linear_name(self):
"Returns the linear units name."
if gdal.HAS_GDAL:
return self.srs.linear_name
elif self.geographic:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit_name')
@property
def linear_units(self):
"Returns the linear units."
if gdal.HAS_GDAL:
return self.srs.linear_units
elif self.geographic:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit')
@property
def angular_name(self):
"Returns the name of the angular units."
if gdal.HAS_GDAL:
return self.srs.angular_name
elif self.projected:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit_name')
@property
def angular_units(self):
"Returns the angular units."
if gdal.HAS_GDAL:
return self.srs.angular_units
elif self.projected:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit')
@property
def units(self):
"Returns a tuple of the units and the name."
if self.projected or self.local:
return (self.linear_units, self.linear_name)
elif self.geographic:
return (self.angular_units, self.angular_name)
else:
return (None, None)
@classmethod
def get_units(cls, wkt):
"""
Class method used by GeometryField on initialization to
retrieve the units on the given WKT, without having to use
any of the database fields.
"""
if gdal.HAS_GDAL:
return gdal.SpatialReference(wkt).units
else:
m = cls.units_regex.match(wkt)
return m.group('unit'), m.group('unit_name')
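    # Editorial note: with GDAL this delegates to SpatialReference.units, e.g.
    # roughly (1.0, 'metre') for a metre-based projected CRS; the regex branch
    # only recovers the unit value and name embedded in the WKT string.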
@classmethod
def get_spheroid(cls, wkt, string=True):
"""
Class method used by GeometryField on initialization to
retrieve the `SPHEROID[..]` parameters from the given WKT.
"""
if gdal.HAS_GDAL:
srs = gdal.SpatialReference(wkt)
sphere_params = srs.ellipsoid
sphere_name = srs['spheroid']
else:
m = cls.spheroid_regex.match(wkt)
if m:
sphere_params = (float(m.group('major')), float(m.group('flattening')))
sphere_name = m.group('name')
else:
return None
if not string:
return sphere_name, sphere_params
else:
# `string` parameter used to place in format acceptable by PostGIS
if len(sphere_params) == 3:
radius, flattening = sphere_params[0], sphere_params[2]
else:
radius, flattening = sphere_params
return 'SPHEROID["%s",%s,%s]' % (sphere_name, radius, flattening)
def __str__(self):
"""
Returns the string representation. If GDAL is installed,
it will be 'pretty' OGC WKT.
"""
try:
return six.text_type(self.srs)
except Exception:
return six.text_type(self.wkt)
| bsd-3-clause | -5,641,564,485,339,582,000 | -152,816,133,925,936,740 | 31.619266 | 115 | 0.557868 | false |
blrm/openshift-tools | openshift/installer/vendored/openshift-ansible-3.11.28-1/roles/lib_utils/filter_plugins/openshift_hosted_filters.py | 44 | 1480 | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Custom filters for use in openshift_hosted
'''
class FilterModule(object):
''' Custom ansible filters for use by openshift_hosted role'''
@staticmethod
def get_router_replicas(replicas=None, router_nodes=None):
''' This function will return the number of replicas
based on the results from the defined
openshift_hosted_router_replicas OR
the query from oc_obj on openshift nodes with a selector OR
default to 1
'''
# We always use what they've specified if they've specified a value
if replicas is not None:
return replicas
replicas = 1
# Ignore boolean expression limit of 5.
# pylint: disable=too-many-boolean-expressions
if (isinstance(router_nodes, dict) and
'results' in router_nodes and
'results' in router_nodes['results'] and
isinstance(router_nodes['results']['results'], list) and
len(router_nodes['results']['results']) > 0 and
'items' in router_nodes['results']['results'][0]):
if len(router_nodes['results']['results'][0]['items']) > 0:
replicas = len(router_nodes['results']['results'][0]['items'])
return replicas
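    # Illustrative behaviour (hypothetical inputs): with replicas=None and an
    # oc_obj node query whose first result contains 3 items, this returns 3;
    # if the query data is missing or malformed, the default of 1 is returned.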
def filters(self):
''' returns a mapping of filters to methods '''
return {'get_router_replicas': self.get_router_replicas}
| apache-2.0 | -5,160,579,368,843,024,000 | -8,382,151,529,198,987,000 | 34.238095 | 78 | 0.598649 | false |
LearnEra/LearnEraPlaftform | common/lib/chem/chem/miller.py | 46 | 9239 | """ Calculation of Miller indices """
import numpy as np
import math
import fractions as fr
import decimal
import json
def lcm(a, b):
"""
Returns least common multiple of a, b
Args:
a, b: floats
Returns:
float
"""
return a * b / fr.gcd(a, b)
def segment_to_fraction(distance):
"""
Converts lengths of which the plane cuts the axes to fraction.
    Tries to convert the distance to the closest nice fraction with a
    denominator less than or equal to 10. This is purely for simplicity and
    clarity of learning purposes. Jenny: 'In typical courses students usually
    do not encounter indices any higher than 6'.
    If distance is not a number (numpy nan), it means that the plane is
    parallel to the axis or contains it. The inverted fraction of nan
    (nan is treated as 1/0), i.e. 0 / 1, is returned.
Generally (special cases):
    a) if distance is smaller than some constant, e.g. 0.01011,
    then the fraction's denominator is usually much greater than 10.
    b) Also, if a student sets a point at 0.66 -> 1/3, it is the (3,3,3) plane,
    but slightly moving the mouse and clicking on 0.65 would give the
    (16,15,16) plane. That is why point coordinates are adjusted to the
    closest tick or tick + tick / 2 value, and the UI now sends the server
    only values that are multiples of 0.05 (half of a tick). The same rounding
    is implemented for unittests.
    But if one wants to calculate Miller indices with exact coordinates and
    with nice fractions (which produce small Miller indices), one may want to
    shift to a new origin when the segments are like S = (0.015, >0.05, >0.05),
    i.e. close to zero in one coordinate. Updating S to (0, >0.05, >0.05) and
    shifting the origin yields nice small fractions. There is also a degenerate
    case such as S = (0.015, 0.012, >0.05): updating S to (0, 0, >0.05) gives a
    line, and this case should be considered separately. (Small nice Miller
    numbers and the possibility to create very small segments cannot be
    implemented at the same time.)
Args:
distance: float distance that plane cuts on axis, it must not be 0.
Distance is multiple of 0.05.
Returns:
Inverted fraction.
0 / 1 if distance is nan
"""
if np.isnan(distance):
return fr.Fraction(0, 1)
else:
fract = fr.Fraction(distance).limit_denominator(10)
return fr.Fraction(fract.denominator, fract.numerator)
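# Editorial usage sketch (values assumed, not from the original source):
#   segment_to_fraction(0.5)           # -> Fraction(2, 1)
#   segment_to_fraction(float('nan'))  # -> Fraction(0, 1), axis-parallel plane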
def sub_miller(segments):
'''
Calculates Miller indices from segments.
Algorithm:
1. Obtain inverted fraction from segments
2. Find common denominator of inverted fractions
    3. Bring the fractions to the common denominator and throw the denominator away.
4. Return obtained values.
Args:
List of 3 floats, meaning distances that plane cuts on x, y, z axes.
        Each float must be non-zero, meaning that the plane does not intersect
        the origin, i.e. the shift of origin has already been done.
Returns:
String that represents Miller indices, e.g: (-6,3,-6) or (2,2,2)
'''
fracts = [segment_to_fraction(segment) for segment in segments]
common_denominator = reduce(lcm, [fract.denominator for fract in fracts])
miller = ([fract.numerator * math.fabs(common_denominator) /
fract.denominator for fract in fracts])
    return '(' + ','.join(map(str, map(decimal.Decimal, miller))) + ')'
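# Editorial example (assumed input): a plane cutting x at 0.5, y at 1.0 and
# parallel to z gives
#   sub_miller([0.5, 1.0, float('nan')])  # -> '(2,1,0)'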
def miller(points):
"""
Calculates Miller indices from points.
Algorithm:
    1. Calculate the normal vector to a plane that goes through all points.
2. Set origin.
3. Create Cartesian coordinate system (Ccs).
4. Find the lengths of segments of which the plane cuts the axes. Equation
of a line for axes: Origin + (Coordinate_vector - Origin) * parameter.
    5. If the plane goes through the Origin:
a) Find new random origin: find unit cube vertex, not crossed by a plane.
b) Repeat 2-4.
c) Fix signs of segments after Origin shift. This means to consider
    original directions of axes. E.g.: Origin was 0,0,0 and became
    new_origin. If new_origin has the same Y coordinate as Origin, then the
    segment does not change its sign. But if new_origin has another Y
    coordinate than origin (was 0, became 1), then the segment has to change
    its sign (it now
lies on negative side of Y axis). New Origin 0 value of X or Y or Z
coordinate means that segment does not change sign, 1 value -> does
change. So new sign is (1 - 2 * new_origin): 0 -> 1, 1 -> -1
6. Run function that calculates miller indices from segments.
Args:
List of points. Each point is list of float coordinates. Order of
coordinates in point's list: x, y, z. Points are different!
Returns:
String that represents Miller indices, e.g: (-6,3,-6) or (2,2,2)
"""
N = np.cross(points[1] - points[0], points[2] - points[0])
O = np.array([0, 0, 0])
P = points[0] # point of plane
Ccs = map(np.array, [[1.0, 0, 0], [0, 1.0, 0], [0, 0, 1.0]])
segments = ([np.dot(P - O, N) / np.dot(ort, N) if np.dot(ort, N) != 0 else
np.nan for ort in Ccs])
if any(x == 0 for x in segments): # Plane goes through origin.
vertices = [ # top:
np.array([1.0, 1.0, 1.0]),
np.array([0.0, 0.0, 1.0]),
np.array([1.0, 0.0, 1.0]),
np.array([0.0, 1.0, 1.0]),
# bottom, except 0,0,0:
np.array([1.0, 0.0, 0.0]),
np.array([0.0, 1.0, 0.0]),
np.array([1.0, 1.0, 1.0]),
]
for vertex in vertices:
if np.dot(vertex - O, N) != 0: # vertex not in plane
new_origin = vertex
break
# obtain new axes with center in new origin
X = np.array([1 - new_origin[0], new_origin[1], new_origin[2]])
Y = np.array([new_origin[0], 1 - new_origin[1], new_origin[2]])
Z = np.array([new_origin[0], new_origin[1], 1 - new_origin[2]])
new_Ccs = [X - new_origin, Y - new_origin, Z - new_origin]
segments = ([np.dot(P - new_origin, N) / np.dot(ort, N) if
np.dot(ort, N) != 0 else np.nan for ort in new_Ccs])
        # fix signs of indices: 0 -> 1, 1 -> -1
segments = (1 - 2 * new_origin) * segments
return sub_miller(segments)
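# Editorial worked example (hypothetical points): the plane through (1,0,0),
# (0,1,0) and (0,0,1) cuts every axis at 1, so
#   miller([np.array([1.0, 0, 0]), np.array([0, 1.0, 0]), np.array([0, 0, 1.0])])
# returns '(1,1,1)'.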
def grade(user_input, correct_answer):
'''
Grade crystallography problem.
    Returns true if the lattices are the same and the Miller indices are the
    same or the negation of each other, e.g. (2,2,2) matches (2, 2, 2) or
    (-2, -2, -2), because the sign depends only on the student's selection of
    origin.
Args:
user_input, correct_answer: json. Format:
user_input: {"lattice":"sc","points":[["0.77","0.00","1.00"],
["0.78","1.00","0.00"],["0.00","1.00","0.72"]]}
correct_answer: {'miller': '(00-1)', 'lattice': 'bcc'}
"lattice" is one of: "", "sc", "bcc", "fcc"
Returns:
True or false.
'''
def negative(m):
"""
Change sign of Miller indices.
Args:
m: string with meaning of Miller indices. E.g.:
(-6,3,-6) -> (6, -3, 6)
Returns:
String with changed signs.
"""
output = ''
i = 1
while i in range(1, len(m) - 1):
if m[i] in (',', ' '):
output += m[i]
elif m[i] not in ('-', '0'):
output += '-' + m[i]
elif m[i] == '0':
output += m[i]
else:
i += 1
output += m[i]
i += 1
return '(' + output + ')'
def round0_25(point):
"""
        Rounds point coordinates to the closest 0.05 value.
Args:
point: list of float coordinates. Order of coordinates: x, y, z.
Returns:
            list of coordinates rounded to the closest 0.05 value
"""
rounded_points = []
for coord in point:
base = math.floor(coord * 10)
fractional_part = (coord * 10 - base)
aliquot0_25 = math.floor(fractional_part / 0.25)
if aliquot0_25 == 0.0:
rounded_points.append(base / 10)
if aliquot0_25 in (1.0, 2.0):
rounded_points.append(base / 10 + 0.05)
if aliquot0_25 == 3.0:
rounded_points.append(base / 10 + 0.1)
return rounded_points
user_answer = json.loads(user_input)
if user_answer['lattice'] != correct_answer['lattice']:
return False
points = [map(float, p) for p in user_answer['points']]
if len(points) < 3:
return False
    # round point to closest 0.05 value
points = [round0_25(point) for point in points]
points = [np.array(point) for point in points]
# print miller(points), (correct_answer['miller'].replace(' ', ''),
# negative(correct_answer['miller']).replace(' ', ''))
if miller(points) in (correct_answer['miller'].replace(' ', ''), negative(correct_answer['miller']).replace(' ', '')):
return True
return False
| agpl-3.0 | -5,507,049,338,105,942,000 | 9,143,113,302,810,325,000 | 33.602996 | 122 | 0.577335 | false |
aparo/django-nonrel | django/contrib/localflavor/at/forms.py | 71 | 2271 | """
AT-specific Form helpers
"""
import re
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import Field, RegexField, Select
from django.forms import ValidationError
re_ssn = re.compile(r'^\d{4} \d{6}')
class ATZipCodeField(RegexField):
"""
A form field that validates its input is an Austrian postcode.
Accepts 4 digits.
"""
default_error_messages = {
'invalid': _('Enter a zip code in the format XXXX.'),
}
def __init__(self, *args, **kwargs):
super(ATZipCodeField, self).__init__(r'^\d{4}$',
max_length=None, min_length=None, *args, **kwargs)
class ATStateSelect(Select):
"""
A Select widget that uses a list of AT states as its choices.
"""
def __init__(self, attrs=None):
from django.contrib.localflavor.at.at_states import STATE_CHOICES
super(ATStateSelect, self).__init__(attrs, choices=STATE_CHOICES)
class ATSocialSecurityNumberField(Field):
"""
Austrian Social Security numbers are composed of a 4 digits and 6 digits
field. The latter represents in most cases the person's birthdate while
the first 4 digits represent a 3-digits counter and a one-digit checksum.
The 6-digits field can also differ from the person's birthdate if the
3-digits counter suffered an overflow.
This code is based on information available on
http://de.wikipedia.org/wiki/Sozialversicherungsnummer#.C3.96sterreich
"""
default_error_messages = {
'invalid': _(u'Enter a valid Austrian Social Security Number in XXXX XXXXXX format.'),
}
def clean(self, value):
if not re_ssn.search(value):
raise ValidationError(self.error_messages['invalid'])
sqnr, date = value.split(" ")
sqnr, check = (sqnr[:3], (sqnr[3]))
if int(sqnr) < 100:
raise ValidationError(self.error_messages['invalid'])
res = int(sqnr[0])*3 + int(sqnr[1])*7 + int(sqnr[2])*9 \
+ int(date[0])*5 + int(date[1])*8 + int(date[2])*4 \
+ int(date[3])*2 + int(date[4])*1 + int(date[5])*6
res = res % 11
if res != int(check):
raise ValidationError(self.error_messages['invalid'])
return u'%s%s %s'%(sqnr, check, date,)
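# Editorial example (a value constructed to satisfy the checksum above, not a
# real number): ATSocialSecurityNumberField().clean(u'1237 010180') passes,
# since (1*3 + 2*7 + 3*9 + 0*5 + 1*8 + 0*4 + 1*2 + 8*1 + 0*6) % 11 == 7, the
# fourth digit of the first group.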
| bsd-3-clause | -4,668,601,000,011,485,000 | -2,440,150,266,566,155,000 | 33.938462 | 94 | 0.636724 | false |
40223236/w16b_test | static/Brython3.1.1-20150328-091302/Lib/_struct.py | 726 | 13787 | #
# This module is a pure Python version of pypy.module.struct.
# It is only imported if the vastly faster pypy.module.struct is not
# compiled in. For now we keep this version for reference and
# because pypy.module.struct is not ootype-backend-friendly yet.
#
# this module 'borrowed' from
# https://bitbucket.org/pypy/pypy/src/18626459a9b2/lib_pypy/_struct.py?at=py3k-listview_str
"""Functions to convert between Python values and C structs.
Python strings are used to hold the data representing the C struct
and also as format strings to describe the layout of data in the C struct.
The optional first format char indicates byte order, size and alignment:
@: native order, size & alignment (default)
=: native order, std. size & alignment
<: little-endian, std. size & alignment
>: big-endian, std. size & alignment
!: same as >
The remaining chars indicate types of args and must match exactly;
these can be preceded by a decimal repeat count:
x: pad byte (no data);
c:char;
b:signed byte;
B:unsigned byte;
h:short;
H:unsigned short;
i:int;
I:unsigned int;
l:long;
L:unsigned long;
f:float;
d:double.
Special cases (preceding decimal count indicates length):
s:string (array of char); p: pascal string (with count byte).
Special case (only available in native format):
P:an integer type that is wide enough to hold a pointer.
Special case (not in native mode unless 'long long' in platform C):
q:long long;
Q:unsigned long long
Whitespace between formats is ignored.
The variable struct.error is an exception raised on errors."""
import math, sys
# TODO: XXX Find a way to get information on native sizes and alignments
class StructError(Exception):
pass
error = StructError
def unpack_int(data,index,size,le):
bytes = [b for b in data[index:index+size]]
if le == 'little':
bytes.reverse()
number = 0
for b in bytes:
number = number << 8 | b
return int(number)
def unpack_signed_int(data,index,size,le):
number = unpack_int(data,index,size,le)
max = 2**(size*8)
if number > 2**(size*8 - 1) - 1:
number = int(-1*(max - number))
return number
INFINITY = 1e200 * 1e200
NAN = INFINITY / INFINITY
def unpack_char(data,index,size,le):
return data[index:index+size]
def pack_int(number,size,le):
x=number
res=[]
for i in range(size):
res.append(x&0xff)
x >>= 8
if le == 'big':
res.reverse()
return bytes(res)
def pack_signed_int(number,size,le):
if not isinstance(number, int):
raise StructError("argument for i,I,l,L,q,Q,h,H must be integer")
if number > 2**(8*size-1)-1 or number < -1*2**(8*size-1):
raise OverflowError("Number:%i too large to convert" % number)
return pack_int(number,size,le)
def pack_unsigned_int(number,size,le):
if not isinstance(number, int):
raise StructError("argument for i,I,l,L,q,Q,h,H must be integer")
if number < 0:
raise TypeError("can't convert negative long to unsigned")
if number > 2**(8*size)-1:
raise OverflowError("Number:%i too large to convert" % number)
return pack_int(number,size,le)
def pack_char(char,size,le):
return bytes(char)
def isinf(x):
return x != 0.0 and x / 2 == x
def isnan(v):
return v != v*1.0 or (v == 1.0 and v == 2.0)
def pack_float(x, size, le):
unsigned = float_pack(x, size)
result = []
for i in range(8):
result.append((unsigned >> (i * 8)) & 0xFF)
if le == "big":
result.reverse()
return bytes(result)
def unpack_float(data, index, size, le):
binary = [data[i] for i in range(index, index + 8)]
if le == "big":
binary.reverse()
unsigned = 0
for i in range(8):
unsigned |= binary[i] << (i * 8)
return float_unpack(unsigned, size, le)
def round_to_nearest(x):
"""Python 3 style round: round a float x to the nearest int, but
unlike the builtin Python 2.x round function:
- return an int, not a float
- do round-half-to-even, not round-half-away-from-zero.
    We assume that x is finite and nonnegative; expect wrong results
if you use this for negative x.
"""
int_part = int(x)
frac_part = x - int_part
if frac_part > 0.5 or frac_part == 0.5 and int_part & 1 == 1:
int_part += 1
return int_part
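# Editorial sanity examples (illustrative only):
#   round_to_nearest(4.5)  # -> 4, ties go to the even neighbour
#   round_to_nearest(5.5)  # -> 6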
def float_unpack(Q, size, le):
"""Convert a 32-bit or 64-bit integer created
by float_pack into a Python float."""
if size == 8:
MIN_EXP = -1021 # = sys.float_info.min_exp
MAX_EXP = 1024 # = sys.float_info.max_exp
MANT_DIG = 53 # = sys.float_info.mant_dig
BITS = 64
elif size == 4:
MIN_EXP = -125 # C's FLT_MIN_EXP
MAX_EXP = 128 # FLT_MAX_EXP
MANT_DIG = 24 # FLT_MANT_DIG
BITS = 32
else:
raise ValueError("invalid size value")
if Q >> BITS:
raise ValueError("input out of range")
# extract pieces
sign = Q >> BITS - 1
exp = (Q & ((1 << BITS - 1) - (1 << MANT_DIG - 1))) >> MANT_DIG - 1
mant = Q & ((1 << MANT_DIG - 1) - 1)
if exp == MAX_EXP - MIN_EXP + 2:
# nan or infinity
result = float('nan') if mant else float('inf')
elif exp == 0:
# subnormal or zero
result = math.ldexp(float(mant), MIN_EXP - MANT_DIG)
else:
# normal
mant += 1 << MANT_DIG - 1
result = math.ldexp(float(mant), exp + MIN_EXP - MANT_DIG - 1)
return -result if sign else result
def float_pack(x, size):
"""Convert a Python float x into a 64-bit unsigned integer
with the same byte representation."""
if size == 8:
MIN_EXP = -1021 # = sys.float_info.min_exp
MAX_EXP = 1024 # = sys.float_info.max_exp
MANT_DIG = 53 # = sys.float_info.mant_dig
BITS = 64
elif size == 4:
MIN_EXP = -125 # C's FLT_MIN_EXP
MAX_EXP = 128 # FLT_MAX_EXP
MANT_DIG = 24 # FLT_MANT_DIG
BITS = 32
else:
raise ValueError("invalid size value")
sign = math.copysign(1.0, x) < 0.0
if math.isinf(x):
mant = 0
exp = MAX_EXP - MIN_EXP + 2
elif math.isnan(x):
mant = 1 << (MANT_DIG-2) # other values possible
exp = MAX_EXP - MIN_EXP + 2
elif x == 0.0:
mant = 0
exp = 0
else:
m, e = math.frexp(abs(x)) # abs(x) == m * 2**e
exp = e - (MIN_EXP - 1)
if exp > 0:
# Normal case.
mant = round_to_nearest(m * (1 << MANT_DIG))
mant -= 1 << MANT_DIG - 1
else:
# Subnormal case.
if exp + MANT_DIG - 1 >= 0:
mant = round_to_nearest(m * (1 << exp + MANT_DIG - 1))
else:
mant = 0
exp = 0
# Special case: rounding produced a MANT_DIG-bit mantissa.
assert 0 <= mant <= 1 << MANT_DIG - 1
if mant == 1 << MANT_DIG - 1:
mant = 0
exp += 1
# Raise on overflow (in some circumstances, may want to return
# infinity instead).
if exp >= MAX_EXP - MIN_EXP + 2:
raise OverflowError("float too large to pack in this format")
# check constraints
assert 0 <= mant < 1 << MANT_DIG - 1
assert 0 <= exp <= MAX_EXP - MIN_EXP + 2
assert 0 <= sign <= 1
return ((sign << BITS - 1) | (exp << MANT_DIG - 1)) | mant
big_endian_format = {
'x':{ 'size' : 1, 'alignment' : 0, 'pack' : None, 'unpack' : None},
'b':{ 'size' : 1, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
'B':{ 'size' : 1, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
'c':{ 'size' : 1, 'alignment' : 0, 'pack' : pack_char, 'unpack' : unpack_char},
's':{ 'size' : 1, 'alignment' : 0, 'pack' : None, 'unpack' : None},
'p':{ 'size' : 1, 'alignment' : 0, 'pack' : None, 'unpack' : None},
'h':{ 'size' : 2, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
'H':{ 'size' : 2, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
'i':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
'I':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
'l':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
'L':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
'q':{ 'size' : 8, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
'Q':{ 'size' : 8, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
'f':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_float, 'unpack' : unpack_float},
'd':{ 'size' : 8, 'alignment' : 0, 'pack' : pack_float, 'unpack' : unpack_float},
}
default = big_endian_format
formatmode={ '<' : (default, 'little'),
'>' : (default, 'big'),
'!' : (default, 'big'),
'=' : (default, sys.byteorder),
'@' : (default, sys.byteorder)
}
def getmode(fmt):
try:
formatdef,endianness = formatmode[fmt[0]]
index = 1
except (IndexError, KeyError):
formatdef,endianness = formatmode['@']
index = 0
return formatdef,endianness,index
def getNum(fmt,i):
num=None
cur = fmt[i]
while ('0'<= cur ) and ( cur <= '9'):
if num == None:
num = int(cur)
else:
num = 10*num + int(cur)
i += 1
cur = fmt[i]
return num,i
def calcsize(fmt):
"""calcsize(fmt) -> int
Return size of C struct described by format string fmt.
See struct.__doc__ for more on format strings."""
formatdef,endianness,i = getmode(fmt)
num = 0
result = 0
while i<len(fmt):
num,i = getNum(fmt,i)
cur = fmt[i]
try:
format = formatdef[cur]
except KeyError:
raise StructError("%s is not a valid format" % cur)
if num != None :
result += num*format['size']
else:
result += format['size']
num = 0
i += 1
return result
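# Editorial examples (illustrative): calcsize('>bhi') == 1 + 2 + 4 == 7 and
# calcsize('4s') == 4, per the size table defined above.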
def pack(fmt,*args):
"""pack(fmt, v1, v2, ...) -> string
Return string containing values v1, v2, ... packed according to fmt.
See struct.__doc__ for more on format strings."""
formatdef,endianness,i = getmode(fmt)
args = list(args)
n_args = len(args)
result = []
while i<len(fmt):
num,i = getNum(fmt,i)
cur = fmt[i]
try:
format = formatdef[cur]
except KeyError:
raise StructError("%s is not a valid format" % cur)
if num == None :
num_s = 0
num = 1
else:
num_s = num
if cur == 'x':
result += [b'\0'*num]
elif cur == 's':
if isinstance(args[0], bytes):
padding = num - len(args[0])
result += [args[0][:num] + b'\0'*padding]
args.pop(0)
else:
raise StructError("arg for string format not a string")
elif cur == 'p':
if isinstance(args[0], bytes):
padding = num - len(args[0]) - 1
if padding > 0:
result += [bytes([len(args[0])]) + args[0][:num-1] + b'\0'*padding]
else:
if num<255:
result += [bytes([num-1]) + args[0][:num-1]]
else:
result += [bytes([255]) + args[0][:num-1]]
args.pop(0)
else:
raise StructError("arg for string format not a string")
else:
if len(args) < num:
raise StructError("insufficient arguments to pack")
for var in args[:num]:
result += [format['pack'](var,format['size'],endianness)]
args=args[num:]
num = None
i += 1
if len(args) != 0:
raise StructError("too many arguments for pack format")
return b''.join(result)
def unpack(fmt,data):
"""unpack(fmt, string) -> (v1, v2, ...)
Unpack the string, containing packed C structure data, according
to fmt. Requires len(string)==calcsize(fmt).
See struct.__doc__ for more on format strings."""
formatdef,endianness,i = getmode(fmt)
j = 0
num = 0
result = []
length= calcsize(fmt)
if length != len (data):
raise StructError("unpack str size does not match format")
while i<len(fmt):
num,i=getNum(fmt,i)
cur = fmt[i]
i += 1
try:
format = formatdef[cur]
except KeyError:
raise StructError("%s is not a valid format" % cur)
if not num :
num = 1
if cur == 'x':
j += num
elif cur == 's':
result.append(data[j:j+num])
j += num
elif cur == 'p':
n=data[j]
if n >= num:
n = num-1
result.append(data[j+1:j+n+1])
j += num
else:
for n in range(num):
result += [format['unpack'](data,j,format['size'],endianness)]
j += format['size']
return tuple(result)
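# Editorial round-trip sketch (illustrative only):
#   pack('>h', 1)                     # -> b'\x00\x01'
#   unpack('>hh', pack('>hh', 1, 2))  # -> (1, 2)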
def pack_into(fmt, buf, offset, *args):
data = pack(fmt, *args)
buffer(buf)[offset:offset+len(data)] = data
def unpack_from(fmt, buf, offset=0):
size = calcsize(fmt)
data = buffer(buf)[offset:offset+size]
if len(data) != size:
raise error("unpack_from requires a buffer of at least %d bytes"
% (size,))
return unpack(fmt, data)
def _clearcache():
"Clear the internal cache."
# No cache in this implementation
| agpl-3.0 | 7,442,189,655,391,896,000 | 988,727,529,203,192,800 | 31.516509 | 95 | 0.547907 | false |
laperry1/android_external_chromium_org | third_party/tlslite/scripts/tls.py | 109 | 10946 | #!/usr/bin/env python
# Authors:
# Trevor Perrin
# Marcelo Fernandez - bugfix and NPN support
# Martin von Loewis - python 3 port
#
# See the LICENSE file for legal information regarding use of this file.
from __future__ import print_function
import sys
import os
import os.path
import socket
import time
import getopt
try:
import httplib
from SocketServer import *
from BaseHTTPServer import *
from SimpleHTTPServer import *
except ImportError:
# Python 3.x
from http import client as httplib
from socketserver import *
from http.server import *
if __name__ != "__main__":
    raise RuntimeError("This must be run as a command, not used as a module!")
from tlslite.api import *
from tlslite import __version__
try:
from tack.structures.Tack import Tack
except ImportError:
pass
def printUsage(s=None):
if s:
print("ERROR: %s" % s)
print("")
print("Version: %s" % __version__)
print("")
print("RNG: %s" % prngName)
print("")
print("Modules:")
if tackpyLoaded:
print(" tackpy : Loaded")
else:
print(" tackpy : Not Loaded")
if m2cryptoLoaded:
print(" M2Crypto : Loaded")
else:
print(" M2Crypto : Not Loaded")
if pycryptoLoaded:
print(" pycrypto : Loaded")
else:
print(" pycrypto : Not Loaded")
if gmpyLoaded:
print(" GMPY : Loaded")
else:
print(" GMPY : Not Loaded")
print("")
print("""Commands:
server
[-k KEY] [-c CERT] [-t TACK] [-v VERIFIERDB] [-d DIR]
[--reqcert] HOST:PORT
client
[-k KEY] [-c CERT] [-u USER] [-p PASS]
HOST:PORT
""")
sys.exit(-1)
def printError(s):
"""Print error message and exit"""
sys.stderr.write("ERROR: %s\n" % s)
sys.exit(-1)
def handleArgs(argv, argString, flagsList=[]):
# Convert to getopt argstring format:
# Add ":" after each arg, ie "abc" -> "a:b:c:"
getOptArgString = ":".join(argString) + ":"
try:
opts, argv = getopt.getopt(argv, getOptArgString, flagsList)
except getopt.GetoptError as e:
printError(e)
# Default values if arg not present
privateKey = None
certChain = None
username = None
password = None
tacks = None
verifierDB = None
reqCert = False
directory = None
for opt, arg in opts:
if opt == "-k":
s = open(arg, "rb").read()
privateKey = parsePEMKey(s, private=True)
elif opt == "-c":
s = open(arg, "rb").read()
x509 = X509()
x509.parse(s)
certChain = X509CertChain([x509])
elif opt == "-u":
username = arg
elif opt == "-p":
password = arg
elif opt == "-t":
if tackpyLoaded:
s = open(arg, "rU").read()
tacks = Tack.createFromPemList(s)
elif opt == "-v":
verifierDB = VerifierDB(arg)
verifierDB.open()
elif opt == "-d":
directory = arg
elif opt == "--reqcert":
reqCert = True
else:
assert(False)
if not argv:
printError("Missing address")
if len(argv)>1:
printError("Too many arguments")
#Split address into hostname/port tuple
address = argv[0]
address = address.split(":")
if len(address) != 2:
raise SyntaxError("Must specify <host>:<port>")
address = ( address[0], int(address[1]) )
# Populate the return list
retList = [address]
if "k" in argString:
retList.append(privateKey)
if "c" in argString:
retList.append(certChain)
if "u" in argString:
retList.append(username)
if "p" in argString:
retList.append(password)
if "t" in argString:
retList.append(tacks)
if "v" in argString:
retList.append(verifierDB)
if "d" in argString:
retList.append(directory)
if "reqcert" in flagsList:
retList.append(reqCert)
return retList
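# Editorial example (hypothetical file names): for the "client" command,
#   handleArgs(["-k", "key.pem", "-c", "cert.pem", "localhost:4443"], "kcup")
# returns [(host, port), privateKey, certChain, None, None].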
def printGoodConnection(connection, seconds):
print(" Handshake time: %.3f seconds" % seconds)
print(" Version: %s" % connection.getVersionName())
print(" Cipher: %s %s" % (connection.getCipherName(),
connection.getCipherImplementation()))
if connection.session.srpUsername:
print(" Client SRP username: %s" % connection.session.srpUsername)
if connection.session.clientCertChain:
print(" Client X.509 SHA1 fingerprint: %s" %
connection.session.clientCertChain.getFingerprint())
if connection.session.serverCertChain:
print(" Server X.509 SHA1 fingerprint: %s" %
connection.session.serverCertChain.getFingerprint())
if connection.session.serverName:
print(" SNI: %s" % connection.session.serverName)
if connection.session.tackExt:
if connection.session.tackInHelloExt:
emptyStr = "\n (via TLS Extension)"
else:
emptyStr = "\n (via TACK Certificate)"
print(" TACK: %s" % emptyStr)
print(str(connection.session.tackExt))
print(" Next-Protocol Negotiated: %s" % connection.next_proto)
def clientCmd(argv):
(address, privateKey, certChain, username, password) = \
handleArgs(argv, "kcup")
if (certChain and not privateKey) or (not certChain and privateKey):
raise SyntaxError("Must specify CERT and KEY together")
if (username and not password) or (not username and password):
raise SyntaxError("Must specify USER with PASS")
if certChain and username:
raise SyntaxError("Can use SRP or client cert for auth, not both")
#Connect to server
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(5)
sock.connect(address)
connection = TLSConnection(sock)
settings = HandshakeSettings()
settings.useExperimentalTackExtension = True
try:
start = time.clock()
if username and password:
connection.handshakeClientSRP(username, password,
settings=settings, serverName=address[0])
else:
connection.handshakeClientCert(certChain, privateKey,
settings=settings, serverName=address[0])
stop = time.clock()
print("Handshake success")
except TLSLocalAlert as a:
if a.description == AlertDescription.user_canceled:
print(str(a))
else:
raise
sys.exit(-1)
except TLSRemoteAlert as a:
if a.description == AlertDescription.unknown_psk_identity:
if username:
print("Unknown username")
else:
raise
elif a.description == AlertDescription.bad_record_mac:
if username:
print("Bad username or password")
else:
raise
elif a.description == AlertDescription.handshake_failure:
print("Unable to negotiate mutually acceptable parameters")
else:
raise
sys.exit(-1)
printGoodConnection(connection, stop-start)
connection.close()
def serverCmd(argv):
(address, privateKey, certChain, tacks,
verifierDB, directory, reqCert) = handleArgs(argv, "kctbvd", ["reqcert"])
if (certChain and not privateKey) or (not certChain and privateKey):
raise SyntaxError("Must specify CERT and KEY together")
if tacks and not certChain:
raise SyntaxError("Must specify CERT with Tacks")
print("I am an HTTPS test server, I will listen on %s:%d" %
(address[0], address[1]))
if directory:
os.chdir(directory)
print("Serving files from %s" % os.getcwd())
if certChain and privateKey:
print("Using certificate and private key...")
if verifierDB:
print("Using verifier DB...")
if tacks:
print("Using Tacks...")
#############
sessionCache = SessionCache()
class MyHTTPServer(ThreadingMixIn, TLSSocketServerMixIn, HTTPServer):
def handshake(self, connection):
print("About to handshake...")
activationFlags = 0
if tacks:
if len(tacks) == 1:
activationFlags = 1
elif len(tacks) == 2:
activationFlags = 3
try:
start = time.clock()
settings = HandshakeSettings()
settings.useExperimentalTackExtension=True
connection.handshakeServer(certChain=certChain,
privateKey=privateKey,
verifierDB=verifierDB,
tacks=tacks,
activationFlags=activationFlags,
sessionCache=sessionCache,
settings=settings,
nextProtos=[b"http/1.1"])
# As an example (does not work here):
#nextProtos=[b"spdy/3", b"spdy/2", b"http/1.1"])
stop = time.clock()
except TLSRemoteAlert as a:
if a.description == AlertDescription.user_canceled:
print(str(a))
return False
else:
raise
except TLSLocalAlert as a:
if a.description == AlertDescription.unknown_psk_identity:
if username:
print("Unknown username")
return False
else:
raise
elif a.description == AlertDescription.bad_record_mac:
if username:
print("Bad username or password")
return False
else:
raise
elif a.description == AlertDescription.handshake_failure:
print("Unable to negotiate mutually acceptable parameters")
return False
else:
raise
connection.ignoreAbruptClose = True
printGoodConnection(connection, stop-start)
return True
httpd = MyHTTPServer(address, SimpleHTTPRequestHandler)
httpd.serve_forever()
if __name__ == '__main__':
if len(sys.argv) < 2:
printUsage("Missing command")
elif sys.argv[1] == "client"[:len(sys.argv[1])]:
clientCmd(sys.argv[2:])
elif sys.argv[1] == "server"[:len(sys.argv[1])]:
serverCmd(sys.argv[2:])
else:
printUsage("Unknown command: %s" % sys.argv[1])
| bsd-3-clause | 4,024,010,024,442,077,700 | -8,051,642,080,663,658,000 | 31.577381 | 94 | 0.552348 | false |
kustodian/ansible | lib/ansible/modules/network/f5/bigip_firewall_schedule.py | 24 | 21412 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_firewall_schedule
short_description: Manage BIG-IP AFM schedule configurations
description:
- Manage BIG-IP AFM schedule configurations.
version_added: 2.9
options:
name:
description:
- Specifies the name of the AFM schedule configuration.
type: str
required: True
description:
description:
- Specifies the user defined description text.
type: str
daily_hour_end:
description:
- Specifies the time of day the rule will stop being used.
- When not defined, the default of C(24:00) is used when creating a new schedule.
      - The time zone is always assumed to be UTC and values must be provided as C(HH:MM) using 24-hour clock format.
type: str
daily_hour_start:
description:
- Specifies the time of day the rule will start to be in use.
- The value must be a time before C(daily_hour_end).
- When not defined, the default of C(0:00) is used when creating a new schedule.
- When the value is set to C(all-day) both C(daily_hour_end) and C(daily_hour_start) are reset to their respective
defaults.
      - The time zone is always assumed to be UTC and values must be provided as C(HH:MM) using 24-hour clock format.
type: str
date_valid_end:
description:
- Specifies the end date/time this schedule will apply to the rule.
- The date must be after C(date_valid_start)
- When not defined the default of C(indefinite) is used when creating a new schedule.
- The time zone is always assumed to be UTC.
- The datetime format should always be the following C(YYYY-MM-DD:HH:MM:SS) format.
type: str
date_valid_start:
description:
- Specifies the start date/time this schedule will apply to the rule.
- When not defined the default of C(epoch) is used when creating a new schedule.
- The time zone is always assumed to be UTC.
- The datetime format should always be the following C(YYYY-MM-DD:HH:MM:SS) format.
type: str
days_of_week:
description:
- Specifies which days of the week the rule will be applied.
- When not defined the default value of C(all) is used when creating a new schedule.
- The C(all) value is mutually exclusive with other choices.
type: list
choices:
- sunday
- monday
- tuesday
- wednesday
- thursday
- friday
- saturday
- all
state:
description:
- When C(present), ensures that the resource exists.
- When C(absent), ensures the resource is removed.
type: str
choices:
- present
- absent
default: present
partition:
description:
- Device partition to manage resources on.
type: str
default: Common
extends_documentation_fragment: f5
author:
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create a 6 hour two day schedule, no start/end date
bigip_firewall_schedule:
name: barfoo
daily_hour_start: 13:00
daily_hour_end: 19:00
days_of_week:
- monday
- tuesday
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Create a seven day schedule with start/end date
bigip_firewall_schedule:
name: foobar
date_valid_start: "{{ lookup('pipe','date +%Y-%m-%d:%H:%M:%S') }}"
date_valid_end: "{{ lookup('pipe','date -d \"now + 7 days\" +%Y-%m-%d:%H:%M:%S') }}"
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Modify created schedule to all-day
bigip_firewall_schedule:
name: barfoo
daily_hour_start: all-day
days_of_week:
- monday
- tuesday
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Modify a schedule to have no end date
bigip_firewall_schedule:
name: foobar
date_valid_start: "{{ lookup('pipe','date +%Y-%m-%d:%H:%M:%S') }}"
date_valid_end: "indefinite"
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Remove created schedule
bigip_firewall_schedule:
name: foobar
state: absent
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
daily_hour_start:
description: The time of day the rule will start to be in use.
returned: changed
type: str
sample: '13:00'
daily_hour_end:
description: The time of day the rule will stop being used.
returned: changed
type: str
sample: '18:00'
date_valid_start:
description: The start date/time schedule will apply to the rule.
returned: changed
type: str
sample: 2019-03-01:15:30:00
date_valid_end:
description: The end date/time schedule will apply to the rule.
returned: changed
type: str
sample: 2019-03-11:15:30:00
days_of_week:
description: The days of the week the rule will be applied.
returned: changed
type: list
sample: ["monday","tuesday"]
description:
description: The user defined description text.
returned: changed
type: str
sample: Foo is bar
'''
import re
import datetime
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.compare import cmp_str_with_none
from library.module_utils.network.f5.compare import cmp_simple_list
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.compare import cmp_str_with_none
from ansible.module_utils.network.f5.compare import cmp_simple_list
class Parameters(AnsibleF5Parameters):
api_map = {
'dailyHourEnd': 'daily_hour_end',
'dailyHourStart': 'daily_hour_start',
'dateValidEnd': 'date_valid_end',
'dateValidStart': 'date_valid_start',
'daysOfWeek': 'days_of_week',
}
api_attributes = [
'dailyHourEnd',
'dailyHourStart',
'dateValidEnd',
'dateValidStart',
'daysOfWeek',
'description',
]
returnables = [
'daily_hour_end',
'daily_hour_start',
'date_valid_end',
'date_valid_start',
'days_of_week',
'description'
]
updatables = [
'daily_hour_end',
'daily_hour_start',
'date_valid_end',
'date_valid_start',
'days_of_week',
'description'
]
class ApiParameters(Parameters):
pass
class ModuleParameters(Parameters):
def _convert_datetime(self, value):
p = r'(\d{4})-(\d{1,2})-(\d{1,2})[:, T](\d{2}):(\d{2}):(\d{2})'
match = re.match(p, value)
if match:
date = '{0}-{1}-{2}T{3}:{4}:{5}Z'.format(*match.group(1, 2, 3, 4, 5, 6))
return date
raise F5ModuleError(
'Invalid datetime provided.'
)
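    # Editorial note: e.g. '2019-03-01:15:30:00' is normalised to
    # '2019-03-01T15:30:00Z', the form this module sends to the REST API.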
def _validate_time(self, value):
p = r'(\d{2}):(\d{2})'
match = re.match(p, value)
if match:
time = int(match.group(1)), int(match.group(2))
try:
datetime.time(*time)
except ValueError as ex:
raise F5ModuleError(str(ex))
def _compare_date_time(self, value1, value2, time=False):
if time:
p1 = r'(\d{2}):(\d{2})'
m1 = re.match(p1, value1)
m2 = re.match(p1, value2)
if m1 and m2:
start = tuple(int(i) for i in m1.group(1, 2))
end = tuple(int(i) for i in m2.group(1, 2))
if datetime.time(*start) > datetime.time(*end):
raise F5ModuleError(
'End time must be later than start time.'
)
else:
p1 = r'(\d{4})-(\d{1,2})-(\d{1,2})[:, T](\d{2}):(\d{2}):(\d{2})'
m1 = re.match(p1, value1)
m2 = re.match(p1, value2)
if m1 and m2:
start = tuple(int(i) for i in m1.group(1, 2, 3, 4, 5, 6))
end = tuple(int(i) for i in m2.group(1, 2, 3, 4, 5, 6))
if datetime.datetime(*start) > datetime.datetime(*end):
raise F5ModuleError(
'End date must be later than start date.'
)
@property
def daily_hour_start(self):
if self._values['daily_hour_start'] is None:
return None
if self._values['daily_hour_start'] == 'all-day':
return '0:00'
self._validate_time(self._values['daily_hour_start'])
if self._values['daily_hour_end'] is not None and self.daily_hour_end != '24:00':
self._compare_date_time(self._values['daily_hour_start'], self.daily_hour_end, time=True)
return self._values['daily_hour_start']
@property
def daily_hour_end(self):
if self._values['daily_hour_end'] is None:
return None
if self._values['daily_hour_start'] == 'all-day':
return '24:00'
if not self._values['daily_hour_end'] == '24:00':
self._validate_time(self._values['daily_hour_end'])
return self._values['daily_hour_end']
@property
def date_valid_end(self):
if self._values['date_valid_end'] is None:
return None
if self._values['date_valid_end'] in ['2038-1-18:19:14:07', 'indefinite']:
return 'indefinite'
result = self._convert_datetime(self._values['date_valid_end'])
return result
@property
def date_valid_start(self):
if self._values['date_valid_start'] is None:
return None
if self._values['date_valid_start'] in ['1970-1-1:00:00:00', 'epoch']:
return 'epoch'
result = self._convert_datetime(self._values['date_valid_start'])
if self._values['date_valid_end']:
if self._values['date_valid_end'] not in ['2038-1-18:19:14:07', 'indefinite']:
self._compare_date_time(result, self.date_valid_end)
return result
@property
def days_of_week(self):
if self._values['days_of_week'] is None:
return None
if 'all' in self._values['days_of_week']:
            if len(self._values['days_of_week']) > 1 and isinstance(self._values['days_of_week'], list):
raise F5ModuleError(
"The 'all' value must not be specified with other choices."
)
week = ['sunday', 'monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday']
return week
return self._values['days_of_week']
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
pass
class ReportableChanges(Changes):
def _convert_datetime(self, value):
if value is None:
return None
p = r'(\d{4})-(\d{1,2})-(\d{1,2})[:, T](\d{2}):(\d{2}):(\d{2})'
match = re.match(p, value)
if match:
date = '{0}-{1}-{2}:{3}:{4}:{5}'.format(*match.group(1, 2, 3, 4, 5, 6))
return date
@property
def date_valid_end(self):
result = self._convert_datetime(self._values['date_valid_end'])
return result
@property
def date_valid_start(self):
result = self._convert_datetime(self._values['date_valid_start'])
return result
@property
def days_of_week(self):
if self._values['days_of_week'] is None:
return None
if len(self._values['days_of_week']) == 7:
return 'all'
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def description(self):
return cmp_str_with_none(self.want.description, self.have.description)
@property
def days_of_week(self):
return cmp_simple_list(self.want.days_of_week, self.have.days_of_week)
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def exec_module(self):
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def absent(self):
if self.exists():
return self.remove()
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
return True
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/security/firewall/schedule/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/security/firewall/schedule/".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 409]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return True
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/security/firewall/schedule/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/security/firewall/schedule/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
response = self.client.api.delete(uri)
if response.status == 200:
return True
raise F5ModuleError(response.content)
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/security/firewall/schedule/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(
required=True
),
description=dict(),
daily_hour_end=dict(),
daily_hour_start=dict(),
date_valid_end=dict(),
date_valid_start=dict(),
days_of_week=dict(
type='list',
choices=[
'sunday',
'monday',
'tuesday',
'wednesday',
'thursday',
'friday',
'saturday',
'all',
]
),
state=dict(default='present', choices=['absent', 'present']),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
)
try:
mm = ModuleManager(module=module)
results = mm.exec_module()
module.exit_json(**results)
except F5ModuleError as ex:
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
| gpl-3.0 | 9,042,768,054,406,960,000 | -6,861,470,452,627,447,000 | 30.958209 | 120 | 0.581963 | false |
mesoscloud/events | 0.6.1/docker.py | 2 | 6182 | import concurrent.futures
import datetime
import http.client
import json
import pickle
import select
import socket
__all__ = []
class HTTPConnection(http.client.HTTPConnection):
def __init__(self):
http.client.HTTPConnection.__init__(self, 'localhost')
def connect(self):
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect('/var/run/docker.sock')
self.sock = sock
class HTTPError(Exception):
def __init__(self, status, reason):
self.status = status
self.reason = reason
def get(path, async=False):
conn = HTTPConnection()
try:
conn.request('GET', path)
resp = conn.getresponse()
if resp.status != 200:
raise HTTPError(resp.status, resp.reason)
except Exception:
conn.close()
raise
try:
if async:
return resp
elif resp.headers.get('Content-Type') == 'application/json':
return json.loads(resp.read().decode('utf-8'))
else:
return resp.read()
finally:
if not async:
conn.close()
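# Example usage (a sketch, assuming a Docker daemon is listening on /var/run/docker.sock;
# '/info' and '/events' are standard Docker Remote API endpoints):
#   info = get('/info')                  # JSON responses come back as decoded dicts
#   events = get('/events', async=True)  # async=True returns the raw response for streaming reads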
def containers():
return [Container(c['Id'], c['Created']) for c in get('/containers/json')]
class Container(object):
executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
since = 0
def __init__(self, id_, created):
self.id_ = id_
self.created = created
self.logs = None
self.logs_fd = None
self.logs_stream = 'stdout'
self.stats = None
self.stats_fd = None
self._info = None
def __repr__(self):
return "<Container %s created=%r>" % (self.id_, self.created)
def __str__(self):
return "%.12s" % self.id_
def __eq__(self, other):
if self.id_ == other.id_ and self.created == other.created:
return True
def inspect(self):
return get('/containers/%s/json' % self.id_)
def logs_start(self, epoll):
try:
info = self.inspect()
except HTTPError as exc:
raise
        url = '/containers/%s/logs?follow=1&stdout=1&stderr=1&since=%s&timestamps=1' % (self.id_, Container.since)
print(self, url)
self.logs = Container.executor.submit(get, url, async=True)
def logs_stop(self, epoll):
# Let's attempt to cancel the future just in case
self.logs.cancel()
try:
logs = self.logs.result(timeout=0)
except (concurrent.futures.CancelledError,
concurrent.futures.TimeoutError,
HTTPError) as exc:
print(self, 'logs', exc)
return
try:
fd = logs.fileno()
epoll.unregister(fd)
print(self, 'logs', "unregistered (fd=%s)." % fd)
except FileNotFoundError:
pass
logs.close()
def logs_check(self, epoll):
if self.logs_fd is not None:
return
try:
logs = self.logs.result(timeout=0)
except (concurrent.futures.TimeoutError,
HTTPError) as exc:
print(self, 'logs', exc)
return
print(self, 'logs', logs)
self.logs_fd = logs.fileno()
print(self, 'logs', self.logs_fd)
try:
epoll.register(self.logs_fd, select.EPOLLIN)
print(self, 'logs', "registered (fd=%s)." % self.logs_fd)
except FileExistsError:
return
def stats_start(self, epoll):
try:
info = self.inspect()
except HTTPError as exc:
raise
url = '/containers/%s/stats' % self.id_
print(self, url)
self.stats = Container.executor.submit(get, url, async=True)
def stats_stop(self, epoll):
# Let's attempt to cancel the future just in case
self.stats.cancel()
try:
stats = self.stats.result(timeout=0)
except (concurrent.futures.CancelledError,
concurrent.futures.TimeoutError,
HTTPError) as exc:
print(self, 'stats', exc)
return
try:
fd = stats.fileno()
epoll.unregister(fd)
print(self, 'stats', "unregistered (fd=%s)." % fd)
except FileNotFoundError:
pass
stats.close()
def stats_check(self, epoll):
if self.stats_fd is not None:
return
try:
stats = self.stats.result(timeout=0)
except (concurrent.futures.TimeoutError,
HTTPError) as exc:
print(self, 'stats', exc)
return
print(self, 'stats', stats)
self.stats_fd = stats.fileno()
print(self, 'stats', self.stats_fd)
try:
epoll.register(self.stats_fd, select.EPOLLIN)
print(self, 'stats', "registered (fd=%s)." % self.stats_fd)
except FileExistsError:
return
def parse(data):
"""Parse stream
>>> parse(b'80\\r\\n{"status":"create","id":"46e344569d70e9cf849a217701d5ef2e866dff122c1d5f1641b490e680c15c5d","from":"centos:7","time":1445856406}\\n\\r\\n')
(b'', b'{"status":"create","id":"46e344569d70e9cf849a217701d5ef2e866dff122c1d5f1641b490e680c15c5d","from":"centos:7","time":1445856406}\\n')
>>> parse(b'80\\r\\n{"status":"create","id":"46e344569d70e9cf849a217701d5ef2e866dff122c1d5f1641b490e680c15c5d","from":"centos:7","time":1445856406}\\n')
(b'80\\r\\n{"status":"create","id":"46e344569d70e9cf849a217701d5ef2e866dff122c1d5f1641b490e680c15c5d","from":"centos:7","time":1445856406}\\n', b'')
>>> parse(b'80\\r\\n{"status":"create","id":"46e344569d70e9cf849a217701d5ef2e866dff122c1d5f1641b490e680c15c5d","from":"centos:7"')
(b'80\\r\\n{"status":"create","id":"46e344569d70e9cf849a217701d5ef2e866dff122c1d5f1641b490e680c15c5d","from":"centos:7"', b'')
"""
if data.count(b'\r\n') < 2:
return data, b''
i = data.find(b'\r\n')
x = data[:i]
y = int(x, 16)
data = data[i + 2:]
if len(data) < y + 2:
return data, b''
line = data[:y]
data = data[y + 2:]
return data, line
| mit | -6,507,736,445,163,073,000 | -3,644,089,049,245,519,400 | 25.532189 | 166 | 0.565998 | false |
richtermondt/inithub-web | inithub/manager/views/interest_manager.py | 1 | 1278 | '''
@author: rtermondt
'''
from django.shortcuts import render_to_response
from django.template import RequestContext
from manager.models import Interest, Interests
from django.contrib.auth.decorators import login_required
@login_required()
def interest_manager(request):
system_message = None
if request.POST:
il = request.POST.getlist('interest_list')
Interests.objects.filter(
profile_id=request.session['profile_id']).delete()
for interest in il:
Interests.objects.create(profile_id=request.session[
'profile_id'], interest_id=interest)
system_message = 'Interests updated'
# return render_to_response('interest_manager.html', {
# 'system_message': 'Update complete',
#}, context_instance=RequestContext(request))
i = Interest.objects.all()
interest_selected = Interests.objects.get_query_set(
).filter(profile_id=request.session['profile_id'])
return render_to_response('interest_manager.html', {
'interest_list': i,
'interest_selected': interest_selected,
'system_message': system_message
}, RequestContext(request))
| mit | -3,815,884,573,996,428,000 | -987,811,382,506,772,900 | 37.727273 | 69 | 0.623631 | false |
hassaanm/stock-trading | pybrain-pybrain-87c7ac3/examples/rl/environments/linear_fa/xor.py | 4 | 2197 | """ Toy example for RL with linear function approximation.
This illustrates how an 'AND'-state-space can be solved, but not
an 'XOR' space.
"""
__author__ = 'Tom Schaul, [email protected]'
from pybrain.rl.learners.valuebased.linearfa import Q_LinFA
from pybrain.rl.environments.classic.xor import XORTask
from pybrain.rl.experiments.experiment import Experiment
from pybrain.rl.agents.learning import LearningAgent
from random import random, randint
class LinFA_QAgent(LearningAgent):
""" Customization of the Agent class for linear function approximation learners. """
epsilon = 0.1
logging = False
def __init__(self, learner):
self.learner = learner
self.previousobs = None
def getAction(self):
if random() < self.epsilon:
a = randint(0, self.learner.num_actions-1)
else:
a = self.learner._greedyAction(self.lastobs)
self.lastaction = a
return a
def giveReward(self, r):
LearningAgent.giveReward(self, r)
if self.previousobs is not None:
#print self.previousobs, a, self.lastreward, self.lastobs
self.learner._updateWeights(self.previousobs, self.previousaction, self.previousreward, self.lastobs)
self.previousobs = self.lastobs
self.previousaction = self.lastaction
self.previousreward = self.lastreward
def runExp(gamma=0, epsilon=0.1, xor=False, lr = 0.02):
if xor:
print "Attempting the XOR task"
else:
print "Attempting the AND task"
task = XORTask()
task.and_task = not xor
l = Q_LinFA(task.nactions, task.nsenses)
l.rewardDiscount = gamma
l.learningRate = lr
agent = LinFA_QAgent(l)
agent.epsilon = epsilon
exp = Experiment(task, agent)
sofar = 0
for i in range(30):
exp.doInteractions(100)
print exp.task.cumreward - sofar,
if i%10 == 9:
print
sofar = exp.task.cumreward
l._decayLearningRate()
if __name__ == "__main__":
runExp(xor=False)
print
runExp(xor=True)
print
runExp(xor=True) | apache-2.0 | -7,309,335,132,393,790,000 | -3,612,139,454,314,014,700 | 28.306667 | 113 | 0.622667 | false |
dmrtsvetkov/flowercoin | share/rpcuser/rpcuser.py | 115 | 1110 | #!/usr/bin/env python2
# Copyright (c) 2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import hashlib
import sys
import os
from random import SystemRandom
import base64
import hmac
if len(sys.argv) < 2:
sys.stderr.write('Please include username as an argument.\n')
sys.exit(0)
username = sys.argv[1]
#This uses os.urandom() underneath
cryptogen = SystemRandom()
#Create 16 byte hex salt
salt_sequence = [cryptogen.randrange(256) for i in range(16)]
hexseq = list(map(hex, salt_sequence))
salt = "".join([x[2:] for x in hexseq])
#Create 32 byte b64 password
password = base64.urlsafe_b64encode(os.urandom(32))
digestmod = hashlib.sha256
if sys.version_info.major >= 3:
password = password.decode('utf-8')
digestmod = 'SHA256'
m = hmac.new(bytearray(salt, 'utf-8'), bytearray(password, 'utf-8'), digestmod)
result = m.hexdigest()
print("String to be appended to bitcoin.conf:")
print("rpcauth="+username+":"+salt+"$"+result)
print("Your password:\n"+password)
| mit | 3,456,690,073,879,359,500 | -3,899,319,885,603,075,600 | 26.073171 | 79 | 0.723423 | false |
trabacus-softapps/openerp-8.0-cc | openerp/addons/sale_crm/__openerp__.py | 5 | 2304 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Opportunity to Quotation',
'version': '1.0',
'category': 'Hidden',
'description': """
This module adds a shortcut on one or several opportunity cases in the CRM.
===========================================================================
This shortcut allows you to generate a sales order based on the selected case.
If different cases are open (a list), it generates one sale order by case.
The case is then closed and linked to the generated sales order.
We suggest you install this module if you have installed both the sale and the crm
modules.
""",
'author': 'OpenERP SA',
'website': 'http://www.openerp.com',
'images': ['images/crm_statistics_dashboard.jpeg', 'images/opportunity_to_quote.jpeg'],
'depends': ['sale', 'crm', 'web_kanban_gauge'],
'data': [
'wizard/crm_make_sale_view.xml',
'sale_crm_view.xml',
'sale_crm_data.xml',
'process/sale_crm_process.xml',
'security/sale_crm_security.xml',
'security/ir.model.access.csv',
'report/sale_report_view.xml',
],
'js': [
'static/src/js/sale_crm.js',
],
'demo': ['sale_crm_demo.xml'],
'test': ['test/sale_crm.yml'],
'installable': True,
'auto_install': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -4,276,672,507,424,090,600 | -4,126,690,183,857,179,000 | 38.724138 | 91 | 0.598524 | false |
darcyliu/storyboard | boto/rds/dbsnapshot.py | 29 | 2730 | # Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class DBSnapshot(object):
"""
    Represents an RDS DB Snapshot
"""
def __init__(self, connection=None, id=None):
self.connection = connection
self.id = id
self.engine = None
self.snapshot_create_time = None
self.instance_create_time = None
self.port = None
self.status = None
self.availability_zone = None
self.master_username = None
self.allocated_storage = None
self.instance_id = None
self.availability_zone = None
def __repr__(self):
return 'DBSnapshot:%s' % self.id
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'Engine':
self.engine = value
elif name == 'InstanceCreateTime':
self.instance_create_time = value
elif name == 'SnapshotCreateTime':
self.snapshot_create_time = value
elif name == 'DBInstanceIdentifier':
self.instance_id = value
elif name == 'DBSnapshotIdentifier':
self.id = value
elif name == 'Port':
self.port = int(value)
elif name == 'Status':
self.status = value
elif name == 'AvailabilityZone':
self.availability_zone = value
elif name == 'MasterUsername':
self.master_username = value
elif name == 'AllocatedStorage':
self.allocated_storage = int(value)
elif name == 'SnapshotTime':
self.time = value
else:
setattr(self, name, value)
| mit | 5,345,672,540,101,762,000 | -6,738,464,330,177,579,000 | 35.891892 | 74 | 0.642491 | false |
probablytom/tomwallis.net | venv/lib/python2.7/site-packages/django/contrib/gis/measure.py | 93 | 12310 | # Copyright (c) 2007, Robert Coup <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of Distance nor the names of its contributors may be used
# to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""
Distance and Area objects to allow for sensible and convenient calculation
and conversions.
Authors: Robert Coup, Justin Bronn, Riccardo Di Virgilio
Inspired by GeoPy (http://exogen.case.edu/projects/geopy/)
and Geoff Biggs' PhD work on dimensioned units for robotics.
"""
__all__ = ['A', 'Area', 'D', 'Distance']
from decimal import Decimal
from django.utils.functional import total_ordering
from django.utils import six
NUMERIC_TYPES = six.integer_types + (float, Decimal)
AREA_PREFIX = "sq_"
def pretty_name(obj):
return obj.__name__ if obj.__class__ == type else obj.__class__.__name__
@total_ordering
class MeasureBase(object):
STANDARD_UNIT = None
ALIAS = {}
UNITS = {}
LALIAS = {}
def __init__(self, default_unit=None, **kwargs):
value, self._default_unit = self.default_units(kwargs)
setattr(self, self.STANDARD_UNIT, value)
if default_unit and isinstance(default_unit, six.string_types):
self._default_unit = default_unit
def _get_standard(self):
return getattr(self, self.STANDARD_UNIT)
def _set_standard(self, value):
setattr(self, self.STANDARD_UNIT, value)
standard = property(_get_standard, _set_standard)
def __getattr__(self, name):
if name in self.UNITS:
return self.standard / self.UNITS[name]
else:
raise AttributeError('Unknown unit type: %s' % name)
def __repr__(self):
return '%s(%s=%s)' % (pretty_name(self), self._default_unit,
getattr(self, self._default_unit))
def __str__(self):
return '%s %s' % (getattr(self, self._default_unit), self._default_unit)
# **** Comparison methods ****
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.standard == other.standard
else:
return NotImplemented
def __lt__(self, other):
if isinstance(other, self.__class__):
return self.standard < other.standard
else:
return NotImplemented
# **** Operators methods ****
def __add__(self, other):
if isinstance(other, self.__class__):
return self.__class__(default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard + other.standard)})
else:
raise TypeError('%(class)s must be added with %(class)s' % {"class": pretty_name(self)})
def __iadd__(self, other):
if isinstance(other, self.__class__):
self.standard += other.standard
return self
else:
raise TypeError('%(class)s must be added with %(class)s' % {"class": pretty_name(self)})
def __sub__(self, other):
if isinstance(other, self.__class__):
return self.__class__(default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard - other.standard)})
else:
raise TypeError('%(class)s must be subtracted from %(class)s' % {"class": pretty_name(self)})
def __isub__(self, other):
if isinstance(other, self.__class__):
self.standard -= other.standard
return self
else:
raise TypeError('%(class)s must be subtracted from %(class)s' % {"class": pretty_name(self)})
def __mul__(self, other):
if isinstance(other, NUMERIC_TYPES):
return self.__class__(default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard * other)})
else:
raise TypeError('%(class)s must be multiplied with number' % {"class": pretty_name(self)})
def __imul__(self, other):
if isinstance(other, NUMERIC_TYPES):
self.standard *= float(other)
return self
else:
raise TypeError('%(class)s must be multiplied with number' % {"class": pretty_name(self)})
def __rmul__(self, other):
return self * other
def __truediv__(self, other):
if isinstance(other, self.__class__):
return self.standard / other.standard
if isinstance(other, NUMERIC_TYPES):
return self.__class__(default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard / other)})
else:
raise TypeError('%(class)s must be divided with number or %(class)s' % {"class": pretty_name(self)})
def __div__(self, other): # Python 2 compatibility
return type(self).__truediv__(self, other)
def __itruediv__(self, other):
if isinstance(other, NUMERIC_TYPES):
self.standard /= float(other)
return self
else:
raise TypeError('%(class)s must be divided with number' % {"class": pretty_name(self)})
def __idiv__(self, other): # Python 2 compatibility
return type(self).__itruediv__(self, other)
def __bool__(self):
return bool(self.standard)
def __nonzero__(self): # Python 2 compatibility
return type(self).__bool__(self)
def default_units(self, kwargs):
"""
Return the unit value and the default units specified
from the given keyword arguments dictionary.
"""
val = 0.0
default_unit = self.STANDARD_UNIT
for unit, value in six.iteritems(kwargs):
if not isinstance(value, float):
value = float(value)
if unit in self.UNITS:
val += self.UNITS[unit] * value
default_unit = unit
elif unit in self.ALIAS:
u = self.ALIAS[unit]
val += self.UNITS[u] * value
default_unit = u
else:
lower = unit.lower()
if lower in self.UNITS:
val += self.UNITS[lower] * value
default_unit = lower
elif lower in self.LALIAS:
u = self.LALIAS[lower]
val += self.UNITS[u] * value
default_unit = u
else:
raise AttributeError('Unknown unit type: %s' % unit)
return val, default_unit
@classmethod
def unit_attname(cls, unit_str):
"""
Retrieves the unit attribute name for the given unit string.
For example, if the given unit string is 'metre', 'm' would be returned.
An exception is raised if an attribute cannot be found.
"""
lower = unit_str.lower()
if unit_str in cls.UNITS:
return unit_str
elif lower in cls.UNITS:
return lower
elif lower in cls.LALIAS:
return cls.LALIAS[lower]
else:
raise Exception('Could not find a unit keyword associated with "%s"' % unit_str)
class Distance(MeasureBase):
STANDARD_UNIT = "m"
UNITS = {
'chain': 20.1168,
'chain_benoit': 20.116782,
'chain_sears': 20.1167645,
'british_chain_benoit': 20.1167824944,
'british_chain_sears': 20.1167651216,
'british_chain_sears_truncated': 20.116756,
'cm': 0.01,
'british_ft': 0.304799471539,
'british_yd': 0.914398414616,
'clarke_ft': 0.3047972654,
'clarke_link': 0.201166195164,
'fathom': 1.8288,
'ft': 0.3048,
'german_m': 1.0000135965,
'gold_coast_ft': 0.304799710181508,
'indian_yd': 0.914398530744,
'inch': 0.0254,
'km': 1000.0,
'link': 0.201168,
'link_benoit': 0.20116782,
'link_sears': 0.20116765,
'm': 1.0,
'mi': 1609.344,
'mm': 0.001,
'nm': 1852.0,
'nm_uk': 1853.184,
'rod': 5.0292,
'sears_yd': 0.91439841,
'survey_ft': 0.304800609601,
'um': 0.000001,
'yd': 0.9144,
}
# Unit aliases for `UNIT` terms encountered in Spatial Reference WKT.
ALIAS = {
'centimeter': 'cm',
'foot': 'ft',
'inches': 'inch',
'kilometer': 'km',
'kilometre': 'km',
'meter': 'm',
'metre': 'm',
'micrometer': 'um',
'micrometre': 'um',
'millimeter': 'mm',
'millimetre': 'mm',
'mile': 'mi',
'yard': 'yd',
'British chain (Benoit 1895 B)': 'british_chain_benoit',
'British chain (Sears 1922)': 'british_chain_sears',
'British chain (Sears 1922 truncated)': 'british_chain_sears_truncated',
'British foot (Sears 1922)': 'british_ft',
'British foot': 'british_ft',
'British yard (Sears 1922)': 'british_yd',
'British yard': 'british_yd',
"Clarke's Foot": 'clarke_ft',
"Clarke's link": 'clarke_link',
'Chain (Benoit)': 'chain_benoit',
'Chain (Sears)': 'chain_sears',
'Foot (International)': 'ft',
'German legal metre': 'german_m',
'Gold Coast foot': 'gold_coast_ft',
'Indian yard': 'indian_yd',
'Link (Benoit)': 'link_benoit',
'Link (Sears)': 'link_sears',
'Nautical Mile': 'nm',
'Nautical Mile (UK)': 'nm_uk',
'US survey foot': 'survey_ft',
'U.S. Foot': 'survey_ft',
'Yard (Indian)': 'indian_yd',
'Yard (Sears)': 'sears_yd'
}
LALIAS = dict((k.lower(), v) for k, v in ALIAS.items())
def __mul__(self, other):
if isinstance(other, self.__class__):
return Area(default_unit=AREA_PREFIX + self._default_unit,
**{AREA_PREFIX + self.STANDARD_UNIT: (self.standard * other.standard)})
elif isinstance(other, NUMERIC_TYPES):
return self.__class__(default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard * other)})
else:
raise TypeError('%(distance)s must be multiplied with number or %(distance)s' % {
"distance": pretty_name(self.__class__),
})
class Area(MeasureBase):
STANDARD_UNIT = AREA_PREFIX + Distance.STANDARD_UNIT
# Getting the square units values and the alias dictionary.
UNITS = dict(('%s%s' % (AREA_PREFIX, k), v ** 2) for k, v in Distance.UNITS.items())
ALIAS = dict((k, '%s%s' % (AREA_PREFIX, v)) for k, v in Distance.ALIAS.items())
LALIAS = dict((k.lower(), v) for k, v in ALIAS.items())
def __truediv__(self, other):
if isinstance(other, NUMERIC_TYPES):
return self.__class__(default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard / other)})
else:
raise TypeError('%(class)s must be divided by a number' % {"class": pretty_name(self)})
def __div__(self, other): # Python 2 compatibility
return type(self).__truediv__(self, other)
# Shortcuts
D = Distance
A = Area
| artistic-2.0 | -7,287,726,159,595,744,000 | 2,950,838,580,219,150,300 | 35.966967 | 112 | 0.584484 | false |
krux/adspygoogle | examples/adspygoogle/dfp/v201203/create_labels.py | 2 | 1778 | #!/usr/bin/python
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates new labels. To determine which labels
exist, run get_all_labels.py. This feature is only available to DFP premium
solution networks."""
__author__ = '[email protected] (Jeff Sham)'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
# Initialize client object.
client = DfpClient(path=os.path.join('..', '..', '..', '..'))
# Initialize appropriate service.
label_service = client.GetService(
'LabelService', 'https://www.google.com', 'v201203')
# Create label objects.
labels = []
for i in xrange(5):
label = {
'name': 'Label #%d' % i,
'isActive': 'True',
'types': ['COMPETITIVE_EXCLUSION']
}
labels.append(label)
# Add Labels.
labels = label_service.CreateLabels(labels)
# Display results.
for label in labels:
print ('Label with id \'%s\', name \'%s\', and types {%s} was found.'
% (label['id'], label['name'], ','.join(label['types'])))
| apache-2.0 | 4,739,231,014,072,436,000 | 8,729,992,221,451,796,000 | 30.75 | 80 | 0.692913 | false |
compas-dev/compas | src/compas_rhino/conduits/faces.py | 1 | 2740 | from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
try:
basestring
except NameError:
basestring = str
from System.Drawing.Color import FromArgb
from Rhino.Geometry import Point3d
from compas.utilities import color_to_rgb
from compas_rhino.conduits.base import BaseConduit
__all__ = ['FacesConduit']
class FacesConduit(BaseConduit):
"""A Rhino display conduit for faces.
Parameters
----------
vertices : list of list of float
The coordinates of the vertices of the faces.
faces : list of list of int
The faces defined as lists of indices in ``vertices``.
color : list of str or 3-tuple, optional
The colors of the faces.
Default is ``None``, in which case the default color is used for all faces.
Attributes
----------
color : list of RGB colors
The color specification per face.
vertices : list of list of float
The coordinates of the vertices of the faces.
faces : list of list of int
The faces defined as lists of indices in ``vertices``.
Examples
--------
.. code-block:: python
from compas.geometry import Polyhedron
from compas_rhino.conduits import FacesConduit
polyhedron = Polyhedron.generate(6)
faces = polyhedron.faces
vertices = polyhedron.vertices
polygons = [[vertices[index] for index in face] for face in faces]
conduit = FacesConduit(polygons)
with conduit.enabled():
conduit.redraw(pause=5.0)
"""
def __init__(self, vertices, faces, color=None, **kwargs):
super(FacesConduit, self).__init__(**kwargs)
self._default_color = FromArgb(255, 255, 255)
self._color = None
self.vertices = vertices or []
self.faces = faces or []
self.color = color
@property
def color(self):
return self._color
@color.setter
def color(self, color):
if not color:
return
f = len(self.faces)
if isinstance(color, (basestring, tuple)):
color = [color for _ in range(f)]
color = [FromArgb(* color_to_rgb(c)) for c in color]
c = len(color)
if c < f:
color += [self._default_color for _ in range(f - c)]
elif c > f:
color[:] = color[:f]
self._color = color
def DrawForeground(self, e):
for i, face in enumerate(self.faces):
points = [Point3d(* self.vertices[key]) for key in face]
if self.color:
e.Display.DrawPolygon(points, self.color[i], True)
else:
e.Display.DrawPolygon(points, self._default_color, True)
| mit | 5,134,634,980,020,671,000 | 8,144,634,177,910,321,000 | 28.782609 | 83 | 0.605839 | false |
cym13/anime_verif | anime_verif.py | 1 | 4001 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2013 Cédric Picard
#
# LICENSE
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# END_OF_LICENSE
#
"""
Check for size anomalies and gaps in the episode number sequence.
Usage: anime_verif.py [-a N] [-s|-n] DIRECTORY
Options:
-a, --accuracy N Do not show files whose size deviation is less than N
times the standart deviation. Default is 3.
-s, --size Check size only
-n, --numbers Check episode numbers only
If -s and -n are missing or if -s and -n are used together, size and numbers
are both checked.
"""
import os
import math
import re
from docopt import docopt
def extract_numbers(filename):
"""
    Extract the numbers present in `filename'.
"""
numbers = re.compile(r'[0-9]+')
return [ int(x) for x in numbers.findall(filename) ]
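# Illustrative example (hypothetical filename):
#   extract_numbers('show_s01e02.mkv') -> [1, 2]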
def size_check(size_list, file_list, accuracy):
"""
Detect 0-sized files and size anomalies.
"""
if 0 in size_list:
print('Presence of files of size 0')
return False
# Smooth data to the MB order
size_list = [ math.floor(x / 1024**2) for x in size_list ]
# Set the average size and the variance for statistical size study
average_size = sum(size_list) / len(size_list)
    var = sum((x - average_size) ** 2 for x in size_list) / len(size_list)
# Detect size anomalies
file_token = 0
for size in size_list:
if (size - average_size) ** 2 > accuracy * var and size < average_size:
print('Size anomaly detected: ' + \
file_list[file_token].encode('utf-8'))
return False
file_token += 1
# If the directory passed all size tests:
return True
def ep_numbers_check(file_list):
"""
Check that all episode numbers are following each other.
Rely on alphanumeric naming order.
"""
file_list.sort()
for index in range(1, len(file_list)):
prev_numbers = extract_numbers(file_list[index - 1])
follow = False
for each in extract_numbers(file_list[index]):
if (each - 1) in prev_numbers:
follow = True
if not follow:
return False
return True
if __name__ == '__main__':
arguments = docopt(__doc__)
# Set default options
if arguments['--accuracy']:
accuracy = float(arguments['--accuracy'])
else:
accuracy = 5
if not arguments['--size'] and not arguments['--numbers']:
arguments['--size'] = True
arguments['--numbers'] = True
target_dir = arguments['DIRECTORY'].decode('utf-8')
if not os.path.isdir(target_dir):
print('This is not a directory: ' + target_dir.encode('utf-8'))
os.sys.exit(1)
os.chdir(target_dir)
size_list = []
file_list = []
for each in os.listdir('.'):
if os.path.isfile(each.decode('utf-8')):
size_list.append(os.path.getsize(each.decode('utf-8')))
file_list.append(each.decode('utf-8'))
if size_list == []:
print('No file found in directory: ' + target_dir.encode('utf-8'))
os.sys.exit(1)
if arguments['--size'] and not size_check(size_list, file_list, accuracy):
os.sys.exit(1)
if arguments['--numbers'] and not ep_numbers_check(file_list):
        print('Some episodes may be missing: ' + target_dir.encode('utf-8'))
os.sys.exit(1)
| gpl-3.0 | -8,534,003,256,037,430,000 | -6,997,154,635,035,684,000 | 28.62963 | 79 | 0.6225 | false |
openjck/kuma | scripts/clone_db.py | 2 | 10579 | #!/usr/bin/env python2.7
"""
This script performs all the steps needed to produce an anonymized DB dump:
* Produce a dump of the original DB
* Import the dump into a temporary DB
* Run anonymize.sql on the temporary DB
* Produce an anonymized dump from the temporary DB
* Drop the temporary DB
* Delete the dump of the original DB
"""
from datetime import datetime
import os
import os.path
import subprocess
import sys
from textwrap import dedent
from optparse import OptionParser
#
# Whenever a new table is created, add appropriate steps to anonymize.sql and
# then add the table here. anonymize.sql may be run independently, instead of
# this script, so make sure anonymize.sql performs sanitization as well.
#
# To remove a table from the anonymized database:
# Remove it from TABLES_TO_DUMP
# Add DROP TABLE IF EXISTS {table name}; to anonymize.sql
#
# To ensure an empty table in the anonymized database:
# Add to TABLES_TO_DUMP
# Add TRUNCATE {table name}; to anonymize.sql
#
# To anonymize records:
# Add to TABLES_TO_DUMP
# Add UPDATE {table name} ...; to anonymize.sql
#
# To keep production records:
# Add to TABLES_TO_DUMP
# Add a comment to anonymize.sql so future devs know you considered the table
#
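# For example, anonymize.sql might contain statements along these lines (illustrative
# only -- the real statements live in anonymize.sql):
#   TRUNCATE django_session;                                  -- keep the table, drop its rows
#   UPDATE auth_user SET email = CONCAT(id, '@example.com');  -- anonymize records
#   DROP TABLE IF EXISTS some_removed_table;                  -- table no longer dumped
#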
TABLES_TO_DUMP=[x.strip() for x in """
account_emailaddress
account_emailconfirmation
actioncounters_actioncounterunique
actioncounters_testmodel
attachments_attachment
attachments_attachmentrevision
attachments_documentattachment
auth_group
auth_group_permissions
auth_message
auth_permission
auth_user
auth_user_groups
auth_user_user_permissions
authkeys_key
authkeys_keyaction
celery_taskmeta
celery_tasksetmeta
constance_config
contentflagging_contentflag
core_ipban
demos_submission
django_admin_log
django_cache
django_content_type
django_migrations
django_session
django_site
djcelery_crontabschedule
djcelery_intervalschedule
djcelery_periodictask
djcelery_periodictasks
djcelery_taskstate
djcelery_workerstate
feeder_bundle
feeder_bundle_feeds
feeder_entry
feeder_feed
search_filter
search_filtergroup
search_index
search_outdatedobject
soapbox_message
socialaccount_socialaccount
socialaccount_socialapp
socialaccount_socialapp_sites
socialaccount_socialtoken
tagging_tag
tagging_taggeditem
taggit_tag
taggit_taggeditem
tidings_watch
tidings_watchfilter
users_userban
waffle_flag
waffle_flag_groups
waffle_flag_users
waffle_sample
waffle_switch
wiki_document
wiki_documentdeletionlog
wiki_documenttag
wiki_documentzone
wiki_editortoolbar
wiki_localizationtag
wiki_localizationtaggedrevision
wiki_reviewtag
wiki_reviewtaggedrevision
wiki_revision
wiki_revisionip
wiki_taggeddocument
""".splitlines() if x.strip()]
def print_info(s):
if not opts.quiet: print s
def print_debug(s):
if not opts.quiet and opts.debug: print s
class NotFound(Exception):
pass
def sysprint(command):
""" Helper to print all system commands in debug mode """
print_debug("command: %s" % command)
output = subprocess.check_output(
command, shell=True, stderr=subprocess.STDOUT)
for line in output.splitlines():
if line.endswith("command not found"):
raise NotFound(output)
elif line == (
'Warning: Using a password on the command line interface can'
' be insecure.'):
pass
else:
print(line)
def main():
now = datetime.now().strftime('%Y%m%d')
usage = """\
%%prog [options] DB_NAME
Performs the steps needed to produce an anonymized DB dump.
Examples:
%%prog mdn_prod
Connect to 127.0.0.1 as root, no password.
Dump "mdn_prod", import to temporary table.
Produce mdn_prod-anon-%(now)s.sql.gz
Both the temporary table and the input dump are deleted.
%%prog -i downloaded-dump.sql.gz
Connect to 127.0.0.1 as root, no password.
Import downloaded-dump.sql.gz.
Produce mdn_prod-anon-%(now)s.sql.gz
Input dump is not deleted.
""" % dict(
now=now
)
options = OptionParser(dedent(usage.rstrip()))
options.add_option('-u', '--user',
default='root',
help='MySQL user name')
options.add_option('-p', '--password',
default='',
help='MySQL password')
options.add_option('-H', '--host',
default='127.0.0.1',
help='MySQL host')
options.add_option('-S', '--socket',
help='MySQL socket')
options.add_option('-i', '--input',
help='Input SQL dump filename')
options.add_option('-o', '--output',
help='Output SQL dump filename')
options.add_option('-q', '--quiet',
action='store_true',
help='Quiet all output')
options.add_option('-D', '--debug',
action='store_true',
help='Enable debug output')
options.add_option('--skip-input-dump',
action='store_true',
help='Skip the initial input DB dump')
options.add_option('--skip-temp-create',
action='store_true',
help='Skip creation of the temporary DB')
options.add_option('--skip-temp-import',
action='store_true',
help='Skip import of the input DB dump into the '
'temporary DB')
options.add_option('--skip-anonymize',
action='store_true',
help='Skip anonymization of the temporary DB')
options.add_option('--skip-output-dump',
action='store_true',
help='Skip the post-anonymization DB dump')
options.add_option('--skip-drop-temp-db',
action='store_true',
help='Skip dropping the temporary DB')
options.add_option('--skip-delete-input',
action='store_true',
help='Skip deleting the input DB dump')
global opts, args
(opts, args) = options.parse_args()
if len(args) < 1:
options.error("Need an input DB name")
input_db = args[0]
base_dir = os.path.dirname(__file__)
mysql_conn = '-u%(user)s %(password)s -h%(host)s' % dict(
user=opts.user,
password=opts.password and ('-p%s' % opts.password) or '',
host=opts.host,
)
if opts.socket:
mysql_conn = '-u%(user)s %(password)s -S%(socket)s' % dict(
user=opts.user,
password=opts.password and ('-p%s' % opts.password) or '',
socket=opts.socket,
)
else:
mysql_conn = '-u%(user)s %(password)s -h%(host)s' % dict(
user=opts.user,
password=opts.password and ('-p%s' % opts.password) or '',
host=opts.host,
)
if opts.input:
input_dump_fn = opts.input
else:
input_dump_fn = '%s-%s.sql.gz' % (input_db, now)
output_dump_fn = '%s-anon-%s.sql.gz' % (input_db, now)
# TODO: replace dump, create, import with mysqldbcopy
# https://dev.mysql.com/doc/mysql-utilities/1.3/en/mysqldbcopy.html
if not opts.skip_input_dump and not opts.input:
print_info("Dumping input DB to %s" % input_dump_fn)
dump_cmd = ('mysqldump %(mysql_conn)s %(input_db)s %(tables)s | '
'gzip > %(input_dump_fn)s' % dict(
mysql_conn=mysql_conn,
input_db=input_db, tables=' '.join(TABLES_TO_DUMP),
input_dump_fn=input_dump_fn
))
print_debug('\t%s' % dump_cmd)
sysprint(dump_cmd)
temp_db = '%s_anontmp_%s' % (input_db, now)
if not opts.skip_temp_create:
print_info('Creating temporary DB %s' % temp_db)
sysprint(('mysql %(mysql_conn)s -e'
'"DROP DATABASE IF EXISTS %(temp_db)s;"') %
dict(mysql_conn=mysql_conn, temp_db=temp_db))
sysprint('mysqladmin %(mysql_conn)s create %(temp_db)s' %
dict(mysql_conn=mysql_conn, temp_db=temp_db))
if not opts.skip_temp_import:
print_info('Importing the input dump into the temporary DB')
sysprint('cat %(input_dump_fn)s | gzip -dc | mysql %(mysql_conn)s '
'%(temp_db)s' % dict(
input_dump_fn=input_dump_fn, mysql_conn=mysql_conn,
temp_db=temp_db
))
if not opts.skip_anonymize:
anon_sql_fn = os.path.join(base_dir, 'anonymize.sql')
print_info('Applying %s to the temporary DB' % anon_sql_fn)
sysprint('cat %(anon_sql_fn)s | mysql %(mysql_conn)s '
'%(temp_db)s' % dict(
anon_sql_fn=anon_sql_fn, mysql_conn=mysql_conn,
temp_db=temp_db
))
if not opts.skip_output_dump:
print_info("Dumping temporary DB to %s" % output_dump_fn)
dump_cmd = ('mysqldump %(mysql_conn)s %(temp_db)s | '
'gzip > %(output_dump_fn)s' % dict(
mysql_conn=mysql_conn, temp_db=temp_db,
output_dump_fn=output_dump_fn
))
print_debug('\t%s' % dump_cmd)
sysprint(dump_cmd)
if not opts.skip_drop_temp_db:
print_info("Dropping temporary db %s" % temp_db)
sysprint('mysqladmin %(mysql_conn)s -f drop %(temp_db)s' %
dict(mysql_conn=mysql_conn, temp_db=temp_db))
if not opts.skip_delete_input and not opts.input:
print_info('Deleting input DB dump %s' % input_dump_fn)
os.remove(input_dump_fn)
if __name__ == '__main__':
retcode = None
error = None
try:
main()
except subprocess.CalledProcessError as e:
        if e.returncode < 0:
            error = "Command was terminated by signal"
            retcode = -e.returncode
        else:
            error = "Command errored with code %s" % e.returncode
            retcode = e.returncode
except (NotFound, OSError) as e:
error = "Command failed: %s" % e
retcode = 127
if error:
print >>sys.stderr, error
print >>sys.stderr, "Clone FAILED."
sys.exit(retcode)
else:
print >>sys.stderr, "Clone complete."
| mpl-2.0 | -7,214,546,676,629,363,000 | -2,707,997,522,231,651,000 | 31.057576 | 78 | 0.58739 | false |