"""
Boxing and unboxing of native Numba values to / from CPython objects.
"""
from llvmlite import ir
from .. import cgutils, numpy_support, types
from ..pythonapi import box, unbox, reflect, NativeValue
from . import listobj
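# The @box / @unbox decorators below register per-type converters with the
# Python API layer. A registered pair roughly follows this protocol (sketch
# only; `SomeType` and `some_ctor` are illustrative names, not real API):
#
#     @box(types.SomeType)
#     def box_some(c, typ, val):      # native value -> new PyObject*
#         return c.pyapi.some_ctor(val)
#
#     @unbox(types.SomeType)
#     def unbox_some(c, typ, obj):    # PyObject* -> NativeValue wrapper
#         return NativeValue(..., is_error=c.pyapi.c_api_error())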
#
# Scalar types
#
@box(types.Boolean)
def box_bool(c, typ, val):
longval = c.builder.zext(val, c.pyapi.long)
return c.pyapi.bool_from_long(longval)
@unbox(types.Boolean)
def unbox_boolean(c, typ, obj):
istrue = c.pyapi.object_istrue(obj)
zero = ir.Constant(istrue.type, 0)
val = c.builder.icmp_signed('!=', istrue, zero)
return NativeValue(val, is_error=c.pyapi.c_api_error())
@box(types.Integer)
def box_integer(c, typ, val):
if typ.signed:
ival = c.builder.sext(val, c.pyapi.longlong)
return c.pyapi.long_from_longlong(ival)
else:
ullval = c.builder.zext(val, c.pyapi.ulonglong)
return c.pyapi.long_from_ulonglong(ullval)
@unbox(types.Integer)
def unbox_integer(c, typ, obj):
ll_type = c.context.get_argument_type(typ)
val = cgutils.alloca_once(c.builder, ll_type)
longobj = c.pyapi.number_long(obj)
with c.pyapi.if_object_ok(longobj):
if typ.signed:
llval = c.pyapi.long_as_longlong(longobj)
else:
llval = c.pyapi.long_as_ulonglong(longobj)
c.pyapi.decref(longobj)
c.builder.store(c.builder.trunc(llval, ll_type), val)
return NativeValue(c.builder.load(val),
is_error=c.pyapi.c_api_error())
@box(types.Float)
def box_float(c, typ, val):
if typ == types.float32:
dbval = c.builder.fpext(val, c.pyapi.double)
else:
assert typ == types.float64
dbval = val
return c.pyapi.float_from_double(dbval)
@unbox(types.Float)
def unbox_float(c, typ, obj):
fobj = c.pyapi.number_float(obj)
dbval = c.pyapi.float_as_double(fobj)
c.pyapi.decref(fobj)
if typ == types.float32:
val = c.builder.fptrunc(dbval,
c.context.get_argument_type(typ))
else:
assert typ == types.float64
val = dbval
return NativeValue(val, is_error=c.pyapi.c_api_error())
@box(types.Complex)
def box_complex(c, typ, val):
cmplxcls = c.context.make_complex(typ)
cval = cmplxcls(c.context, c.builder, value=val)
if typ == types.complex64:
freal = c.builder.fpext(cval.real, c.pyapi.double)
fimag = c.builder.fpext(cval.imag, c.pyapi.double)
else:
assert typ == types.complex128
freal, fimag = cval.real, cval.imag
return c.pyapi.complex_from_doubles(freal, fimag)
@unbox(types.Complex)
def unbox_complex(c, typ, obj):
c128cls = c.context.make_complex(types.complex128)
c128 = c128cls(c.context, c.builder)
ok = c.pyapi.complex_adaptor(obj, c128._getpointer())
failed = cgutils.is_false(c.builder, ok)
with cgutils.if_unlikely(c.builder, failed):
c.pyapi.err_set_string("PyExc_TypeError",
"conversion to %s failed" % (typ,))
if typ == types.complex64:
cplxcls = c.context.make_complex(typ)
cplx = cplxcls(c.context, c.builder)
cplx.real = c.context.cast(c.builder, c128.real,
types.float64, types.float32)
cplx.imag = c.context.cast(c.builder, c128.imag,
types.float64, types.float32)
else:
assert typ == types.complex128
cplx = c128
return NativeValue(cplx._getvalue(), is_error=failed)
@box(types.NoneType)
def box_none(c, typ, val):
return c.pyapi.make_none()
@unbox(types.NoneType)
@unbox(types.EllipsisType)
def unbox_none(c, typ, val):
return NativeValue(c.context.get_dummy_value())
@box(types.NPDatetime)
def box_npdatetime(c, typ, val):
return c.pyapi.create_np_datetime(val, typ.unit_code)
@unbox(types.NPDatetime)
def unbox_npdatetime(c, typ, obj):
val = c.pyapi.extract_np_datetime(obj)
return NativeValue(val, is_error=c.pyapi.c_api_error())
@box(types.NPTimedelta)
def box_nptimedelta(c, typ, val):
return c.pyapi.create_np_timedelta(val, typ.unit_code)
@unbox(types.NPTimedelta)
def unbox_nptimedelta(c, typ, obj):
val = c.pyapi.extract_np_timedelta(obj)
return NativeValue(val, is_error=c.pyapi.c_api_error())
@box(types.RawPointer)
def box_raw_pointer(c, typ, val):
"""
Convert a raw pointer to a Python int.
"""
ll_intp = c.context.get_value_type(types.uintp)
addr = c.builder.ptrtoint(val, ll_intp)
return c.box(types.uintp, addr)
#
# Composite types
#
@box(types.Record)
def box_record(c, typ, val):
# Note we will create a copy of the record
# This is the only safe way.
size = ir.Constant(ir.IntType(32), val.type.pointee.count)
ptr = c.builder.bitcast(val, ir.PointerType(ir.IntType(8)))
return c.pyapi.recreate_record(ptr, size, typ.dtype, c.env_manager)
@unbox(types.Record)
def unbox_record(c, typ, obj):
buf = c.pyapi.alloca_buffer()
ptr = c.pyapi.extract_record_data(obj, buf)
is_error = cgutils.is_null(c.builder, ptr)
ltyp = c.context.get_value_type(typ)
val = c.builder.bitcast(ptr, ltyp)
def cleanup():
c.pyapi.release_buffer(buf)
return NativeValue(val, cleanup=cleanup, is_error=is_error)
@box(types.CharSeq)
def box_charseq(c, typ, val):
rawptr = cgutils.alloca_once_value(c.builder, value=val)
strptr = c.builder.bitcast(rawptr, c.pyapi.cstring)
fullsize = c.context.get_constant(types.intp, typ.count)
zero = c.context.get_constant(types.intp, 0)
count = cgutils.alloca_once_value(c.builder, zero)
bbend = c.builder.append_basic_block("end.string.count")
# Find the length of the string
with cgutils.loop_nest(c.builder, [fullsize], fullsize.type) as [idx]:
# Get char at idx
ch = c.builder.load(c.builder.gep(strptr, [idx]))
# Store the current index as count
c.builder.store(idx, count)
# Check if the char is a null-byte
ch_is_null = cgutils.is_null(c.builder, ch)
# If the char is a null-byte
with c.builder.if_then(ch_is_null):
# Jump to the end
c.builder.branch(bbend)
# This is reached if there is no null-byte in the string
# Then, set count to the fullsize
c.builder.store(fullsize, count)
# Jump to the end
c.builder.branch(bbend)
c.builder.position_at_end(bbend)
strlen = c.builder.load(count)
return c.pyapi.bytes_from_string_and_size(strptr, strlen)
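# A pure-Python sketch of the length scan emitted above (the helper name is
# illustrative, not part of this module): stop at the first NUL byte, or
# fall back to the full storage size when none is found.
def _charseq_len_sketch(raw_bytes, fullsize):
    for idx in range(fullsize):
        if raw_bytes[idx] == 0:
            # Found the NUL terminator: idx is the logical length.
            return idx
    # No NUL byte: the string occupies the whole storage.
    return fullsize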
@unbox(types.CharSeq)
def unbox_charseq(c, typ, obj):
lty = c.context.get_value_type(typ)
ok, buffer, size = c.pyapi.string_as_string_and_size(obj)
# If conversion is ok, copy the buffer to the output storage.
with cgutils.if_likely(c.builder, ok):
# Check if the returned string size fits in the charseq
storage_size = ir.Constant(size.type, typ.count)
size_fits = c.builder.icmp_unsigned("<=", size, storage_size)
# Allow truncation of string
size = c.builder.select(size_fits, size, storage_size)
# Initialize output to zero bytes
null_string = ir.Constant(lty, None)
outspace = cgutils.alloca_once_value(c.builder, null_string)
# We don't need to set the NULL-terminator because the storage
# is already zero-filled.
cgutils.memcpy(c.builder,
c.builder.bitcast(outspace, buffer.type),
buffer, size)
ret = c.builder.load(outspace)
return NativeValue(ret, is_error=c.builder.not_(ok))
@unbox(types.Optional)
def unbox_optional(c, typ, obj):
"""
Convert object *obj* to a native optional structure.
"""
noneval = c.context.make_optional_none(c.builder, typ.type)
is_not_none = c.builder.icmp_signed('!=', obj, c.pyapi.borrow_none())
retptr = cgutils.alloca_once(c.builder, noneval.type)
errptr = cgutils.alloca_once_value(c.builder, cgutils.false_bit)
with c.builder.if_else(is_not_none) as (then, orelse):
with then:
native = c.unbox(typ.type, obj)
just = c.context.make_optional_value(c.builder,
typ.type, native.value)
c.builder.store(just, retptr)
c.builder.store(native.is_error, errptr)
with orelse:
c.builder.store(noneval, retptr)
if native.cleanup is not None:
def cleanup():
with c.builder.if_then(is_not_none):
native.cleanup()
else:
cleanup = None
ret = c.builder.load(retptr)
return NativeValue(ret, is_error=c.builder.load(errptr),
cleanup=cleanup)
@unbox(types.Slice3Type)
def unbox_slice(c, typ, obj):
"""
Convert object *obj* to a native slice structure.
"""
from . import slicing
ok, start, stop, step = \
c.pyapi.slice_as_ints(obj, slicing.get_defaults(c.context))
slice3 = slicing.Slice(c.context, c.builder)
slice3.start = start
slice3.stop = stop
slice3.step = step
return NativeValue(slice3._getvalue(), is_error=c.builder.not_(ok))
#
# Collections
#
# NOTE: boxing functions are supposed to steal any NRT references in
# the given native value.
@box(types.Array)
def box_array(c, typ, val):
nativearycls = c.context.make_array(typ)
nativeary = nativearycls(c.context, c.builder, value=val)
if c.context.enable_nrt:
np_dtype = numpy_support.as_dtype(typ.dtype)
dtypeptr = c.env_manager.read_const(c.env_manager.add_const(np_dtype))
# Steals NRT ref
newary = c.pyapi.nrt_adapt_ndarray_to_python(typ, val, dtypeptr)
return newary
else:
parent = nativeary.parent
c.pyapi.incref(parent)
return parent
@unbox(types.Buffer)
def unbox_buffer(c, typ, obj):
"""
Convert a Py_buffer-providing object to a native array structure.
"""
buf = c.pyapi.alloca_buffer()
res = c.pyapi.get_buffer(obj, buf)
is_error = cgutils.is_not_null(c.builder, res)
nativearycls = c.context.make_array(typ)
nativeary = nativearycls(c.context, c.builder)
aryptr = nativeary._getpointer()
with cgutils.if_likely(c.builder, c.builder.not_(is_error)):
ptr = c.builder.bitcast(aryptr, c.pyapi.voidptr)
if c.context.enable_nrt:
c.pyapi.nrt_adapt_buffer_from_python(buf, ptr)
else:
c.pyapi.numba_buffer_adaptor(buf, ptr)
def cleanup():
c.pyapi.release_buffer(buf)
return NativeValue(c.builder.load(aryptr), is_error=is_error,
cleanup=cleanup)
@unbox(types.Array)
def unbox_array(c, typ, obj):
"""
Convert a Numpy array object to a native array structure.
"""
# This is necessary because unbox_buffer() does not work on some
# dtypes, e.g. datetime64 and timedelta64.
# TODO check matching dtype.
# currently, a mismatching dtype will still work but can cause
# potential memory corruption
nativearycls = c.context.make_array(typ)
nativeary = nativearycls(c.context, c.builder)
aryptr = nativeary._getpointer()
ptr = c.builder.bitcast(aryptr, c.pyapi.voidptr)
if c.context.enable_nrt:
errcode = c.pyapi.nrt_adapt_ndarray_from_python(obj, ptr)
else:
errcode = c.pyapi.numba_array_adaptor(obj, ptr)
failed = cgutils.is_not_null(c.builder, errcode)
return NativeValue(c.builder.load(aryptr), is_error=failed)
@box(types.Tuple)
@box(types.UniTuple)
def box_tuple(c, typ, val):
"""
Convert native array or structure *val* to a tuple object.
"""
tuple_val = c.pyapi.tuple_new(typ.count)
for i, dtype in enumerate(typ):
item = c.builder.extract_value(val, i)
obj = c.box(dtype, item)
c.pyapi.tuple_setitem(tuple_val, i, obj)
return tuple_val
@box(types.NamedTuple)
@box(types.NamedUniTuple)
def box_namedtuple(c, typ, val):
"""
Convert native array or structure *val* to a namedtuple object.
"""
cls_obj = c.pyapi.unserialize(c.pyapi.serialize_object(typ.instance_class))
tuple_obj = box_tuple(c, typ, val)
obj = c.pyapi.call(cls_obj, tuple_obj)
c.pyapi.decref(cls_obj)
c.pyapi.decref(tuple_obj)
return obj
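# serialize_object()/unserialize() round-trip the namedtuple class through a
# pickle-based environment constant, so at runtime the boxed tuple can simply
# be splatted into the class constructor. Rough conceptual sketch (assuming
# pickle semantics; `boxed_items` is an illustrative name):
#
#     import pickle
#     cls = pickle.loads(pickle.dumps(typ.instance_class))
#     obj = cls(*boxed_items)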
@unbox(types.BaseTuple)
def unbox_tuple(c, typ, obj):
"""
Convert tuple *obj* to a native array (if homogeneous) or structure.
"""
n = len(typ)
values = []
cleanups = []
is_error = cgutils.false_bit
for i, eltype in enumerate(typ):
elem = c.pyapi.tuple_getitem(obj, i)
native = c.unbox(eltype, elem)
values.append(native.value)
is_error = c.builder.or_(is_error, native.is_error)
if native.cleanup is not None:
cleanups.append(native.cleanup)
if cleanups:
def cleanup():
for func in reversed(cleanups):
func()
else:
cleanup = None
if isinstance(typ, types.UniTuple):
value = cgutils.pack_array(c.builder, values)
else:
value = cgutils.make_anonymous_struct(c.builder, values)
return NativeValue(value, is_error=is_error, cleanup=cleanup)
@box(types.List)
def box_list(c, typ, val):
"""
Convert native list *val* to a list object.
"""
list = listobj.ListInstance(c.context, c.builder, typ, val)
obj = list.parent
res = cgutils.alloca_once_value(c.builder, obj)
with c.builder.if_else(cgutils.is_not_null(c.builder, obj)) as (has_parent, otherwise):
with has_parent:
# List is actually reflected => return the original object
# (note not all list instances whose *type* is reflected are
# actually reflected; see numba.tests.test_lists for an example)
c.pyapi.incref(obj)
with otherwise:
# Build a new Python list
nitems = list.size
obj = c.pyapi.list_new(nitems)
with c.builder.if_then(cgutils.is_not_null(c.builder, obj),
likely=True):
with cgutils.for_range(c.builder, nitems) as loop:
item = list.getitem(loop.index)
itemobj = c.box(typ.dtype, item)
c.pyapi.list_setitem(obj, loop.index, itemobj)
c.builder.store(obj, res)
# Steal NRT ref
c.context.nrt_decref(c.builder, typ, val)
return c.builder.load(res)
@unbox(types.List)
def unbox_list(c, typ, obj):
"""
Convert list *obj* to a native list.
If list was previously unboxed, we reuse the existing native list
to ensure consistency.
"""
size = c.pyapi.list_size(obj)
errorptr = cgutils.alloca_once_value(c.builder, cgutils.false_bit)
listptr = cgutils.alloca_once(c.builder, c.context.get_value_type(typ))
# Use pointer-stuffing hack to see if the list was previously unboxed,
# if so, re-use the meminfo.
ptr = c.pyapi.list_get_private_data(obj)
with c.builder.if_else(cgutils.is_not_null(c.builder, ptr)) \
as (has_meminfo, otherwise):
with has_meminfo:
# List was previously unboxed => reuse meminfo
list = listobj.ListInstance.from_meminfo(c.context, c.builder, typ, ptr)
list.size = size
if typ.reflected:
list.parent = obj
c.builder.store(list.value, listptr)
with otherwise:
# Allocate a new native list
ok, list = listobj.ListInstance.allocate_ex(c.context, c.builder, typ, size)
with c.builder.if_else(ok, likely=True) as (if_ok, if_not_ok):
with if_ok:
list.size = size
with cgutils.for_range(c.builder, size) as loop:
itemobj = c.pyapi.list_getitem(obj, loop.index)
# XXX we don't call native cleanup for each
# list element, since that would require keeping track
# of which unboxings have been successful.
native = c.unbox(typ.dtype, itemobj)
with c.builder.if_then(native.is_error, likely=False):
c.builder.store(cgutils.true_bit, errorptr)
list.setitem(loop.index, native.value)
if typ.reflected:
list.parent = obj
# Stuff meminfo pointer into the Python object for
# later reuse.
c.pyapi.list_set_private_data(obj, list.meminfo)
c.builder.store(list.value, listptr)
with if_not_ok:
c.builder.store(cgutils.true_bit, errorptr)
# If an error occurred, drop the whole native list
with c.builder.if_then(c.builder.load(errorptr)):
c.context.nrt_decref(c.builder, typ, list.value)
def cleanup():
# Clean up the stuffed pointer, as the meminfo is now invalid.
c.pyapi.list_reset_private_data(obj)
return NativeValue(c.builder.load(listptr),
is_error=c.builder.load(errorptr),
cleanup=cleanup)
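# The control flow above, sketched in plain Python (illustrative only; the
# get/set helpers stand in for the pointer-stuffing C API calls):
#
#     meminfo = list_get_private_data(pyobj)
#     if meminfo is not None:
#         native = ListInstance.from_meminfo(meminfo)    # reuse prior unboxing
#     else:
#         native = ListInstance.allocate(len(pyobj))
#         for i, item in enumerate(pyobj):
#             native[i] = unbox(dtype, item)
#         list_set_private_data(pyobj, native.meminfo)   # stash for reuse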
@reflect(types.List)
def reflect_list(c, typ, val):
"""
Reflect the native list's contents into the Python object.
"""
if not typ.reflected:
return
list = listobj.ListInstance(c.context, c.builder, typ, val)
with c.builder.if_then(list.dirty, likely=False):
obj = list.parent
size = c.pyapi.list_size(obj)
new_size = list.size
diff = c.builder.sub(new_size, size)
diff_gt_0 = c.builder.icmp_signed('>=', diff,
ir.Constant(diff.type, 0))
with c.builder.if_else(diff_gt_0) as (if_grow, if_shrink):
# XXX no error checking below
with if_grow:
# First overwrite existing items
with cgutils.for_range(c.builder, size) as loop:
item = list.getitem(loop.index)
itemobj = c.box(typ.dtype, item)
c.pyapi.list_setitem(obj, loop.index, itemobj)
# Then add missing items
with cgutils.for_range(c.builder, diff) as loop:
idx = c.builder.add(size, loop.index)
item = list.getitem(idx)
itemobj = c.box(typ.dtype, item)
c.pyapi.list_append(obj, itemobj)
c.pyapi.decref(itemobj)
with if_shrink:
# First delete list tail
c.pyapi.list_setslice(obj, new_size, size, None)
# Then overwrite remaining items
with cgutils.for_range(c.builder, new_size) as loop:
item = list.getitem(loop.index)
itemobj = c.box(typ.dtype, item)
c.pyapi.list_setitem(obj, loop.index, itemobj)
# Mark the list clean, in case it is reflected twice
list.set_dirty(False)
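# A runnable pure-Python model of the reflection above (hypothetical helper,
# not part of this module): overwrite the common prefix, then either append
# the extra native items (grow) or delete the Python list's tail (shrink).
def _reflect_sketch(py_list, native_items):
    common = min(len(py_list), len(native_items))
    # Overwrite items that exist on both sides.
    py_list[:common] = native_items[:common]
    if len(native_items) > common:
        # Grow: add the missing items.
        py_list.extend(native_items[common:])
    else:
        # Shrink: drop the stale tail.
        del py_list[common:]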
#
# Other types
#
@box(types.Generator)
def box_generator(c, typ, val):
return c.pyapi.from_native_generator(val, typ, c.env_manager.env_ptr)
@unbox(types.Generator)
def unbox_generator(c, typ, obj):
return c.pyapi.to_native_generator(obj, typ)
@box(types.DType)
def box_dtype(c, typ, val):
np_dtype = numpy_support.as_dtype(typ.dtype)
return c.pyapi.unserialize(c.pyapi.serialize_object(np_dtype))
@box(types.PyObject)
@box(types.Object)
def box_pyobject(c, typ, val):
return val
@unbox(types.PyObject)
@unbox(types.Object)
def unbox_pyobject(c, typ, obj):
return NativeValue(obj)
@unbox(types.ExternalFunctionPointer)
def unbox_funcptr(c, typ, obj):
if typ.get_pointer is None:
raise NotImplementedError(typ)
# Call get_pointer() on the object to get the raw pointer value
ptrty = c.context.get_function_pointer_type(typ)
ret = cgutils.alloca_once_value(c.builder,
ir.Constant(ptrty, None),
name='fnptr')
ser = c.pyapi.serialize_object(typ.get_pointer)
get_pointer = c.pyapi.unserialize(ser)
with cgutils.if_likely(c.builder,
cgutils.is_not_null(c.builder, get_pointer)):
intobj = c.pyapi.call_function_objargs(get_pointer, (obj,))
c.pyapi.decref(get_pointer)
with cgutils.if_likely(c.builder,
cgutils.is_not_null(c.builder, intobj)):
ptr = c.pyapi.long_as_voidptr(intobj)
c.pyapi.decref(intobj)
c.builder.store(c.builder.bitcast(ptr, ptrty), ret)
return NativeValue(c.builder.load(ret), is_error=c.pyapi.c_api_error())
# Source: pombredanne/numba | path: numba/targets/boxing.py | license: bsd-2-clause
from django import forms
from django.contrib.auth.forms import (
UserChangeForm as DjangoUserChangeForm,
UserCreationForm as DjangoUserCreationForm
)
from .models import User
class UserForm(forms.ModelForm):
class Meta:
# Set this form to use the User model.
model = User
# Constrain the UserForm to just these fields.
fields = ("first_name", "last_name")
class UserChangeForm(DjangoUserChangeForm):
class Meta(DjangoUserChangeForm.Meta):
model = User
class UserCreationForm(DjangoUserCreationForm):
class Meta(DjangoUserCreationForm.Meta):
model = User
def clean_username(self):
username = self.cleaned_data["username"]
try:
User.objects.get(username=username)
except User.DoesNotExist:
return username
raise forms.ValidationError(self.error_messages['duplicate_username'])
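# A minimal usage sketch of the duplicate-username check (form data is
# illustrative and assumes a configured database):
#
#     form = UserCreationForm(data={"username": "taken",
#                                   "password1": "s3cret!",
#                                   "password2": "s3cret!"})
#     form.is_valid()   # False if "taken" already exists; the message
#                       # appears under form.errors["username"]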
# Source: wldcordeiro/cookiecutter-django-essentials | path: {{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/users/forms.py | license: bsd-3-clause
from functools import update_wrapper
from weakref import WeakSet
from django.apps import apps
from django.contrib.admin import ModelAdmin, actions
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.core.exceptions import ImproperlyConfigured
from django.db.models.base import ModelBase
from django.http import Http404, HttpResponseRedirect
from django.template.response import TemplateResponse
from django.urls import NoReverseMatch, reverse
from django.utils import six
from django.utils.text import capfirst
from django.utils.translation import ugettext as _, ugettext_lazy
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.views.i18n import JavaScriptCatalog
all_sites = WeakSet()
class AlreadyRegistered(Exception):
pass
class NotRegistered(Exception):
pass
class AdminSite(object):
"""
An AdminSite object encapsulates an instance of the Django admin application, ready
to be hooked in to your URLconf. Models are registered with the AdminSite using the
register() method, and the get_urls() method can then be used to access Django view
functions that present a full admin interface for the collection of registered
models.
"""
# Text to put at the end of each page's <title>.
site_title = ugettext_lazy('Django site admin')
# Text to put in each page's <h1>.
site_header = ugettext_lazy('Django administration')
# Text to put at the top of the admin index page.
index_title = ugettext_lazy('Site administration')
# URL for the "View site" link at the top of each admin page.
site_url = '/'
_empty_value_display = '-'
login_form = None
index_template = None
app_index_template = None
login_template = None
logout_template = None
password_change_template = None
password_change_done_template = None
def __init__(self, name='admin'):
self._registry = {} # model_class class -> admin_class instance
self.name = name
self._actions = {'delete_selected': actions.delete_selected}
self._global_actions = self._actions.copy()
all_sites.add(self)
def check(self, app_configs):
"""
Run the system checks on all ModelAdmins, skipping any that aren't
customized at all.
"""
if app_configs is None:
app_configs = apps.get_app_configs()
app_configs = set(app_configs) # Speed up lookups below
errors = []
modeladmins = (o for o in self._registry.values() if o.__class__ is not ModelAdmin)
for modeladmin in modeladmins:
if modeladmin.model._meta.app_config in app_configs:
errors.extend(modeladmin.check())
return errors
def register(self, model_or_iterable, admin_class=None, **options):
"""
Registers the given model(s) with the given admin class.
The model(s) should be Model classes, not instances.
If an admin class isn't given, it will use ModelAdmin (the default
admin options). If keyword arguments are given -- e.g., list_display --
they'll be applied as options to the admin class.
If a model is already registered, this will raise AlreadyRegistered.
If a model is abstract, this will raise ImproperlyConfigured.
"""
if not admin_class:
admin_class = ModelAdmin
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model._meta.abstract:
raise ImproperlyConfigured(
'The model %s is abstract, so it cannot be registered with admin.' % model.__name__
)
if model in self._registry:
raise AlreadyRegistered('The model %s is already registered' % model.__name__)
# Ignore the registration if the model has been
# swapped out.
if not model._meta.swapped:
# If we got **options then dynamically construct a subclass of
# admin_class with those **options.
if options:
# For reasons I don't quite understand, without a __module__
# the created class appears to "live" in the wrong place,
# which causes issues later on.
options['__module__'] = __name__
admin_class = type("%sAdmin" % model.__name__, (admin_class,), options)
# Instantiate the admin class to save in the registry
self._registry[model] = admin_class(model, self)
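    # Hypothetical registration sketch showing the **options path above,
    # which builds a throwaway ModelAdmin subclass (model and option values
    # are illustrative):
    #
    #     site = AdminSite(name='myadmin')
    #     site.register(Article, list_display=('title', 'created'),
    #                   search_fields=('title',))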
def unregister(self, model_or_iterable):
"""
Unregisters the given model(s).
If a model isn't already registered, this will raise NotRegistered.
"""
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model not in self._registry:
raise NotRegistered('The model %s is not registered' % model.__name__)
del self._registry[model]
def is_registered(self, model):
"""
Check if a model class is registered with this `AdminSite`.
"""
return model in self._registry
def add_action(self, action, name=None):
"""
Register an action to be available globally.
"""
name = name or action.__name__
self._actions[name] = action
self._global_actions[name] = action
def disable_action(self, name):
"""
Disable a globally-registered action. Raises KeyError for invalid names.
"""
del self._actions[name]
def get_action(self, name):
"""
Explicitly get a registered global action whether it's enabled or
not. Raises KeyError for invalid names.
"""
return self._global_actions[name]
@property
def actions(self):
"""
Get all the enabled actions as an iterable of (name, func).
"""
return six.iteritems(self._actions)
@property
def empty_value_display(self):
return self._empty_value_display
@empty_value_display.setter
def empty_value_display(self, empty_value_display):
self._empty_value_display = empty_value_display
def has_permission(self, request):
"""
Returns True if the given HttpRequest has permission to view
*at least one* page in the admin site.
"""
return request.user.is_active and request.user.is_staff
def admin_view(self, view, cacheable=False):
"""
Decorator to create an admin view attached to this ``AdminSite``. This
wraps the view and provides permission checking by calling
``self.has_permission``.
You'll want to use this from within ``AdminSite.get_urls()``:
class MyAdminSite(AdminSite):
def get_urls(self):
from django.conf.urls import url
urls = super(MyAdminSite, self).get_urls()
urls += [
url(r'^my_view/$', self.admin_view(some_view))
]
return urls
By default, admin_views are marked non-cacheable using the
``never_cache`` decorator. If the view can be safely cached, set
cacheable=True.
"""
def inner(request, *args, **kwargs):
if not self.has_permission(request):
if request.path == reverse('admin:logout', current_app=self.name):
index_path = reverse('admin:index', current_app=self.name)
return HttpResponseRedirect(index_path)
# Inner import to prevent django.contrib.admin (app) from
# importing django.contrib.auth.models.User (unrelated model).
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(
request.get_full_path(),
reverse('admin:login', current_app=self.name)
)
return view(request, *args, **kwargs)
if not cacheable:
inner = never_cache(inner)
# We add csrf_protect here so this function can be used as a utility
# function for any view, without having to repeat 'csrf_protect'.
if not getattr(view, 'csrf_exempt', False):
inner = csrf_protect(inner)
return update_wrapper(inner, view)
def get_urls(self):
from django.conf.urls import url, include
# Since this module gets imported in the application's root package,
# it cannot import models from other applications at the module level,
# and django.contrib.contenttypes.views imports ContentType.
from django.contrib.contenttypes import views as contenttype_views
def wrap(view, cacheable=False):
def wrapper(*args, **kwargs):
return self.admin_view(view, cacheable)(*args, **kwargs)
wrapper.admin_site = self
return update_wrapper(wrapper, view)
# Admin-site-wide views.
urlpatterns = [
url(r'^$', wrap(self.index), name='index'),
url(r'^login/$', self.login, name='login'),
url(r'^logout/$', wrap(self.logout), name='logout'),
url(r'^password_change/$', wrap(self.password_change, cacheable=True), name='password_change'),
url(r'^password_change/done/$', wrap(self.password_change_done, cacheable=True),
name='password_change_done'),
url(r'^jsi18n/$', wrap(self.i18n_javascript, cacheable=True), name='jsi18n'),
url(r'^r/(?P<content_type_id>\d+)/(?P<object_id>.+)/$', wrap(contenttype_views.shortcut),
name='view_on_site'),
]
# Add in each model's views, and create a list of valid URLS for the
# app_index
valid_app_labels = []
for model, model_admin in self._registry.items():
urlpatterns += [
url(r'^%s/%s/' % (model._meta.app_label, model._meta.model_name), include(model_admin.urls)),
]
if model._meta.app_label not in valid_app_labels:
valid_app_labels.append(model._meta.app_label)
# If there were ModelAdmins registered, we should have a list of app
# labels for which we need to allow access to the app_index view.
if valid_app_labels:
regex = r'^(?P<app_label>' + '|'.join(valid_app_labels) + ')/$'
urlpatterns += [
url(regex, wrap(self.app_index), name='app_list'),
]
return urlpatterns
@property
def urls(self):
return self.get_urls(), 'admin', self.name
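    # Typical hookup in a project URLconf (sketch, using the url() syntax
    # this module already imports):
    #
    #     from django.conf.urls import url
    #     from django.contrib import admin
    #
    #     urlpatterns = [
    #         url(r'^admin/', admin.site.urls),
    #     ]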
def each_context(self, request):
"""
Returns a dictionary of variables to put in the template context for
*every* page in the admin site.
For sites running on a subpath, use the SCRIPT_NAME value if site_url
hasn't been customized.
"""
script_name = request.META['SCRIPT_NAME']
site_url = script_name if self.site_url == '/' and script_name else self.site_url
return {
'site_title': self.site_title,
'site_header': self.site_header,
'site_url': site_url,
'has_permission': self.has_permission(request),
'available_apps': self.get_app_list(request),
}
def password_change(self, request, extra_context=None):
"""
Handles the "change password" task -- both form display and validation.
"""
from django.contrib.admin.forms import AdminPasswordChangeForm
from django.contrib.auth.views import PasswordChangeView
url = reverse('admin:password_change_done', current_app=self.name)
defaults = {
'form_class': AdminPasswordChangeForm,
'success_url': url,
'extra_context': dict(self.each_context(request), **(extra_context or {})),
}
if self.password_change_template is not None:
defaults['template_name'] = self.password_change_template
request.current_app = self.name
return PasswordChangeView.as_view(**defaults)(request)
def password_change_done(self, request, extra_context=None):
"""
Displays the "success" page after a password change.
"""
from django.contrib.auth.views import PasswordChangeDoneView
defaults = {
'extra_context': dict(self.each_context(request), **(extra_context or {})),
}
if self.password_change_done_template is not None:
defaults['template_name'] = self.password_change_done_template
request.current_app = self.name
return PasswordChangeDoneView.as_view(**defaults)(request)
def i18n_javascript(self, request, extra_context=None):
"""
Displays the i18n JavaScript that the Django admin requires.
`extra_context` is unused but present for consistency with the other
admin views.
"""
return JavaScriptCatalog.as_view(packages=['django.contrib.admin'])(request)
@never_cache
def logout(self, request, extra_context=None):
"""
Logs out the user for the given HttpRequest.
This should *not* assume the user is already logged in.
"""
from django.contrib.auth.views import LogoutView
defaults = {
'extra_context': dict(
self.each_context(request),
# Since the user isn't logged out at this point, the value of
# has_permission must be overridden.
has_permission=False,
**(extra_context or {})
),
}
if self.logout_template is not None:
defaults['template_name'] = self.logout_template
request.current_app = self.name
return LogoutView.as_view(**defaults)(request)
@never_cache
def login(self, request, extra_context=None):
"""
Displays the login form for the given HttpRequest.
"""
if request.method == 'GET' and self.has_permission(request):
# Already logged-in, redirect to admin index
index_path = reverse('admin:index', current_app=self.name)
return HttpResponseRedirect(index_path)
from django.contrib.auth.views import LoginView
# Since this module gets imported in the application's root package,
# it cannot import models from other applications at the module level,
# and django.contrib.admin.forms eventually imports User.
from django.contrib.admin.forms import AdminAuthenticationForm
context = dict(
self.each_context(request),
title=_('Log in'),
app_path=request.get_full_path(),
username=request.user.get_username(),
)
if (REDIRECT_FIELD_NAME not in request.GET and
REDIRECT_FIELD_NAME not in request.POST):
context[REDIRECT_FIELD_NAME] = reverse('admin:index', current_app=self.name)
context.update(extra_context or {})
defaults = {
'extra_context': context,
'authentication_form': self.login_form or AdminAuthenticationForm,
'template_name': self.login_template or 'admin/login.html',
}
request.current_app = self.name
return LoginView.as_view(**defaults)(request)
def _build_app_dict(self, request, label=None):
"""
Builds the app dictionary. Takes an optional label parameter to filter
models of a specific app.
"""
app_dict = {}
if label:
models = {
m: m_a for m, m_a in self._registry.items()
if m._meta.app_label == label
}
else:
models = self._registry
for model, model_admin in models.items():
app_label = model._meta.app_label
has_module_perms = model_admin.has_module_permission(request)
if not has_module_perms:
continue
perms = model_admin.get_model_perms(request)
# Check whether user has any perm for this module.
# If so, add the module to the model_list.
if True not in perms.values():
continue
info = (app_label, model._meta.model_name)
model_dict = {
'name': capfirst(model._meta.verbose_name_plural),
'object_name': model._meta.object_name,
'perms': perms,
}
if perms.get('change'):
try:
model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name)
except NoReverseMatch:
pass
if perms.get('add'):
try:
model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name)
except NoReverseMatch:
pass
if app_label in app_dict:
app_dict[app_label]['models'].append(model_dict)
else:
app_dict[app_label] = {
'name': apps.get_app_config(app_label).verbose_name,
'app_label': app_label,
'app_url': reverse(
'admin:app_list',
kwargs={'app_label': app_label},
current_app=self.name,
),
'has_module_perms': has_module_perms,
'models': [model_dict],
}
if label:
return app_dict.get(label)
return app_dict
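    # Shape of one entry produced above (values illustrative):
    #
    #     {'name': 'Polls', 'app_label': 'polls', 'app_url': '/admin/polls/',
    #      'has_module_perms': True,
    #      'models': [{'name': 'Questions', 'object_name': 'Question',
    #                  'perms': {...}, 'admin_url': '/admin/polls/question/',
    #                  'add_url': '/admin/polls/question/add/'}]}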
def get_app_list(self, request):
"""
Returns a sorted list of all the installed apps that have been
registered in this site.
"""
app_dict = self._build_app_dict(request)
# Sort the apps alphabetically.
app_list = sorted(app_dict.values(), key=lambda x: x['name'].lower())
# Sort the models alphabetically within each app.
for app in app_list:
app['models'].sort(key=lambda x: x['name'])
return app_list
@never_cache
def index(self, request, extra_context=None):
"""
Displays the main admin index page, which lists all of the installed
apps that have been registered in this site.
"""
app_list = self.get_app_list(request)
context = dict(
self.each_context(request),
title=self.index_title,
app_list=app_list,
)
context.update(extra_context or {})
request.current_app = self.name
return TemplateResponse(request, self.index_template or 'admin/index.html', context)
def app_index(self, request, app_label, extra_context=None):
app_dict = self._build_app_dict(request, app_label)
if not app_dict:
raise Http404('The requested admin page does not exist.')
# Sort the models alphabetically within each app.
app_dict['models'].sort(key=lambda x: x['name'])
app_name = apps.get_app_config(app_label).verbose_name
context = dict(
self.each_context(request),
title=_('%(app)s administration') % {'app': app_name},
app_list=[app_dict],
app_label=app_label,
)
context.update(extra_context or {})
request.current_app = self.name
return TemplateResponse(request, self.app_index_template or [
'admin/%s/app_index.html' % app_label,
'admin/app_index.html'
], context)
# This global object represents the default admin site, for the common case.
# You can instantiate AdminSite in your own code to create a custom admin site.
site = AdminSite()
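# A minimal custom-site sketch, as the comment above suggests (names are
# illustrative):
#
#     class MyAdminSite(AdminSite):
#         site_header = 'My project administration'
#
#     my_site = MyAdminSite(name='myadmin')
#     my_site.register(SomeModel)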
# Source: scifiswapnil/Project-LoCatr | path: lib/python2.7/site-packages/django/contrib/admin/sites.py | license: mit
import datetime
from typing import Optional, Type
from unittest.mock import Mock
import pytest
from anchore_engine.db.entities.policy_engine import (
DistroNamespace,
FeedGroupMetadata,
GrypeDBFeedMetadata,
)
from anchore_engine.services.policy_engine.engine.policy.gate_util_provider import (
GateUtilProvider,
GrypeGateUtilProvider,
LegacyGateUtilProvider,
)
grype_db_for_unsupported_distro = GrypeDBFeedMetadata(
groups=[
{"name": "ubuntu:20.04", "record_count": 4909},
{"name": "amzn:2", "record_count": 0},
{"name": "alpine:3.10", "record_count": 200},
{"name": "debian:10", "record_count": 500},
{"name": "github:python", "record_count": 800},
]
)
class TestGateUtilProvider:
sync_time = datetime.datetime.utcnow()
@pytest.mark.parametrize(
"gate_util_provider, feed_group_metadata, grype_db_feed_metadata, expected_oldest_update",
[
# Case, legacy provider, feed group exists
(
LegacyGateUtilProvider,
FeedGroupMetadata(
last_sync=sync_time,
name="test-feed-out-of-date",
),
None,
sync_time,
),
# Case, legacy provider, feed group does not exist
(
LegacyGateUtilProvider,
None,
None,
None,
),
# Case, grype provider, active grype DB exists
(
GrypeGateUtilProvider,
None,
GrypeDBFeedMetadata(built_at=sync_time),
sync_time,
),
# Case, grype provider, active grype DB does not exist
(
GrypeGateUtilProvider,
None,
None,
None,
),
],
)
def test_oldest_namespace_feed_sync(
self,
gate_util_provider: Type[GateUtilProvider],
feed_group_metadata: Optional[FeedGroupMetadata],
grype_db_feed_metadata: Optional[GrypeDBFeedMetadata],
expected_oldest_update: Optional[datetime.datetime],
mock_distromapping_query,
mock_gate_util_provider_feed_data,
):
ns = DistroNamespace(name="DEB", version="10", like_distro=None)
ns.like_namespace_names = ["debian:10"]
mock_gate_util_provider_feed_data(
feed_group_metadata=feed_group_metadata,
grype_db_feed_metadata=grype_db_feed_metadata,
)
provider = gate_util_provider()
oldest_update = provider.oldest_namespace_feed_sync(ns)
assert oldest_update == expected_oldest_update
@pytest.mark.parametrize(
"grypedb, distro, version, expected",
[
(grype_db_for_unsupported_distro, "amzn", "2", False),
(grype_db_for_unsupported_distro, "alpine", "3.10", True),
(grype_db_for_unsupported_distro, "debian", "10", True),
(grype_db_for_unsupported_distro, "github", "python", True),
(grype_db_for_unsupported_distro, "ubuntu", "17.04", False),
(None, "alpine", "3.10", False), # This one tests no active grypedb
(GrypeDBFeedMetadata(groups=None), "alpine", "3.10", False),
],
)
def test_have_vulnerabilities_for_grype_provider(
self,
grypedb,
distro: str,
version: str,
expected: bool,
mock_gate_util_provider_feed_data,
):
# Setup
distro_namespace = Mock()
base_distro_name = distro + ":" + version
distro_namespace.like_namespace_names = [
f"{base_distro_name}.0",
f"{base_distro_name}.1",
f"{base_distro_name}.2",
base_distro_name,
]
provider = GrypeGateUtilProvider()
mock_gate_util_provider_feed_data(grype_db_feed_metadata=grypedb)
# Method under test
result = provider.have_vulnerabilities_for(distro_namespace)
# Assert expected result
assert result is expected
# Source: anchore/anchore-engine | path: tests/unit/anchore_engine/services/policy_engine/engine/policy/test_gate_util_provider.py | license: apache-2.0
import logging
import sys
from framework.mongo import database
from framework.transactions.context import TokuTransaction
from scripts import utils as script_utils
from website.app import init_app
from website.models import PreprintService
from website.preprints.tasks import on_preprint_updated
from website import settings
logger = logging.getLogger(__name__)
def get_targets():
return [p['_id'] for p in database['preprintservice'].find()]
def migrate():
assert settings.SHARE_URL, 'SHARE_URL must be set to migrate.'
assert settings.SHARE_API_TOKEN, 'SHARE_API_TOKEN must be set to migrate.'
targets = get_targets()
target_count = len(targets)
successes = []
failures = []
count = 0
logger.info('Preparing to migrate {} preprints.'.format(target_count))
for preprint_id in targets:
count += 1
logger.info('{}/{} - {}'.format(count, target_count, preprint_id))
try:
on_preprint_updated(preprint_id)
except Exception as e:
# TODO: This reliably fails for certain nodes with
# IncompleteRead(0 bytes read)
failures.append(preprint_id)
logger.warn('Encountered exception {} while posting to SHARE for preprint {}'.format(e, preprint_id))
else:
successes.append(preprint_id)
logger.info('Successes: {}'.format(successes))
logger.info('Failures: {}'.format(failures))
def main():
dry_run = '--dry' in sys.argv
if not dry_run:
script_utils.add_file_logger(logger, __file__)
init_app(set_backends=True, routes=False)
with TokuTransaction():
migrate()
if dry_run:
raise RuntimeError('Dry run, transaction rolled back.')
if __name__ == "__main__":
main()
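# Hypothetical invocation sketch: pass --dry to log actions but roll the
# transaction back instead of committing, e.g.
#
#     python -m scripts.migration.migrate_share_preprint_data --dry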
# Source: rdhyee/osf.io | path: scripts/migration/migrate_share_preprint_data.py | license: apache-2.0
"""
Test model API
"""
import six
import copy
import collections
from datetime import datetime
from unittest import TestCase
from pynamodb.throttle import Throttle
from pynamodb.connection.util import pythonic
from pynamodb.exceptions import TableError
from pynamodb.types import RANGE
from pynamodb.constants import (
ITEM, STRING_SHORT, ALL, KEYS_ONLY, INCLUDE, REQUEST_ITEMS, UNPROCESSED_KEYS,
RESPONSES, KEYS, ITEMS, LAST_EVALUATED_KEY, EXCLUSIVE_START_KEY, ATTRIBUTES
)
from pynamodb.models import Model
from pynamodb.indexes import (
GlobalSecondaryIndex, LocalSecondaryIndex, AllProjection,
IncludeProjection, KeysOnlyProjection, Index
)
from pynamodb.attributes import (
UnicodeAttribute, NumberAttribute, BinaryAttribute, UTCDateTimeAttribute,
UnicodeSetAttribute, NumberSetAttribute, BinarySetAttribute)
from .response import HttpOK, HttpBadRequest
from .data import (
MODEL_TABLE_DATA, GET_MODEL_ITEM_DATA, SIMPLE_MODEL_TABLE_DATA,
BATCH_GET_ITEMS, SIMPLE_BATCH_GET_ITEMS, COMPLEX_TABLE_DATA,
COMPLEX_ITEM_DATA, INDEX_TABLE_DATA, LOCAL_INDEX_TABLE_DATA,
CUSTOM_ATTR_NAME_INDEX_TABLE_DATA, CUSTOM_ATTR_NAME_ITEM_DATA
)
# Py2/3
if six.PY3:
from unittest.mock import patch
from unittest.mock import MagicMock
else:
from mock import patch
from mock import MagicMock
PATCH_METHOD = 'botocore.operation.Operation.call'
SESSION_PATCH_METHOD = 'botocore.session.get_session'
class OldStyleModel(Model):
_table_name = 'IndexedModel'
user_name = UnicodeAttribute(hash_key=True)
class EmailIndex(GlobalSecondaryIndex):
"""
A global secondary index for email addresses
"""
class Meta:
read_capacity_units = 2
write_capacity_units = 1
projection = AllProjection()
email = UnicodeAttribute(hash_key=True)
numbers = NumberSetAttribute(range_key=True)
class LocalEmailIndex(LocalSecondaryIndex):
"""
A local secondary index for email addresses
"""
class Meta:
read_capacity_units = 2
write_capacity_units = 1
projection = AllProjection()
email = UnicodeAttribute(hash_key=True)
numbers = NumberSetAttribute(range_key=True)
class NonKeyAttrIndex(LocalSecondaryIndex):
class Meta:
read_capacity_units = 2
write_capacity_units = 1
projection = IncludeProjection(non_attr_keys=['numbers'])
email = UnicodeAttribute(hash_key=True)
numbers = NumberSetAttribute(range_key=True)
class IndexedModel(Model):
"""
A model with an index
"""
class Meta:
table_name = 'IndexedModel'
user_name = UnicodeAttribute(hash_key=True)
email = UnicodeAttribute()
email_index = EmailIndex()
include_index = NonKeyAttrIndex()
numbers = NumberSetAttribute()
aliases = UnicodeSetAttribute()
icons = BinarySetAttribute()
class LocalIndexedModel(Model):
"""
A model with an index
"""
class Meta:
table_name = 'LocalIndexedModel'
user_name = UnicodeAttribute(hash_key=True)
email = UnicodeAttribute()
email_index = LocalEmailIndex()
numbers = NumberSetAttribute()
aliases = UnicodeSetAttribute()
icons = BinarySetAttribute()
class SimpleUserModel(Model):
"""
A hash key only model
"""
class Meta:
table_name = 'SimpleModel'
user_name = UnicodeAttribute(hash_key=True)
email = UnicodeAttribute()
numbers = NumberSetAttribute()
custom_aliases = UnicodeSetAttribute(attr_name='aliases')
icons = BinarySetAttribute()
views = NumberAttribute(null=True)
class ThrottledUserModel(Model):
"""
A testing model
"""
class Meta:
table_name = 'UserModel'
user_name = UnicodeAttribute(hash_key=True)
user_id = UnicodeAttribute(range_key=True)
throttle = Throttle('50')
class CustomAttrIndex(LocalSecondaryIndex):
class Meta:
read_capacity_units = 2
write_capacity_units = 1
projection = AllProjection()
overidden_uid = UnicodeAttribute(hash_key=True, attr_name='user_id')
class CustomAttrNameModel(Model):
"""
A testing model
"""
class Meta:
table_name = 'CustomAttrModel'
overidden_user_name = UnicodeAttribute(hash_key=True, attr_name='user_name')
overidden_user_id = UnicodeAttribute(range_key=True, attr_name='user_id')
overidden_attr = UnicodeAttribute(attr_name='foo_attr', null=True)
uid_index = CustomAttrIndex()
class UserModel(Model):
"""
A testing model
"""
class Meta:
table_name = 'UserModel'
custom_user_name = UnicodeAttribute(hash_key=True, attr_name='user_name')
user_id = UnicodeAttribute(range_key=True)
picture = BinaryAttribute(null=True)
zip_code = NumberAttribute(null=True)
email = UnicodeAttribute(default='needs_email')
callable_field = NumberAttribute(default=lambda: 42)
class HostSpecificModel(Model):
"""
A testing model
"""
class Meta:
host = 'http://localhost'
table_name = 'RegionSpecificModel'
user_name = UnicodeAttribute(hash_key=True)
user_id = UnicodeAttribute(range_key=True)
class RegionSpecificModel(Model):
"""
A testing model
"""
class Meta:
region = 'us-west-1'
table_name = 'RegionSpecificModel'
user_name = UnicodeAttribute(hash_key=True)
user_id = UnicodeAttribute(range_key=True)
class ComplexKeyModel(Model):
"""
This model has a key that must be serialized/deserialized properly
"""
class Meta:
table_name = 'ComplexKey'
name = UnicodeAttribute(hash_key=True)
date_created = UTCDateTimeAttribute(default=datetime.utcnow)
class ModelTestCase(TestCase):
"""
Tests for the models API
"""
def assert_dict_lists_equal(self, list1, list2):
"""
Compares two lists of dictionaries
"""
for d1_item in list1:
found = False
for d2_item in list2:
if d2_item.items() == d1_item.items():
found = True
if not found:
if six.PY3:
# TODO: items() comparison is order-sensitive on Python 2, so only
# assert on Python 3 for now
raise AssertionError("Values not equal: {0} {1}".format(d1_item, list2))
def test_create_model(self):
"""
Model.create_table
"""
self.maxDiff = None
scope_args = {'count': 0}
def fake_dynamodb(obj, **kwargs):
if kwargs == {'table_name': UserModel.Meta.table_name}:
if scope_args['count'] == 0:
return HttpBadRequest(), {}
else:
return MODEL_TABLE_DATA
else:
return HttpOK(content={}), {}
fake_db = MagicMock()
fake_db.side_effect = fake_dynamodb
with patch(PATCH_METHOD, new=fake_db) as outer:
with patch("pynamodb.connection.TableConnection.describe_table") as req:
req.return_value = None
with self.assertRaises(ValueError):
UserModel.create_table(read_capacity_units=2, write_capacity_units=2, wait=True)
with patch(PATCH_METHOD, new=fake_db) as req:
UserModel.create_table(read_capacity_units=2, write_capacity_units=2)
# Test for default region
self.assertEqual(UserModel.Meta.region, 'us-east-1')
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK, MODEL_TABLE_DATA
UserModel.create_table(read_capacity_units=2, write_capacity_units=2)
# The default region is us-east-1
self.assertEqual(req.call_args[0][0].region_name, 'us-east-1')
# A table with a specified region
self.assertEqual(RegionSpecificModel.Meta.region, 'us-west-1')
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK, MODEL_TABLE_DATA
RegionSpecificModel.create_table(read_capacity_units=2, write_capacity_units=2)
self.assertEqual(req.call_args[0][0].region_name, 'us-west-1')
# A table with a specified host
self.assertEqual(HostSpecificModel.Meta.host, 'http://localhost')
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK, MODEL_TABLE_DATA
HostSpecificModel.create_table(read_capacity_units=2, write_capacity_units=2)
self.assertEqual(req.call_args[0][0].host, 'http://localhost')
def fake_wait(obj, **kwargs):
if scope_args['count'] == 0:
scope_args['count'] += 1
return HttpBadRequest(), {}
elif scope_args['count'] == 1 or scope_args['count'] == 2:
data = copy.deepcopy(MODEL_TABLE_DATA)
data['Table']['TableStatus'] = 'Creating'
scope_args['count'] += 1
return HttpOK(content=data), data
else:
return HttpOK(MODEL_TABLE_DATA), MODEL_TABLE_DATA
mock_wait = MagicMock()
mock_wait.side_effect = fake_wait
scope_args = {'count': 0}
with patch(PATCH_METHOD, new=mock_wait) as req:
UserModel.create_table(read_capacity_units=2, write_capacity_units=2, wait=True)
def bad_server(obj, **kwargs):
if scope_args['count'] == 0:
scope_args['count'] += 1
return HttpBadRequest(), {}
elif scope_args['count'] == 1 or scope_args['count'] == 2:
return HttpBadRequest(), {}
bad_mock_server = MagicMock()
bad_mock_server.side_effect = bad_server
scope_args = {'count': 0}
with patch(PATCH_METHOD, new=bad_mock_server) as req:
self.assertRaises(
TableError,
UserModel.create_table,
read_capacity_units=2,
write_capacity_units=2,
wait=True
)
def test_model_attrs(self):
"""
Model()
"""
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(MODEL_TABLE_DATA), MODEL_TABLE_DATA
item = UserModel('foo', 'bar')
self.assertEqual(item.email, 'needs_email')
self.assertEqual(item.callable_field, 42)
self.assertEqual(repr(item), '{0}<{1}, {2}>'.format(UserModel.Meta.table_name, item.custom_user_name, item.user_id))
self.assertEqual(repr(UserModel._get_meta_data()), 'MetaTable<{0}>'.format('Thread'))
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(SIMPLE_MODEL_TABLE_DATA), SIMPLE_MODEL_TABLE_DATA
item = SimpleUserModel('foo')
self.assertEqual(repr(item), '{0}<{1}>'.format(SimpleUserModel.Meta.table_name, item.user_name))
self.assertRaises(ValueError, item.save)
self.assertRaises(ValueError, UserModel.from_raw_data, None)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), CUSTOM_ATTR_NAME_INDEX_TABLE_DATA
item = CustomAttrNameModel('foo', 'bar', overidden_attr='test')
self.assertEqual(item.overidden_attr, 'test')
self.assertTrue(not hasattr(item, 'foo_attr'))
def test_overidden_defaults(self):
"""
Custom attribute names
"""
schema = CustomAttrNameModel._get_schema()
correct_schema = {
'key_schema': [
{'key_type': 'HASH', 'attribute_name': 'user_name'},
{'key_type': 'RANGE', 'attribute_name': 'user_id'}
],
'attribute_definitions': [
{'attribute_type': 'S', 'attribute_name': 'user_name'},
{'attribute_type': 'S', 'attribute_name': 'user_id'}
]
}
self.assert_dict_lists_equal(correct_schema['key_schema'], schema['key_schema'])
self.assert_dict_lists_equal(correct_schema['attribute_definitions'], schema['attribute_definitions'])
def test_refresh(self):
"""
Model.refresh
"""
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), MODEL_TABLE_DATA
item = UserModel('foo', 'bar')
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
self.assertRaises(item.DoesNotExist, item.refresh)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(GET_MODEL_ITEM_DATA), GET_MODEL_ITEM_DATA
item.refresh()
self.assertEqual(
item.user_name,
GET_MODEL_ITEM_DATA.get(ITEM).get('user_name').get(STRING_SHORT))
def test_complex_key(self):
"""
Model with complex key
"""
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), COMPLEX_TABLE_DATA
item = ComplexKeyModel('test')
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(COMPLEX_ITEM_DATA), COMPLEX_ITEM_DATA
item.refresh()
def test_delete(self):
"""
Model.delete
"""
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), MODEL_TABLE_DATA
item = UserModel('foo', 'bar')
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), None
item.delete()
params = {
'key': {
'user_id': {
'S': 'bar'
},
'user_name': {
'S': 'foo'
}
},
'return_consumed_capacity': 'TOTAL',
'table_name': 'UserModel'
}
args = req.call_args[1]
self.assertEqual(args, params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), None
item.delete(user_id='bar')
params = {
'key': {
'user_id': {
'S': 'bar'
},
'user_name': {
'S': 'foo'
}
},
'expected': {
'user_id': {
'Value': {'S': 'bar'},
}
},
'return_consumed_capacity': 'TOTAL',
'table_name': 'UserModel'
}
args = req.call_args[1]
self.assertEqual(args, params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), None
item.delete(user_id='bar')
params = {
'key': {
'user_id': {
'S': 'bar'
},
'user_name': {
'S': 'foo'
}
},
'expected': {
'user_id': {
'Value': {'S': 'bar'},
}
},
'return_consumed_capacity': 'TOTAL',
'table_name': 'UserModel'
}
args = req.call_args[1]
self.assertEqual(args, params)
def test_update_item(self):
"""
Model.update_item
"""
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), SIMPLE_MODEL_TABLE_DATA
item = SimpleUserModel('foo', email='bar')
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {}
item.save()
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK({}), {
ATTRIBUTES: {
"views": {
"N": "10"
}
}
}
item.update_item('views', 10, action='add')
args = req.call_args[1]
params = {
'table_name': 'SimpleModel',
'return_values': 'ALL_NEW',
'key': {
'user_name': {
'S': 'foo'
}
},
'attribute_updates': {
'views': {
'Action': 'ADD',
'Value': {
'N': '10'
}
}
},
'return_consumed_capacity': 'TOTAL'
}
self.assertEqual(args, params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK({}), {
ATTRIBUTES: {
"views": {
"N": "10"
}
}
}
item.update_item('views', 10, action='add', user_name='foo')
args = req.call_args[1]
params = {
'table_name': 'SimpleModel',
'return_values': 'ALL_NEW',
'key': {
'user_name': {
'S': 'foo'
}
},
'expected': {
'user_name': {
'Value': {'S': 'foo'}
}
},
'attribute_updates': {
'views': {
'Action': 'ADD',
'Value': {
'N': '10'
}
}
},
'return_consumed_capacity': 'TOTAL'
}
self.assertEqual(args, params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK({}), {
ATTRIBUTES: {
"views": {
"N": "10"
}
}
}
item.update_item('views', 10, action='add', user_name__exists=False)
args = req.call_args[1]
params = {
'table_name': 'SimpleModel',
'return_values': 'ALL_NEW',
'key': {
'user_name': {
'S': 'foo'
}
},
'expected': {
'user_name': {'Exists': False}
},
'attribute_updates': {
'views': {
'Action': 'ADD',
'Value': {
'N': '10'
}
}
},
'return_consumed_capacity': 'TOTAL'
}
self.assertEqual(args, params)
def test_save(self):
"""
Model.save
"""
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), MODEL_TABLE_DATA
item = UserModel('foo', 'bar')
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK({}), {}
item.save()
args = req.call_args[1]
params = {
'item': {
'callable_field': {
'N': '42'
},
'email': {
'S': u'needs_email'
},
'user_id': {
'S': u'bar'
},
'user_name': {
'S': u'foo'
},
},
'return_consumed_capacity': 'TOTAL',
'table_name': 'UserModel'
}
self.assertEqual(args, params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK({}), {}
item.save(email__exists=False)
args = req.call_args[1]
params = {
'item': {
'callable_field': {
'N': '42'
},
'email': {
'S': u'needs_email'
},
'user_id': {
'S': u'bar'
},
'user_name': {
'S': u'foo'
},
},
'expected': {
'email': {
'Exists': False
}
},
'return_consumed_capacity': 'TOTAL',
'table_name': 'UserModel'
}
self.assertEqual(args, params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK({}), {}
item.save(user_name='bar')
args = req.call_args[1]
params = {
'item': {
'callable_field': {
'N': '42'
},
'email': {
'S': u'needs_email'
},
'user_id': {
'S': u'bar'
},
'user_name': {
'S': u'foo'
},
},
'expected': {
'user_name': {
'Value': {'S': 'bar'}
}
},
'return_consumed_capacity': 'TOTAL',
'table_name': 'UserModel'
}
self.assertEqual(args, params)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK({}), {}
item.save(custom_user_name='foo')
args = req.call_args[1]
params = {
'item': {
'callable_field': {
'N': '42'
},
'email': {
'S': u'needs_email'
},
'user_id': {
'S': u'bar'
},
'user_name': {
'S': u'foo'
},
},
'expected': {
'user_name': {
'Value': {'S': 'foo'}
}
},
'return_consumed_capacity': 'TOTAL',
'table_name': 'UserModel'
}
self.assertEqual(args, params)
def test_query(self):
"""
Model.query
"""
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), MODEL_TABLE_DATA
UserModel('foo', 'bar')
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = HttpOK({'Items': items}), {'Items': items}
queried = []
for item in UserModel.query('foo', user_id__between=['id-1', 'id-3']):
queried.append(item._serialize().get(RANGE))
self.assertListEqual(
[item.get('user_id').get(STRING_SHORT) for item in items],
queried
)
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = HttpOK({'Items': items}), {'Items': items}
queried = []
for item in UserModel.query('foo', user_id__gt='id-1', user_id__le='id-2'):
queried.append(item._serialize())
self.assertTrue(len(queried) == len(items))
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = HttpOK({'Items': items}), {'Items': items}
queried = []
for item in UserModel.query('foo', user_id__lt='id-1'):
queried.append(item._serialize())
self.assertTrue(len(queried) == len(items))
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = HttpOK({'Items': items}), {'Items': items}
queried = []
for item in UserModel.query('foo', user_id__ge='id-1'):
queried.append(item._serialize())
self.assertTrue(len(queried) == len(items))
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = HttpOK({'Items': items}), {'Items': items}
queried = []
for item in UserModel.query('foo', user_id__le='id-1'):
queried.append(item._serialize())
self.assertTrue(len(queried) == len(items))
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = HttpOK({'Items': items}), {'Items': items}
queried = []
for item in UserModel.query('foo', user_id__eq='id-1'):
queried.append(item._serialize())
self.assertTrue(len(queried) == len(items))
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = HttpOK({'Items': items}), {'Items': items}
queried = []
for item in UserModel.query('foo', user_id__begins_with='id'):
queried.append(item._serialize())
self.assertTrue(len(queried) == len(items))
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = HttpOK({'Items': items}), {'Items': items}
queried = []
for item in UserModel.query('foo'):
queried.append(item._serialize())
self.assertTrue(len(queried) == len(items))
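        # Simulates a paginated query: each call returns a single item plus a
        # LastEvaluatedKey, advancing one item past any ExclusiveStartKey given.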
def fake_query(*args, **kwargs):
start_key = kwargs.get(pythonic(EXCLUSIVE_START_KEY), None)
if start_key:
item_idx = 0
for query_item in BATCH_GET_ITEMS.get(RESPONSES).get(UserModel.Meta.table_name):
item_idx += 1
if query_item == start_key:
break
query_items = BATCH_GET_ITEMS.get(RESPONSES).get(UserModel.Meta.table_name)[item_idx:item_idx+1]
else:
query_items = BATCH_GET_ITEMS.get(RESPONSES).get(UserModel.Meta.table_name)[:1]
data = {
ITEMS: query_items,
LAST_EVALUATED_KEY: query_items[-1] if len(query_items) else None
}
return HttpOK(data), data
mock_query = MagicMock()
mock_query.side_effect = fake_query
with patch(PATCH_METHOD, new=mock_query) as req:
for item in UserModel.query('foo'):
self.assertIsNotNone(item)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), CUSTOM_ATTR_NAME_INDEX_TABLE_DATA
CustomAttrNameModel._get_meta_data()
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), {ITEMS: [CUSTOM_ATTR_NAME_ITEM_DATA.get(ITEM)]}
for item in CustomAttrNameModel.query('bar', overidden_user_name__eq='foo'):
self.assertIsNotNone(item)
def test_scan(self):
"""
Model.scan
"""
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), MODEL_TABLE_DATA
UserModel('foo', 'bar')
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = HttpOK({'Items': items}), {'Items': items}
scanned_items = []
for item in UserModel.scan():
scanned_items.append(item._serialize().get(RANGE))
self.assertListEqual(
[item.get('user_id').get(STRING_SHORT) for item in items],
scanned_items
)
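        # Same pagination stub as fake_query above, but exercised via scan.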
def fake_scan(*args, **kwargs):
start_key = kwargs.get(pythonic(EXCLUSIVE_START_KEY), None)
if start_key:
item_idx = 0
for scan_item in BATCH_GET_ITEMS.get(RESPONSES).get(UserModel.Meta.table_name):
item_idx += 1
if scan_item == start_key:
break
scan_items = BATCH_GET_ITEMS.get(RESPONSES).get(UserModel.Meta.table_name)[item_idx:item_idx+1]
else:
scan_items = BATCH_GET_ITEMS.get(RESPONSES).get(UserModel.Meta.table_name)[:1]
data = {
ITEMS: scan_items,
LAST_EVALUATED_KEY: scan_items[-1] if len(scan_items) else None
}
return HttpOK(data), data
mock_scan = MagicMock()
mock_scan.side_effect = fake_scan
with patch(PATCH_METHOD, new=mock_scan) as req:
for item in UserModel.scan():
self.assertIsNotNone(item)
def test_get(self):
"""
Model.get
"""
def fake_dynamodb(*args, **kwargs):
if kwargs == {'table_name': UserModel.Meta.table_name}:
return HttpOK(MODEL_TABLE_DATA), MODEL_TABLE_DATA
elif kwargs == {
'return_consumed_capacity': 'TOTAL',
'table_name': 'UserModel',
'key': {'user_name': {'S': 'foo'},
'user_id': {'S': 'bar'}}, 'consistent_read': False}:
return HttpOK(GET_MODEL_ITEM_DATA), GET_MODEL_ITEM_DATA
return HttpOK(), MODEL_TABLE_DATA
fake_db = MagicMock()
fake_db.side_effect = fake_dynamodb
with patch(PATCH_METHOD, new=fake_db) as req:
item = UserModel.get(
'foo',
'bar'
)
self.assertEqual(item._get_keys(), {'user_id': 'bar', 'user_name': 'foo'})
params = {
'consistent_read': False,
'key': {
'user_id': {
'S': 'bar'
},
'user_name': {
'S': 'foo'
}
},
'return_consumed_capacity': 'TOTAL',
'table_name': 'UserModel'
}
self.assertEqual(req.call_args[1], params)
item.zip_code = 88030
self.assertEqual(item.zip_code, 88030)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK({}), {}
self.assertRaises(UserModel.DoesNotExist, UserModel.get, 'foo', 'bar')
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), CUSTOM_ATTR_NAME_INDEX_TABLE_DATA
CustomAttrNameModel._get_meta_data()
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK({}), {}
self.assertRaises(CustomAttrNameModel.DoesNotExist, CustomAttrNameModel.get, 'foo', 'bar')
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK({}), CUSTOM_ATTR_NAME_ITEM_DATA
item = CustomAttrNameModel.get('foo', 'bar')
self.assertEqual(item.overidden_attr, CUSTOM_ATTR_NAME_ITEM_DATA['Item']['foo_attr']['S'])
self.assertEqual(item.overidden_user_name, CUSTOM_ATTR_NAME_ITEM_DATA['Item']['user_name']['S'])
self.assertEqual(item.overidden_user_id, CUSTOM_ATTR_NAME_ITEM_DATA['Item']['user_id']['S'])
def test_batch_get(self):
"""
Model.batch_get
"""
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), SIMPLE_MODEL_TABLE_DATA
SimpleUserModel('foo')
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), SIMPLE_BATCH_GET_ITEMS
item_keys = ['hash-{0}'.format(x) for x in range(10)]
for item in SimpleUserModel.batch_get(item_keys):
self.assertIsNotNone(item)
params = {
'return_consumed_capacity': 'TOTAL',
'request_items': {
'SimpleModel': {
'Keys': [
{'user_name': {'S': 'hash-9'}},
{'user_name': {'S': 'hash-8'}},
{'user_name': {'S': 'hash-7'}},
{'user_name': {'S': 'hash-6'}},
{'user_name': {'S': 'hash-5'}},
{'user_name': {'S': 'hash-4'}},
{'user_name': {'S': 'hash-3'}},
{'user_name': {'S': 'hash-2'}},
{'user_name': {'S': 'hash-1'}},
{'user_name': {'S': 'hash-0'}}
]
}
}
}
self.assertEqual(params, req.call_args[1])
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), MODEL_TABLE_DATA
UserModel('foo', 'bar')
with patch(PATCH_METHOD) as req:
item_keys = [('hash-{0}'.format(x), '{0}'.format(x)) for x in range(10)]
req.return_value = HttpOK(), BATCH_GET_ITEMS
for item in UserModel.batch_get(item_keys):
self.assertIsNotNone(item)
params = {
'request_items': {
'UserModel': {
'Keys': [
{'user_name': {'S': 'hash-0'}, 'user_id': {'S': '0'}},
{'user_name': {'S': 'hash-1'}, 'user_id': {'S': '1'}},
{'user_name': {'S': 'hash-2'}, 'user_id': {'S': '2'}},
{'user_name': {'S': 'hash-3'}, 'user_id': {'S': '3'}},
{'user_name': {'S': 'hash-4'}, 'user_id': {'S': '4'}},
{'user_name': {'S': 'hash-5'}, 'user_id': {'S': '5'}},
{'user_name': {'S': 'hash-6'}, 'user_id': {'S': '6'}},
{'user_name': {'S': 'hash-7'}, 'user_id': {'S': '7'}},
{'user_name': {'S': 'hash-8'}, 'user_id': {'S': '8'}},
{'user_name': {'S': 'hash-9'}, 'user_id': {'S': '9'}}
]
}
}
}
args = req.call_args[1]
            self.assertTrue('request_items' in args)
            self.assertTrue('UserModel' in args['request_items'])
            self.assertTrue('Keys' in args['request_items']['UserModel'])
self.assert_dict_lists_equal(
params['request_items']['UserModel']['Keys'],
args['request_items']['UserModel']['Keys'],
)
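        # Returns only the first requested key and reports the remainder as
        # UnprocessedKeys, so batch_get has to keep re-requesting the rest.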
def fake_batch_get(*batch_args, **kwargs):
if pythonic(REQUEST_ITEMS) in kwargs:
batch_item = kwargs.get(pythonic(REQUEST_ITEMS)).get(UserModel.Meta.table_name).get(KEYS)[0]
batch_items = kwargs.get(pythonic(REQUEST_ITEMS)).get(UserModel.Meta.table_name).get(KEYS)[1:]
response = {
UNPROCESSED_KEYS: {
UserModel.Meta.table_name: {
KEYS: batch_items
}
},
RESPONSES: {
UserModel.Meta.table_name: [batch_item]
}
}
return HttpOK(response), response
return HttpOK({}), {}
batch_get_mock = MagicMock()
batch_get_mock.side_effect = fake_batch_get
with patch(PATCH_METHOD, new=batch_get_mock) as req:
item_keys = [('hash-{0}'.format(x), '{0}'.format(x)) for x in range(200)]
for item in UserModel.batch_get(item_keys):
self.assertIsNotNone(item)
def test_batch_write(self):
"""
Model.batch_write
"""
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), MODEL_TABLE_DATA
UserModel('foo', 'bar')
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK({}), {}
with UserModel.batch_write(auto_commit=False) as batch:
pass
with UserModel.batch_write() as batch:
self.assertIsNone(batch.commit())
with self.assertRaises(ValueError):
with UserModel.batch_write(auto_commit=False) as batch:
items = [UserModel('hash-{0}'.format(x), '{0}'.format(x)) for x in range(26)]
for item in items:
batch.delete(item)
self.assertRaises(ValueError, batch.save, UserModel('asdf', '1234'))
with UserModel.batch_write(auto_commit=False) as batch:
items = [UserModel('hash-{0}'.format(x), '{0}'.format(x)) for x in range(25)]
for item in items:
batch.delete(item)
self.assertRaises(ValueError, batch.save, UserModel('asdf', '1234'))
with UserModel.batch_write(auto_commit=False) as batch:
items = [UserModel('hash-{0}'.format(x), '{0}'.format(x)) for x in range(25)]
for item in items:
batch.save(item)
self.assertRaises(ValueError, batch.save, UserModel('asdf', '1234'))
with UserModel.batch_write() as batch:
items = [UserModel('hash-{0}'.format(x), '{0}'.format(x)) for x in range(30)]
for item in items:
batch.delete(item)
with UserModel.batch_write() as batch:
items = [UserModel('hash-{0}'.format(x), '{0}'.format(x)) for x in range(30)]
for item in items:
batch.save(item)
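        # Echoes all but the first batch item back as UnprocessedKeys, forcing
        # the batch writer to retry until the pending set drains.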
def fake_unprocessed_keys(*args, **kwargs):
if pythonic(REQUEST_ITEMS) in kwargs:
batch_items = kwargs.get(pythonic(REQUEST_ITEMS)).get(UserModel.Meta.table_name)[1:]
unprocessed = {
UNPROCESSED_KEYS: {
UserModel.Meta.table_name: batch_items
}
}
return HttpOK(unprocessed), unprocessed
return HttpOK({}), {}
batch_write_mock = MagicMock()
batch_write_mock.side_effect = fake_unprocessed_keys
        with patch(PATCH_METHOD, new=batch_write_mock) as req:
            items = [UserModel('hash-{0}'.format(x), '{0}'.format(x)) for x in range(500)]
            with UserModel.batch_write() as batch:
                for item in items:
                    batch.save(item)
def test_index_queries(self):
"""
Models.Index.Query
"""
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), CUSTOM_ATTR_NAME_INDEX_TABLE_DATA
CustomAttrNameModel._get_meta_data()
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), INDEX_TABLE_DATA
IndexedModel._get_connection().describe_table()
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), LOCAL_INDEX_TABLE_DATA
LocalIndexedModel._get_meta_data()
queried = []
# user_id not valid
with self.assertRaises(ValueError):
for item in IndexedModel.email_index.query('foo', user_id__between=['id-1', 'id-3']):
queried.append(item._serialize().get(RANGE))
# startswith not valid
with self.assertRaises(ValueError):
for item in IndexedModel.email_index.query('foo', user_name__startswith='foo'):
queried.append(item._serialize().get(RANGE))
# name not valid
with self.assertRaises(ValueError):
for item in IndexedModel.email_index.query('foo', name='foo'):
queried.append(item._serialize().get(RANGE))
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_name'] = {STRING_SHORT: 'id-{0}'.format(idx)}
item['email'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = HttpOK({'Items': items}), {'Items': items}
queried = []
for item in IndexedModel.email_index.query('foo', limit=2, user_name__begins_with='bar'):
queried.append(item._serialize())
params = {
'key_conditions': {
'user_name': {
'ComparisonOperator': 'BEGINS_WITH',
'AttributeValueList': [
{
'S': u'bar'
}
]
},
'email': {
'ComparisonOperator': 'EQ',
'AttributeValueList': [
{
'S': u'foo'
}
]
}
},
'index_name': 'email_index',
'table_name': 'IndexedModel',
'return_consumed_capacity': 'TOTAL',
'limit': 2
}
self.assertEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_name'] = {STRING_SHORT: 'id-{0}'.format(idx)}
item['email'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = HttpOK({'Items': items}), {'Items': items}
queried = []
for item in LocalIndexedModel.email_index.query('foo', limit=1, user_name__begins_with='bar'):
queried.append(item._serialize())
params = {
'key_conditions': {
'user_name': {
'ComparisonOperator': 'BEGINS_WITH',
'AttributeValueList': [
{
'S': u'bar'
}
]
},
'email': {
'ComparisonOperator': 'EQ',
'AttributeValueList': [
{
'S': u'foo'
}
]
}
},
'index_name': 'email_index',
'table_name': 'LocalIndexedModel',
'return_consumed_capacity': 'TOTAL',
'limit': 1
}
self.assertEqual(req.call_args[1], params)
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_name'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = HttpOK({'Items': items}), {'Items': items}
queried = []
for item in CustomAttrNameModel.uid_index.query('foo', limit=2, user_name__begins_with='bar'):
queried.append(item._serialize())
params = {
'key_conditions': {
'user_name': {
'ComparisonOperator': 'BEGINS_WITH',
'AttributeValueList': [
{
'S': u'bar'
}
]
},
'user_id': {
'ComparisonOperator': 'EQ',
'AttributeValueList': [
{
'S': u'foo'
}
]
}
},
'index_name': 'uid_index',
'table_name': 'CustomAttrModel',
'return_consumed_capacity': 'TOTAL',
'limit': 2
}
self.assertEqual(req.call_args[1], params)
def test_global_index(self):
"""
Models.GlobalSecondaryIndex
"""
self.assertIsNotNone(IndexedModel.email_index._hash_key_attribute())
self.assertEqual(IndexedModel.email_index.Meta.projection.projection_type, AllProjection.projection_type)
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), INDEX_TABLE_DATA
with self.assertRaises(ValueError):
IndexedModel('foo', 'bar')
IndexedModel._get_meta_data()
scope_args = {'count': 0}
def fake_dynamodb(obj, **kwargs):
return HttpOK(content={}), {}
fake_db = MagicMock()
fake_db.side_effect = fake_dynamodb
with patch(PATCH_METHOD, new=fake_db) as req:
IndexedModel.create_table(read_capacity_units=2, write_capacity_units=2)
params = {
'attribute_definitions': [
{'attribute_name': 'email', 'attribute_type': 'S'},
{'attribute_name': 'numbers', 'attribute_type': 'NS'}
],
'key_schema': [
{'AttributeName': 'numbers', 'KeyType': 'RANGE'},
{'AttributeName': 'email', 'KeyType': 'HASH'}
]
}
schema = IndexedModel.email_index.get_schema()
args = req.call_args[1]
self.assertEqual(
args['global_secondary_indexes'][0]['ProvisionedThroughput'],
{
'ReadCapacityUnits': 2,
'WriteCapacityUnits': 1
}
)
self.assert_dict_lists_equal(schema['key_schema'], params['key_schema'])
self.assert_dict_lists_equal(schema['attribute_definitions'], params['attribute_definitions'])
def test_local_index(self):
"""
Models.LocalSecondaryIndex
"""
with self.assertRaises(ValueError):
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), LOCAL_INDEX_TABLE_DATA
# This table has no range key
LocalIndexedModel('foo', 'bar')
with patch(PATCH_METHOD) as req:
req.return_value = HttpOK(), LOCAL_INDEX_TABLE_DATA
LocalIndexedModel('foo')
scope_args = {'count': 0}
schema = IndexedModel._get_indexes()
expected = {
'local_secondary_indexes': [
{
'key_schema': [
{'KeyType': 'HASH', 'AttributeName': 'email'},
{'KeyType': 'RANGE', 'AttributeName': 'numbers'}
],
'index_name': 'include_index',
'projection': {
'ProjectionType': 'INCLUDE',
'NonKeyAttributes': ['numbers']
}
}
],
'global_secondary_indexes': [
{
'key_schema': [
{'KeyType': 'HASH', 'AttributeName': 'email'},
{'KeyType': 'RANGE', 'AttributeName': 'numbers'}
],
'index_name': 'email_index',
'projection': {'ProjectionType': 'ALL'},
'provisioned_throughput': {
'WriteCapacityUnits': 1,
'ReadCapacityUnits': 2
}
}
],
'attribute_definitions': [
{'attribute_type': 'S', 'attribute_name': 'email'},
{'attribute_type': 'NS', 'attribute_name': 'numbers'},
{'attribute_type': 'S', 'attribute_name': 'email'},
{'attribute_type': 'NS', 'attribute_name': 'numbers'}
]
}
self.assert_dict_lists_equal(
schema['attribute_definitions'],
expected['attribute_definitions']
)
self.assertEqual(schema['local_secondary_indexes'][0]['projection']['ProjectionType'], 'INCLUDE')
self.assertEqual(schema['local_secondary_indexes'][0]['projection']['NonKeyAttributes'], ['numbers'])
def fake_dynamodb(obj, **kwargs):
return HttpOK(content={}), {}
fake_db = MagicMock()
fake_db.side_effect = fake_dynamodb
with patch(PATCH_METHOD, new=fake_db) as req:
LocalIndexedModel.create_table(read_capacity_units=2, write_capacity_units=2)
params = collections.OrderedDict({
'attribute_definitions': [
{
'attribute_name': 'email', 'attribute_type': 'S'
},
{
'attribute_name': 'numbers',
'attribute_type': 'NS'
}
],
'key_schema': [
{
'AttributeName': 'email', 'KeyType': 'HASH'
},
{
'AttributeName': 'numbers', 'KeyType': 'RANGE'
}
]
})
schema = LocalIndexedModel.email_index.get_schema()
args = req.call_args[1]
self.assert_dict_lists_equal(schema['attribute_definitions'], params['attribute_definitions'])
self.assert_dict_lists_equal(schema['key_schema'], params['key_schema'])
self.assertTrue('ProvisionedThroughput' not in args['local_secondary_indexes'][0])
def test_projections(self):
"""
Models.Projection
"""
projection = AllProjection()
self.assertEqual(projection.projection_type, ALL)
projection = KeysOnlyProjection()
self.assertEqual(projection.projection_type, KEYS_ONLY)
projection = IncludeProjection(non_attr_keys=['foo', 'bar'])
self.assertEqual(projection.projection_type, INCLUDE)
self.assertEqual(projection.non_key_attributes, ['foo', 'bar'])
self.assertRaises(ValueError, IncludeProjection, None)
with self.assertRaises(ValueError):
class BadIndex(Index):
pass
BadIndex()
with self.assertRaises(ValueError):
class BadIndex(Index):
class Meta:
pass
pass
BadIndex()
def test_throttle(self):
"""
Throttle.add_record
"""
throt = Throttle(30)
throt.add_record(None)
for i in range(10):
throt.add_record(1)
throt.throttle()
for i in range(2):
throt.add_record(50)
throt.throttle()
def test_old_style_model_exception(self):
"""
Display warning for pre v1.0 Models
"""
with self.assertRaises(AttributeError):
OldStyleModel._get_meta_data()
with self.assertRaises(AttributeError):
OldStyleModel.exists()
from cx_Freeze import setup, Executable
executables = [Executable('example.py', targetName='hello_world.exe')]
excludes = ['unicodedata', 'logging', 'unittest', 'email', 'html', 'http', 'urllib',
'xml', 'pydoc', 'doctest', 'argparse', 'datetime', 'zipfile',
'subprocess', 'pickle', 'threading', 'locale', 'calendar', 'functools',
'weakref', 'tokenize', 'base64', 'gettext', 'heapq', 're', 'operator',
'bz2', 'fnmatch', 'getopt', 'reprlib', 'string', 'stringprep',
'contextlib', 'quopri', 'copy', 'imp', 'keyword', 'linecache']
includes = ['json']
zip_include_packages = ['collections', 'encodings', 'importlib']
options = {
'build_exe': {
'include_msvcr': True,
'excludes': excludes,
'includes': includes,
'zip_include_packages': zip_include_packages,
'build_exe': 'build_windows',
}
}
setup(name='hello_world',
version='0.0.8',
description='My Hello World App!',
executables=executables,
options=options)
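# Usage sketch (assumes cx_Freeze is installed and example.py sits next to
# this setup.py):
#
#     python setup.py build
#
# With the nested 'build_exe' option above, hello_world.exe and its
# dependencies land in the build_windows/ directory.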
'''
Custom filters for use in openshift-master
'''
import copy
import sys
import yaml
from ansible import errors
from ansible.runner.filter_plugins.core import bool as ansible_bool
class IdentityProviderBase(object):
""" IdentityProviderBase
Attributes:
name (str): Identity provider Name
login (bool): Is this identity provider a login provider?
challenge (bool): Is this identity provider a challenge provider?
provider (dict): Provider specific config
_idp (dict): internal copy of the IDP dict passed in
_required (list): List of lists of strings for required attributes
_optional (list): List of lists of strings for optional attributes
_allow_additional (bool): Does this provider support attributes
not in _required and _optional
Args:
api_version(str): OpenShift config version
idp (dict): idp config dict
Raises:
AnsibleFilterError:
"""
# disabling this check since the number of instance attributes are
# necessary for this class
# pylint: disable=too-many-instance-attributes
def __init__(self, api_version, idp):
if api_version not in ['v1']:
raise errors.AnsibleFilterError("|failed api version {0} unknown".format(api_version))
self._idp = copy.deepcopy(idp)
if 'name' not in self._idp:
raise errors.AnsibleFilterError("|failed identity provider missing a name")
if 'kind' not in self._idp:
raise errors.AnsibleFilterError("|failed identity provider missing a kind")
self.name = self._idp.pop('name')
self.login = ansible_bool(self._idp.pop('login', False))
self.challenge = ansible_bool(self._idp.pop('challenge', False))
self.provider = dict(apiVersion=api_version, kind=self._idp.pop('kind'))
mm_keys = ('mappingMethod', 'mapping_method')
mapping_method = None
for key in mm_keys:
if key in self._idp:
mapping_method = self._idp[key]
if mapping_method is None:
mapping_method = self.get_default('mappingMethod')
self.mapping_method = mapping_method
valid_mapping_methods = ['add', 'claim', 'generate', 'lookup']
if self.mapping_method not in valid_mapping_methods:
raise errors.AnsibleFilterError("|failed unkown mapping method "
"for provider {0}".format(self.__class__.__name__))
self._required = []
self._optional = []
self._allow_additional = True
@staticmethod
def validate_idp_list(idp_list):
''' validates a list of idps '''
login_providers = [x.name for x in idp_list if x.login]
if len(login_providers) > 1:
raise errors.AnsibleFilterError("|failed multiple providers are "
"not allowed for login. login "
"providers: {0}".format(', '.join(login_providers)))
names = [x.name for x in idp_list]
if len(set(names)) != len(names):
raise errors.AnsibleFilterError("|failed more than one provider configured with the same name")
for idp in idp_list:
idp.validate()
def validate(self):
''' validate an instance of this idp class '''
pass
@staticmethod
def get_default(key):
''' get a default value for a given key '''
if key == 'mappingMethod':
return 'claim'
else:
return None
    def set_provider_item(self, items, required=False):
        ''' set a provider item based on the list of item names provided. '''
        provider_key = items[0]
        for item in items:
            if item in self._idp:
                self.provider[provider_key] = self._idp.pop(item)
                break
        else:
            default = self.get_default(provider_key)
            if default is not None:
                self.provider[provider_key] = default
            elif required:
                raise errors.AnsibleFilterError("|failed provider {0} missing "
                                                "required key {1}".format(self.__class__.__name__, provider_key))
def set_provider_items(self):
''' set the provider items for this idp '''
for items in self._required:
self.set_provider_item(items, True)
for items in self._optional:
self.set_provider_item(items)
if self._allow_additional:
for key in self._idp.keys():
self.set_provider_item([key])
else:
if len(self._idp) > 0:
raise errors.AnsibleFilterError("|failed provider {0} "
"contains unknown keys "
"{1}".format(self.__class__.__name__, ', '.join(self._idp.keys())))
def to_dict(self):
''' translate this idp to a dictionary '''
return dict(name=self.name, challenge=self.challenge,
login=self.login, mappingMethod=self.mapping_method,
provider=self.provider)
class LDAPPasswordIdentityProvider(IdentityProviderBase):
""" LDAPPasswordIdentityProvider
Attributes:
Args:
api_version(str): OpenShift config version
idp (dict): idp config dict
Raises:
AnsibleFilterError:
"""
def __init__(self, api_version, idp):
IdentityProviderBase.__init__(self, api_version, idp)
self._allow_additional = False
self._required += [['attributes'], ['url'], ['insecure']]
self._optional += [['ca'],
['bindDN', 'bind_dn'],
['bindPassword', 'bind_password']]
self._idp['insecure'] = ansible_bool(self._idp.pop('insecure', False))
if 'attributes' in self._idp and 'preferred_username' in self._idp['attributes']:
pref_user = self._idp['attributes'].pop('preferred_username')
self._idp['attributes']['preferredUsername'] = pref_user
def validate(self):
''' validate this idp instance '''
IdentityProviderBase.validate(self)
if not isinstance(self.provider['attributes'], dict):
raise errors.AnsibleFilterError("|failed attributes for provider "
"{0} must be a dictionary".format(self.__class__.__name__))
attrs = ['id', 'email', 'name', 'preferredUsername']
for attr in attrs:
if attr in self.provider['attributes'] and not isinstance(self.provider['attributes'][attr], list):
raise errors.AnsibleFilterError("|failed {0} attribute for "
"provider {1} must be a list".format(attr, self.__class__.__name__))
unknown_attrs = set(self.provider['attributes'].keys()) - set(attrs)
if len(unknown_attrs) > 0:
raise errors.AnsibleFilterError("|failed provider {0} has unknown "
"attributes: {1}".format(self.__class__.__name__, ', '.join(unknown_attrs)))
class KeystonePasswordIdentityProvider(IdentityProviderBase):
""" KeystoneIdentityProvider
Attributes:
Args:
api_version(str): OpenShift config version
idp (dict): idp config dict
Raises:
AnsibleFilterError:
"""
def __init__(self, api_version, idp):
IdentityProviderBase.__init__(self, api_version, idp)
self._allow_additional = False
self._required += [['url'], ['domainName', 'domain_name']]
self._optional += [['ca'], ['certFile', 'cert_file'], ['keyFile', 'key_file']]
class RequestHeaderIdentityProvider(IdentityProviderBase):
""" RequestHeaderIdentityProvider
Attributes:
Args:
api_version(str): OpenShift config version
idp (dict): idp config dict
Raises:
AnsibleFilterError:
"""
def __init__(self, api_version, idp):
IdentityProviderBase.__init__(self, api_version, idp)
self._allow_additional = False
self._required += [['headers']]
self._optional += [['challengeURL', 'challenge_url'],
['loginURL', 'login_url'],
['clientCA', 'client_ca']]
def validate(self):
''' validate this idp instance '''
IdentityProviderBase.validate(self)
if not isinstance(self.provider['headers'], list):
raise errors.AnsibleFilterError("|failed headers for provider {0} "
"must be a list".format(self.__class__.__name__))
class AllowAllPasswordIdentityProvider(IdentityProviderBase):
""" AllowAllPasswordIdentityProvider
Attributes:
Args:
api_version(str): OpenShift config version
idp (dict): idp config dict
Raises:
AnsibleFilterError:
"""
def __init__(self, api_version, idp):
IdentityProviderBase.__init__(self, api_version, idp)
self._allow_additional = False
class DenyAllPasswordIdentityProvider(IdentityProviderBase):
""" DenyAllPasswordIdentityProvider
Attributes:
Args:
api_version(str): OpenShift config version
idp (dict): idp config dict
Raises:
AnsibleFilterError:
"""
def __init__(self, api_version, idp):
IdentityProviderBase.__init__(self, api_version, idp)
self._allow_additional = False
class HTPasswdPasswordIdentityProvider(IdentityProviderBase):
""" HTPasswdPasswordIdentity
Attributes:
Args:
api_version(str): OpenShift config version
idp (dict): idp config dict
Raises:
AnsibleFilterError:
"""
def __init__(self, api_version, idp):
IdentityProviderBase.__init__(self, api_version, idp)
self._allow_additional = False
self._required += [['file', 'filename', 'fileName', 'file_name']]
@staticmethod
def get_default(key):
if key == 'file':
return '/etc/origin/htpasswd'
else:
return IdentityProviderBase.get_default(key)
class BasicAuthPasswordIdentityProvider(IdentityProviderBase):
""" BasicAuthPasswordIdentityProvider
Attributes:
Args:
api_version(str): OpenShift config version
idp (dict): idp config dict
Raises:
AnsibleFilterError:
"""
def __init__(self, api_version, idp):
IdentityProviderBase.__init__(self, api_version, idp)
self._allow_additional = False
self._required += [['url']]
self._optional += [['ca'], ['certFile', 'cert_file'], ['keyFile', 'key_file']]
class IdentityProviderOauthBase(IdentityProviderBase):
""" IdentityProviderOauthBase
Attributes:
Args:
api_version(str): OpenShift config version
idp (dict): idp config dict
Raises:
AnsibleFilterError:
"""
def __init__(self, api_version, idp):
IdentityProviderBase.__init__(self, api_version, idp)
self._allow_additional = False
self._required += [['clientID', 'client_id'], ['clientSecret', 'client_secret']]
def validate(self):
''' validate this idp instance '''
IdentityProviderBase.validate(self)
if self.challenge:
raise errors.AnsibleFilterError("|failed provider {0} does not "
"allow challenge authentication".format(self.__class__.__name__))
class OpenIDIdentityProvider(IdentityProviderOauthBase):
""" OpenIDIdentityProvider
Attributes:
Args:
api_version(str): OpenShift config version
idp (dict): idp config dict
Raises:
AnsibleFilterError:
"""
def __init__(self, api_version, idp):
IdentityProviderOauthBase.__init__(self, api_version, idp)
self._required += [['claims'], ['urls']]
self._optional += [['ca'],
['extraScopes'],
['extraAuthorizeParameters']]
if 'claims' in self._idp and 'preferred_username' in self._idp['claims']:
pref_user = self._idp['claims'].pop('preferred_username')
self._idp['claims']['preferredUsername'] = pref_user
if 'urls' in self._idp and 'user_info' in self._idp['urls']:
user_info = self._idp['urls'].pop('user_info')
self._idp['urls']['userInfo'] = user_info
if 'extra_scopes' in self._idp:
self._idp['extraScopes'] = self._idp.pop('extra_scopes')
if 'extra_authorize_parameters' in self._idp:
self._idp['extraAuthorizeParameters'] = self._idp.pop('extra_authorize_parameters')
if 'extraAuthorizeParameters' in self._idp:
if 'include_granted_scopes' in self._idp['extraAuthorizeParameters']:
val = ansible_bool(self._idp['extraAuthorizeParameters'].pop('include_granted_scopes'))
self._idp['extraAuthorizeParameters']['include_granted_scopes'] = val
def validate(self):
''' validate this idp instance '''
IdentityProviderOauthBase.validate(self)
if not isinstance(self.provider['claims'], dict):
raise errors.AnsibleFilterError("|failed claims for provider {0} "
"must be a dictionary".format(self.__class__.__name__))
        if 'extraScopes' in self.provider and not isinstance(self.provider['extraScopes'], list):
            raise errors.AnsibleFilterError("|failed extraScopes for provider "
                                            "{0} must be a list".format(self.__class__.__name__))
        if ('extraAuthorizeParameters' in self.provider
                and not isinstance(self.provider['extraAuthorizeParameters'], dict)):
            raise errors.AnsibleFilterError("|failed extraAuthorizeParameters "
                                            "for provider {0} must be a dictionary".format(self.__class__.__name__))
required_claims = ['id']
optional_claims = ['email', 'name', 'preferredUsername']
all_claims = required_claims + optional_claims
        for claim in required_claims:
            if claim not in self.provider['claims']:
raise errors.AnsibleFilterError("|failed {0} claim missing "
"for provider {1}".format(claim, self.__class__.__name__))
for claim in all_claims:
if claim in self.provider['claims'] and not isinstance(self.provider['claims'][claim], list):
raise errors.AnsibleFilterError("|failed {0} claims for "
"provider {1} must be a list".format(claim, self.__class__.__name__))
unknown_claims = set(self.provider['claims'].keys()) - set(all_claims)
if len(unknown_claims) > 0:
raise errors.AnsibleFilterError("|failed provider {0} has unknown "
"claims: {1}".format(self.__class__.__name__, ', '.join(unknown_claims)))
if not isinstance(self.provider['urls'], dict):
raise errors.AnsibleFilterError("|failed urls for provider {0} "
"must be a dictionary".format(self.__class__.__name__))
required_urls = ['authorize', 'token']
optional_urls = ['userInfo']
all_urls = required_urls + optional_urls
for url in required_urls:
if url not in self.provider['urls']:
raise errors.AnsibleFilterError("|failed {0} url missing for "
"provider {1}".format(url, self.__class__.__name__))
unknown_urls = set(self.provider['urls'].keys()) - set(all_urls)
if len(unknown_urls) > 0:
raise errors.AnsibleFilterError("|failed provider {0} has unknown "
"urls: {1}".format(self.__class__.__name__, ', '.join(unknown_urls)))
class GoogleIdentityProvider(IdentityProviderOauthBase):
""" GoogleIdentityProvider
Attributes:
Args:
api_version(str): OpenShift config version
idp (dict): idp config dict
Raises:
AnsibleFilterError:
"""
def __init__(self, api_version, idp):
IdentityProviderOauthBase.__init__(self, api_version, idp)
self._optional += [['hostedDomain', 'hosted_domain']]
class GitHubIdentityProvider(IdentityProviderOauthBase):
""" GitHubIdentityProvider
Attributes:
Args:
api_version(str): OpenShift config version
idp (dict): idp config dict
Raises:
AnsibleFilterError:
"""
def __init__(self, api_version, idp):
IdentityProviderOauthBase.__init__(self, api_version, idp)
self._optional += [['organizations']]
class FilterModule(object):
''' Custom ansible filters for use by the openshift_master role'''
@staticmethod
def translate_idps(idps, api_version):
''' Translates a list of dictionaries into a valid identityProviders config '''
idp_list = []
if not isinstance(idps, list):
raise errors.AnsibleFilterError("|failed expects to filter on a list of identity providers")
for idp in idps:
if not isinstance(idp, dict):
raise errors.AnsibleFilterError("|failed identity providers must be a list of dictionaries")
cur_module = sys.modules[__name__]
idp_class = getattr(cur_module, idp['kind'], None)
idp_inst = idp_class(api_version, idp) if idp_class is not None else IdentityProviderBase(api_version, idp)
idp_inst.set_provider_items()
idp_list.append(idp_inst)
IdentityProviderBase.validate_idp_list(idp_list)
return yaml.safe_dump([idp.to_dict() for idp in idp_list], default_flow_style=False)
@staticmethod
def validate_pcs_cluster(data, masters=None):
''' Validates output from "pcs status", ensuring that each master
provided is online.
Ex: data = ('...',
'PCSD Status:',
'master1.example.com: Online',
'master2.example.com: Online',
'master3.example.com: Online',
'...')
masters = ['master1.example.com',
'master2.example.com',
'master3.example.com']
returns True
'''
if not issubclass(type(data), basestring):
raise errors.AnsibleFilterError("|failed expects data is a string or unicode")
if not issubclass(type(masters), list):
raise errors.AnsibleFilterError("|failed expects masters is a list")
valid = True
for master in masters:
if "{0}: Online".format(master) not in data:
valid = False
return valid
def filters(self):
''' returns a mapping of filters to methods '''
return {"translate_idps": self.translate_idps,
"validate_pcs_cluster": self.validate_pcs_cluster}
from __future__ import unicode_literals
import frappe, json, os
from frappe.website.website_generator import WebsiteGenerator
from frappe import _, scrub
from frappe.utils import cstr
from frappe.website.utils import get_comment_list
from frappe.custom.doctype.customize_form.customize_form import docfield_properties
from frappe.core.doctype.file.file import get_max_file_size
from frappe.core.doctype.file.file import remove_file_by_url
from frappe.modules.utils import export_module_json, get_doc_module
from six.moves.urllib.parse import urlencode
from frappe.integrations.utils import get_payment_gateway_controller
from six import iteritems
class WebForm(WebsiteGenerator):
website = frappe._dict(
no_cache = 1
)
def onload(self):
super(WebForm, self).onload()
if self.is_standard and not frappe.conf.developer_mode:
self.use_meta_fields()
def validate(self):
super(WebForm, self).validate()
if not self.module:
self.module = frappe.db.get_value('DocType', self.doc_type, 'module')
if (not (frappe.flags.in_install or frappe.flags.in_patch or frappe.flags.in_test or frappe.flags.in_fixtures)
and self.is_standard and not frappe.conf.developer_mode):
frappe.throw(_("You need to be in developer mode to edit a Standard Web Form"))
if not frappe.flags.in_import:
self.validate_fields()
if self.accept_payment:
self.validate_payment_amount()
def validate_fields(self):
'''Validate all fields are present'''
from frappe.model import no_value_fields
missing = []
meta = frappe.get_meta(self.doc_type)
for df in self.web_form_fields:
if df.fieldname and (df.fieldtype not in no_value_fields and not meta.has_field(df.fieldname)):
missing.append(df.fieldname)
if missing:
frappe.throw(_('Following fields are missing:') + '<br>' + '<br>'.join(missing))
def validate_payment_amount(self):
if self.amount_based_on_field and not self.amount_field:
frappe.throw(_("Please select a Amount Field."))
elif not self.amount_based_on_field and not self.amount > 0:
frappe.throw(_("Amount must be greater than 0."))
def reset_field_parent(self):
'''Convert link fields to select with names as options'''
for df in self.web_form_fields:
df.parent = self.doc_type
def use_meta_fields(self):
'''Override default properties for standard web forms'''
meta = frappe.get_meta(self.doc_type)
for df in self.web_form_fields:
meta_df = meta.get_field(df.fieldname)
if not meta_df:
continue
for prop in docfield_properties:
if df.fieldtype==meta_df.fieldtype and prop not in ("idx",
"reqd", "default", "description", "default", "options",
"hidden", "read_only", "label"):
df.set(prop, meta_df.get(prop))
# TODO translate options of Select fields like Country
# export
def on_update(self):
"""
Writes the .txt for this page and if write_content is checked,
it will write out a .html file
"""
path = export_module_json(self, self.is_standard, self.module)
if path:
# js
if not os.path.exists(path + '.js'):
with open(path + '.js', 'w') as f:
f.write("""frappe.ready(function() {
// bind events here
})""")
# py
if not os.path.exists(path + '.py'):
with open(path + '.py', 'w') as f:
f.write("""from __future__ import unicode_literals
import frappe
def get_context(context):
# do your magic here
pass
""")
def get_context(self, context):
'''Build context to render the `web_form.html` template'''
self.set_web_form_module()
context._login_required = False
if self.login_required and frappe.session.user == "Guest":
context._login_required = True
doc, delimeter = make_route_string(frappe.form_dict)
context.doc = doc
context.delimeter = delimeter
# check permissions
if frappe.session.user == "Guest" and frappe.form_dict.name:
frappe.throw(_("You need to be logged in to access this {0}.").format(self.doc_type), frappe.PermissionError)
if frappe.form_dict.name and not has_web_form_permission(self.doc_type, frappe.form_dict.name):
frappe.throw(_("You don't have the permissions to access this document"), frappe.PermissionError)
self.reset_field_parent()
if self.is_standard:
self.use_meta_fields()
if not context._login_required:
if self.allow_edit:
if self.allow_multiple:
if not frappe.form_dict.name and not frappe.form_dict.new:
self.build_as_list(context)
else:
if frappe.session.user != 'Guest' and not frappe.form_dict.name:
frappe.form_dict.name = frappe.db.get_value(self.doc_type, {"owner": frappe.session.user}, "name")
if not frappe.form_dict.name:
# only a single doc allowed and no existing doc, hence new
frappe.form_dict.new = 1
# always render new form if login is not required or doesn't allow editing existing ones
if not self.login_required or not self.allow_edit:
frappe.form_dict.new = 1
self.load_document(context)
context.parents = self.get_parents(context)
if self.breadcrumbs:
context.parents = frappe.safe_eval(self.breadcrumbs, { "_": _ })
context.has_header = ((frappe.form_dict.name or frappe.form_dict.new)
and (frappe.session.user!="Guest" or not self.login_required))
if context.success_message:
context.success_message = frappe.db.escape(context.success_message.replace("\n",
"<br>")).strip("'")
self.add_custom_context_and_script(context)
if not context.max_attachment_size:
context.max_attachment_size = get_max_file_size() / 1024 / 1024
context.show_in_grid = self.show_in_grid
def load_document(self, context):
'''Load document `doc` and `layout` properties for template'''
if frappe.form_dict.name or frappe.form_dict.new:
context.layout = self.get_layout()
context.parents = [{"route": self.route, "label": _(self.title) }]
if frappe.form_dict.name:
context.doc = frappe.get_doc(self.doc_type, frappe.form_dict.name)
context.title = context.doc.get(context.doc.meta.get_title_field())
context.doc.add_seen()
context.reference_doctype = context.doc.doctype
context.reference_name = context.doc.name
if self.show_attachments:
context.attachments = frappe.get_all('File', filters= {"attached_to_name": context.reference_name, "attached_to_doctype": context.reference_doctype, "is_private": 0},
fields=['file_name','file_url', 'file_size'])
if self.allow_comments:
context.comment_list = get_comment_list(context.doc.doctype,
context.doc.name)
def build_as_list(self, context):
		'''Web form is a list; render as list.html'''
from frappe.www.list import get_context as get_list_context
# set some flags to make list.py/list.html happy
frappe.form_dict.web_form_name = self.name
frappe.form_dict.doctype = self.doc_type
frappe.flags.web_form = self
self.update_params_from_form_dict(context)
self.update_list_context(context)
get_list_context(context)
context.is_list = True
def update_params_from_form_dict(self, context):
'''Copy params from list view to new view'''
context.params_from_form_dict = ''
params = {}
for key, value in iteritems(frappe.form_dict):
if frappe.get_meta(self.doc_type).get_field(key):
params[key] = value
if params:
context.params_from_form_dict = '&' + urlencode(params)
def update_list_context(self, context):
		'''update list context for standard modules'''
if hasattr(self, 'web_form_module') and hasattr(self.web_form_module, 'get_list_context'):
self.web_form_module.get_list_context(context)
def get_payment_gateway_url(self, doc):
if self.accept_payment:
controller = get_payment_gateway_controller(self.payment_gateway)
title = "Payment for {0} {1}".format(doc.doctype, doc.name)
amount = self.amount
if self.amount_based_on_field:
amount = doc.get(self.amount_field)
payment_details = {
"amount": amount,
"title": title,
"description": title,
"reference_doctype": doc.doctype,
"reference_docname": doc.name,
"payer_email": frappe.session.user,
"payer_name": frappe.utils.get_fullname(frappe.session.user),
"order_id": doc.name,
"currency": self.currency,
"redirect_to": frappe.utils.get_url(self.route)
}
# Redirect the user to this url
return controller.get_payment_url(**payment_details)
def add_custom_context_and_script(self, context):
'''Update context from module if standard and append script'''
if self.web_form_module:
new_context = self.web_form_module.get_context(context)
if new_context:
context.update(new_context)
js_path = os.path.join(os.path.dirname(self.web_form_module.__file__), scrub(self.name) + '.js')
if os.path.exists(js_path):
context.script = frappe.render_template(open(js_path, 'r').read(), context)
css_path = os.path.join(os.path.dirname(self.web_form_module.__file__), scrub(self.name) + '.css')
if os.path.exists(css_path):
context.style = open(css_path, 'r').read()
def get_layout(self):
layout = []
def add_page(df=None):
new_page = {'sections': []}
layout.append(new_page)
if df and df.fieldtype=='Page Break':
new_page.update(df.as_dict())
return new_page
def add_section(df=None):
new_section = {'columns': []}
if layout:
layout[-1]['sections'].append(new_section)
if df and df.fieldtype=='Section Break':
new_section.update(df.as_dict())
return new_section
def add_column(df=None):
new_col = []
if layout:
layout[-1]['sections'][-1]['columns'].append(new_col)
return new_col
page, section, column = None, None, None
for df in self.web_form_fields:
# breaks
if df.fieldtype=='Page Break':
page = add_page(df)
section, column = None, None
if df.fieldtype=='Section Break':
section = add_section(df)
column = None
if df.fieldtype=='Column Break':
column = add_column(df)
# input
if df.fieldtype not in ('Section Break', 'Column Break', 'Page Break'):
if not page:
page = add_page()
section, column = None, None
if not section:
section = add_section()
column = None
				if column is None:
column = add_column()
column.append(df)
return layout
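	# e.g. web_form_fields of [A, Column Break, B] produce
	# [{'sections': [{'columns': [[A], [B]]}]}]: an implicit page and section
	# are created, and the Column Break starts a second column for B.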
def get_parents(self, context):
parents = None
if context.is_list and not context.parents:
parents = [{"title": _("My Account"), "name": "me"}]
elif context.parents:
parents = context.parents
return parents
def set_web_form_module(self):
'''Get custom web form module if exists'''
if self.is_standard:
self.web_form_module = get_doc_module(self.module, self.doctype, self.name)
else:
self.web_form_module = None
def validate_mandatory(self, doc):
'''Validate mandatory web form fields'''
missing = []
for f in self.web_form_fields:
if f.reqd and doc.get(f.fieldname) in (None, [], ''):
missing.append(f)
if missing:
frappe.throw(_('Mandatory Information missing:') + '<br><br>'
+ '<br>'.join(['{0} ({1})'.format(d.label, d.fieldtype) for d in missing]))
@frappe.whitelist(allow_guest=True)
def accept(web_form, data, for_payment=False):
'''Save the web form'''
data = frappe._dict(json.loads(data))
files = []
files_to_delete = []
web_form = frappe.get_doc("Web Form", web_form)
if data.doctype != web_form.doc_type:
frappe.throw(_("Invalid Request"))
elif data.name and not web_form.allow_edit:
frappe.throw(_("You are not allowed to update this Web Form Document"))
frappe.flags.in_web_form = True
meta = frappe.get_meta(data.doctype)
if data.name:
# update
doc = frappe.get_doc(data.doctype, data.name)
else:
# insert
doc = frappe.new_doc(data.doctype)
# set values
for field in web_form.web_form_fields:
fieldname = field.fieldname
df = meta.get_field(fieldname)
value = data.get(fieldname, None)
if df and df.fieldtype in ('Attach', 'Attach Image'):
		if value and 'data:' in value and 'base64' in value:
files.append((fieldname, value))
if not doc.name:
doc.set(fieldname, '')
continue
elif not value and doc.get(fieldname):
files_to_delete.append(doc.get(fieldname))
doc.set(fieldname, value)
if for_payment:
web_form.validate_mandatory(doc)
doc.run_method('validate_payment')
if doc.name:
if has_web_form_permission(doc.doctype, doc.name, "write"):
doc.save(ignore_permissions=True)
else:
# only if permissions are present
doc.save()
else:
# insert
if web_form.login_required and frappe.session.user=="Guest":
frappe.throw(_("You must login to submit this form"))
		ignore_mandatory = bool(files)
doc.insert(ignore_permissions = True, ignore_mandatory = ignore_mandatory)
# add files
if files:
for f in files:
fieldname, filedata = f
# remove earlier attached file (if exists)
if doc.get(fieldname):
remove_file_by_url(doc.get(fieldname), doctype=doc.doctype, name=doc.name)
# save new file
filename, dataurl = filedata.split(',', 1)
_file = frappe.get_doc({
"doctype": "File",
"file_name": filename,
"attached_to_doctype": doc.doctype,
"attached_to_name": doc.name,
"content": dataurl,
"decode": True})
_file.save()
# update values
doc.set(fieldname, _file.file_url)
doc.save(ignore_permissions = True)
if files_to_delete:
for f in files_to_delete:
if f:
				remove_file_by_url(f, doctype=doc.doctype, name=doc.name)
frappe.flags.web_form_doc = doc
if for_payment:
return web_form.get_payment_gateway_url(doc)
else:
return doc.as_dict()
@frappe.whitelist()
def delete(web_form_name, docname):
web_form = frappe.get_doc("Web Form", web_form_name)
owner = frappe.db.get_value(web_form.doc_type, docname, "owner")
if frappe.session.user == owner and web_form.allow_delete:
frappe.delete_doc(web_form.doc_type, docname, ignore_permissions=True)
else:
raise frappe.PermissionError("Not Allowed")
@frappe.whitelist()
def delete_multiple(web_form_name, docnames):
web_form = frappe.get_doc("Web Form", web_form_name)
docnames = json.loads(docnames)
allowed_docnames = []
restricted_docnames = []
for docname in docnames:
owner = frappe.db.get_value(web_form.doc_type, docname, "owner")
if frappe.session.user == owner and web_form.allow_delete:
allowed_docnames.append(docname)
else:
restricted_docnames.append(docname)
for docname in allowed_docnames:
frappe.delete_doc(web_form.doc_type, docname, ignore_permissions=True)
if restricted_docnames:
raise frappe.PermissionError("You do not have permisssion to delete " + ", ".join(restricted_docnames))
def has_web_form_permission(doctype, name, ptype='read'):
if frappe.session.user=="Guest":
return False
# owner matches
elif frappe.db.get_value(doctype, name, "owner")==frappe.session.user:
return True
elif frappe.has_website_permission(name, ptype=ptype, doctype=doctype):
return True
elif check_webform_perm(doctype, name):
return True
else:
return False
def check_webform_perm(doctype, name):
doc = frappe.get_doc(doctype, name)
if hasattr(doc, "has_webform_permission"):
if doc.has_webform_permission():
return True
def get_web_form_list(doctype, txt, filters, limit_start, limit_page_length=20, order_by=None):
from frappe.www.list import get_list
if not filters:
filters = {}
filters["owner"] = frappe.session.user
return get_list(doctype, txt, filters, limit_start, limit_page_length, order_by=order_by,
ignore_permissions=True)
def make_route_string(parameters):
route_string = ""
delimeter = '?'
if isinstance(parameters, dict):
for key in parameters:
if key != "web_form_name":
				route_string += delimeter + key + "=" + cstr(parameters[key])
delimeter = '&'
return (route_string, delimeter)
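# e.g. make_route_string({'new': 1}) returns ('?new=1', '&'): the query string
# for the document plus the delimeter to use for any further parameters.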
@frappe.whitelist(allow_guest=True)
def get_form_data(doctype, docname=None, web_form_name=None):
out = frappe._dict()
if docname:
doc = frappe.get_doc(doctype, docname)
if has_web_form_permission(doctype, docname, ptype='read'):
out.doc = doc
else:
frappe.throw(_("Not permitted"), frappe.PermissionError)
out.web_form = frappe.get_doc('Web Form', web_form_name)
# For Table fields, server-side processing for meta
for field in out.web_form.web_form_fields:
if field.fieldtype == "Table":
field.fields = get_in_list_view_fields(field.options)
out.update({field.fieldname: field.fields})
if field.fieldtype == "Link":
field.fieldtype = "Autocomplete"
field.options = get_link_options(
web_form_name,
field.options,
field.allow_read_on_all_link_options
)
return out
@frappe.whitelist()
def get_in_list_view_fields(doctype):
return [df.as_dict() for df in frappe.get_meta(doctype).fields if df.in_list_view]
@frappe.whitelist(allow_guest=True)
def get_link_options(web_form_name, doctype, allow_read_on_all_link_options=False):
web_form_doc = frappe.get_doc("Web Form", web_form_name)
doctype_validated = False
limited_to_user = False
if web_form_doc.login_required:
		# check that the frappe session user is not a guest
if frappe.session.user != 'Guest':
doctype_validated = True
if not allow_read_on_all_link_options:
limited_to_user = True
else:
for field in web_form_doc.web_form_fields:
if field.options == doctype:
doctype_validated = True
break
if doctype_validated:
link_options = []
if limited_to_user:
link_options = "\n".join([doc.name for doc in frappe.get_all(doctype, filters = {"owner":frappe.session.user})])
else:
link_options = "\n".join([doc.name for doc in frappe.get_all(doctype)])
return link_options
else:
raise frappe.PermissionError('Not Allowed, {0}'.format(doctype))
"""
SQLite3 backend for django.
Python 2.3 and 2.4 require pysqlite2 (http://pysqlite.org/).
Python 2.5 and later can use a pysqlite2 module or the sqlite3 module in the
standard library.
"""
from django.db.backends import *
from django.db.backends.signals import connection_created
from django.db.backends.sqlite3.client import DatabaseClient
from django.db.backends.sqlite3.creation import DatabaseCreation
from django.db.backends.sqlite3.introspection import DatabaseIntrospection
from django.utils.safestring import SafeString
try:
try:
from pysqlite2 import dbapi2 as Database
except ImportError, e1:
from sqlite3 import dbapi2 as Database
except ImportError, exc:
import sys
from django.core.exceptions import ImproperlyConfigured
if sys.version_info < (2, 5, 0):
module = 'pysqlite2 module'
exc = e1
else:
module = 'either pysqlite2 or sqlite3 modules (tried in that order)'
raise ImproperlyConfigured, "Error loading %s: %s" % (module, exc)
try:
import decimal
except ImportError:
from django.utils import _decimal as decimal # for Python 2.3
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
Database.register_converter("bool", lambda s: str(s) == '1')
Database.register_converter("time", util.typecast_time)
Database.register_converter("date", util.typecast_date)
Database.register_converter("datetime", util.typecast_timestamp)
Database.register_converter("timestamp", util.typecast_timestamp)
Database.register_converter("TIMESTAMP", util.typecast_timestamp)
Database.register_converter("decimal", util.typecast_decimal)
Database.register_adapter(decimal.Decimal, util.rev_typecast_decimal)
if Database.version_info >= (2, 4, 1):
    # Starting in 2.4.1, the str type is not accepted anymore, so we
    # convert all str objects to Unicode.
    # As registering an adapter for a primitive type causes a small
    # slow-down, this adapter is only registered for the sqlite3 versions
    # that need it.
    Database.register_adapter(str, lambda s: s.decode('utf-8'))
    Database.register_adapter(SafeString, lambda s: s.decode('utf-8'))
class DatabaseFeatures(BaseDatabaseFeatures):
# SQLite cannot handle us only partially reading from a cursor's result set
# and then writing the same rows to the database in another cursor. This
# setting ensures we always read result sets fully into memory all in one
# go.
can_use_chunked_reads = False
class DatabaseOperations(BaseDatabaseOperations):
def date_extract_sql(self, lookup_type, field_name):
# sqlite doesn't support extract, so we fake it with the user-defined
# function django_extract that's registered in connect().
return 'django_extract("%s", %s)' % (lookup_type.lower(), field_name)
def date_trunc_sql(self, lookup_type, field_name):
# sqlite doesn't support DATE_TRUNC, so we fake it with a user-defined
# function django_date_trunc that's registered in connect().
return 'django_date_trunc("%s", %s)' % (lookup_type.lower(), field_name)
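    # Illustrative output: date_extract_sql('month', '"pub_date"') returns
    # 'django_extract("month", "pub_date")', which SQLite evaluates via the
    # Python function registered on the connection in _cursor() below.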
def drop_foreignkey_sql(self):
return ""
def pk_default_value(self):
return 'NULL'
def quote_name(self, name):
if name.startswith('"') and name.endswith('"'):
return name # Quoting once is enough.
return '"%s"' % name
def no_limit_value(self):
return -1
def sql_flush(self, style, tables, sequences):
        # NB: The generated SQL below is specific to SQLite. Plain
        # DELETE FROM statements work here because SQLite doesn't
        # enforce cross-table constraints.
sql = ['%s %s %s;' % \
(style.SQL_KEYWORD('DELETE'),
style.SQL_KEYWORD('FROM'),
style.SQL_FIELD(self.quote_name(table))
) for table in tables]
# Note: No requirement for reset of auto-incremented indices (cf. other
# sql_flush() implementations). Just return SQL at this point
return sql
def year_lookup_bounds(self, value):
first = '%s-01-01'
second = '%s-12-31 23:59:59.999999'
return [first % value, second % value]
def convert_values(self, value, field):
"""SQLite returns floats when it should be returning decimals,
and gets dates and datetimes wrong.
For consistency with other backends, coerce when required.
"""
internal_type = field.get_internal_type()
if internal_type == 'DecimalField':
return util.typecast_decimal(field.format_number(value))
elif internal_type and internal_type.endswith('IntegerField') or internal_type == 'AutoField':
return int(value)
elif internal_type == 'DateField':
return util.typecast_date(value)
elif internal_type == 'DateTimeField':
return util.typecast_timestamp(value)
elif internal_type == 'TimeField':
return util.typecast_time(value)
# No field, or the field isn't known to be a decimal or integer
return value
class DatabaseWrapper(BaseDatabaseWrapper):
# SQLite requires LIKE statements to include an ESCAPE clause if the value
# being escaped has a percent or underscore in it.
# See http://www.sqlite.org/lang_expr.html for an explanation.
operators = {
'exact': '= %s',
'iexact': "LIKE %s ESCAPE '\\'",
'contains': "LIKE %s ESCAPE '\\'",
'icontains': "LIKE %s ESCAPE '\\'",
'regex': 'REGEXP %s',
'iregex': "REGEXP '(?i)' || %s",
'gt': '> %s',
'gte': '>= %s',
'lt': '< %s',
'lte': '<= %s',
'startswith': "LIKE %s ESCAPE '\\'",
'endswith': "LIKE %s ESCAPE '\\'",
'istartswith': "LIKE %s ESCAPE '\\'",
'iendswith': "LIKE %s ESCAPE '\\'",
}
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.features = DatabaseFeatures()
self.ops = DatabaseOperations()
self.client = DatabaseClient(self)
self.creation = DatabaseCreation(self)
self.introspection = DatabaseIntrospection(self)
self.validation = BaseDatabaseValidation()
def _cursor(self):
if self.connection is None:
settings_dict = self.settings_dict
if not settings_dict['DATABASE_NAME']:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured, "Please fill out DATABASE_NAME in the settings module before using the database."
kwargs = {
'database': settings_dict['DATABASE_NAME'],
'detect_types': Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES,
}
kwargs.update(settings_dict['DATABASE_OPTIONS'])
self.connection = Database.connect(**kwargs)
# Register extract, date_trunc, and regexp functions.
self.connection.create_function("django_extract", 2, _sqlite_extract)
self.connection.create_function("django_date_trunc", 2, _sqlite_date_trunc)
self.connection.create_function("regexp", 2, _sqlite_regexp)
connection_created.send(sender=self.__class__)
return self.connection.cursor(factory=SQLiteCursorWrapper)
def close(self):
# If database is in memory, closing the connection destroys the
# database. To prevent accidental data loss, ignore close requests on
# an in-memory db.
if self.settings_dict['DATABASE_NAME'] != ":memory:":
BaseDatabaseWrapper.close(self)
class SQLiteCursorWrapper(Database.Cursor):
"""
Django uses "format" style placeholders, but pysqlite2 uses "qmark" style.
This fixes it -- but note that if you want to use a literal "%s" in a query,
you'll need to use "%%s".
"""
def execute(self, query, params=()):
query = self.convert_query(query, len(params))
return Database.Cursor.execute(self, query, params)
def executemany(self, query, param_list):
try:
query = self.convert_query(query, len(param_list[0]))
return Database.Cursor.executemany(self, query, param_list)
except (IndexError,TypeError):
# No parameter list provided
return None
def convert_query(self, query, num_params):
return query % tuple("?" * num_params)
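# Illustrative example: convert_query('SELECT a FROM t WHERE a = %s AND b = %s', 2)
# returns 'SELECT a FROM t WHERE a = ? AND b = ?'.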
def _sqlite_extract(lookup_type, dt):
if dt is None:
return None
try:
dt = util.typecast_timestamp(dt)
except (ValueError, TypeError):
return None
if lookup_type == 'week_day':
return (dt.isoweekday() % 7) + 1
else:
return getattr(dt, lookup_type)
def _sqlite_date_trunc(lookup_type, dt):
try:
dt = util.typecast_timestamp(dt)
except (ValueError, TypeError):
return None
if lookup_type == 'year':
return "%i-01-01 00:00:00" % dt.year
elif lookup_type == 'month':
return "%i-%02i-01 00:00:00" % (dt.year, dt.month)
elif lookup_type == 'day':
return "%i-%02i-%02i 00:00:00" % (dt.year, dt.month, dt.day)
def _sqlite_regexp(re_pattern, re_string):
import re
try:
return bool(re.search(re_pattern, re_string))
except:
return False
# --- end of file: django/db/backends/sqlite3/base.py | repo: greggian/TapdIn | license: apache-2.0 ---
'''Unit tests for the twitter.py library'''
__author__ = '[email protected]'
import os
import simplejson
import time
import unittest
import twitter
class StatusTest(unittest.TestCase):
SAMPLE_JSON = '''{"created_at": "Fri Jan 26 23:17:14 +0000 2007", "id": 4391023, "text": "A l\u00e9gp\u00e1rn\u00e1s haj\u00f3m tele van angoln\u00e1kkal.", "user": {"description": "Canvas. JC Penny. Three ninety-eight.", "id": 718443, "location": "Okinawa, Japan", "name": "Kesuke Miyagi", "profile_image_url": "http:\/\/twitter.com\/system\/user\/profile_image\/718443\/normal\/kesuke.png", "screen_name": "kesuke", "url": "http:\/\/twitter.com\/kesuke"}}'''
def _GetSampleUser(self):
return twitter.User(id=718443,
name='Kesuke Miyagi',
screen_name='kesuke',
description=u'Canvas. JC Penny. Three ninety-eight.',
location='Okinawa, Japan',
url='http://twitter.com/kesuke',
profile_image_url='http://twitter.com/system/user/pro'
'file_image/718443/normal/kesuke.pn'
'g')
def _GetSampleStatus(self):
return twitter.Status(created_at='Fri Jan 26 23:17:14 +0000 2007',
id=4391023,
text=u'A légpárnás hajóm tele van angolnákkal.',
user=self._GetSampleUser())
def testInit(self):
'''Test the twitter.Status constructor'''
status = twitter.Status(created_at='Fri Jan 26 23:17:14 +0000 2007',
id=4391023,
text=u'A légpárnás hajóm tele van angolnákkal.',
user=self._GetSampleUser())
def testGettersAndSetters(self):
'''Test all of the twitter.Status getters and setters'''
status = twitter.Status()
status.SetId(4391023)
self.assertEqual(4391023, status.GetId())
created_at = time.mktime((2007, 1, 26, 23, 17, 14, -1, -1, -1))
status.SetCreatedAt('Fri Jan 26 23:17:14 +0000 2007')
self.assertEqual('Fri Jan 26 23:17:14 +0000 2007', status.GetCreatedAt())
self.assertEqual(created_at, status.GetCreatedAtInSeconds())
status.SetNow(created_at + 10)
self.assertEqual("about 10 seconds ago", status.GetRelativeCreatedAt())
status.SetText(u'A légpárnás hajóm tele van angolnákkal.')
self.assertEqual(u'A légpárnás hajóm tele van angolnákkal.',
status.GetText())
status.SetUser(self._GetSampleUser())
self.assertEqual(718443, status.GetUser().id)
def testProperties(self):
'''Test all of the twitter.Status properties'''
status = twitter.Status()
status.id = 1
self.assertEqual(1, status.id)
created_at = time.mktime((2007, 1, 26, 23, 17, 14, -1, -1, -1))
status.created_at = 'Fri Jan 26 23:17:14 +0000 2007'
self.assertEqual('Fri Jan 26 23:17:14 +0000 2007', status.created_at)
self.assertEqual(created_at, status.created_at_in_seconds)
status.now = created_at + 10
self.assertEqual('about 10 seconds ago', status.relative_created_at)
status.user = self._GetSampleUser()
self.assertEqual(718443, status.user.id)
def _ParseDate(self, string):
return time.mktime(time.strptime(string, '%b %d %H:%M:%S %Y'))
def testRelativeCreatedAt(self):
'''Test various permutations of Status relative_created_at'''
status = twitter.Status(created_at='Fri Jan 01 12:00:00 +0000 2007')
status.now = self._ParseDate('Jan 01 12:00:00 2007')
self.assertEqual('about a second ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:00:01 2007')
self.assertEqual('about a second ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:00:02 2007')
self.assertEqual('about 2 seconds ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:00:05 2007')
self.assertEqual('about 5 seconds ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:00:50 2007')
self.assertEqual('about a minute ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:01:00 2007')
self.assertEqual('about a minute ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:01:10 2007')
self.assertEqual('about a minute ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:02:00 2007')
self.assertEqual('about 2 minutes ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:31:50 2007')
self.assertEqual('about 31 minutes ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 12:50:00 2007')
self.assertEqual('about an hour ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 13:00:00 2007')
self.assertEqual('about an hour ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 13:10:00 2007')
self.assertEqual('about an hour ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 14:00:00 2007')
self.assertEqual('about 2 hours ago', status.relative_created_at)
status.now = self._ParseDate('Jan 01 19:00:00 2007')
self.assertEqual('about 7 hours ago', status.relative_created_at)
status.now = self._ParseDate('Jan 02 11:30:00 2007')
self.assertEqual('about a day ago', status.relative_created_at)
status.now = self._ParseDate('Jan 04 12:00:00 2007')
self.assertEqual('about 3 days ago', status.relative_created_at)
status.now = self._ParseDate('Feb 04 12:00:00 2007')
self.assertEqual('about 34 days ago', status.relative_created_at)
def testAsJsonString(self):
'''Test the twitter.Status AsJsonString method'''
self.assertEqual(StatusTest.SAMPLE_JSON,
self._GetSampleStatus().AsJsonString())
def testAsDict(self):
'''Test the twitter.Status AsDict method'''
status = self._GetSampleStatus()
data = status.AsDict()
self.assertEqual(4391023, data['id'])
self.assertEqual('Fri Jan 26 23:17:14 +0000 2007', data['created_at'])
self.assertEqual(u'A légpárnás hajóm tele van angolnákkal.', data['text'])
self.assertEqual(718443, data['user']['id'])
def testEq(self):
'''Test the twitter.Status __eq__ method'''
status = twitter.Status()
status.created_at = 'Fri Jan 26 23:17:14 +0000 2007'
status.id = 4391023
status.text = u'A légpárnás hajóm tele van angolnákkal.'
status.user = self._GetSampleUser()
self.assertEqual(status, self._GetSampleStatus())
def testNewFromJsonDict(self):
'''Test the twitter.Status NewFromJsonDict method'''
data = simplejson.loads(StatusTest.SAMPLE_JSON)
status = twitter.Status.NewFromJsonDict(data)
self.assertEqual(self._GetSampleStatus(), status)
class UserTest(unittest.TestCase):
SAMPLE_JSON = '''{"description": "Indeterminate things", "id": 673483, "location": "San Francisco, CA", "name": "DeWitt", "profile_image_url": "http:\/\/twitter.com\/system\/user\/profile_image\/673483\/normal\/me.jpg", "screen_name": "dewitt", "status": {"created_at": "Fri Jan 26 17:28:19 +0000 2007", "id": 4212713, "text": "\\"Select all\\" and archive your Gmail inbox. The page loads so much faster!"}, "url": "http:\/\/unto.net\/"}'''
def _GetSampleStatus(self):
return twitter.Status(created_at='Fri Jan 26 17:28:19 +0000 2007',
id=4212713,
text='"Select all" and archive your Gmail inbox. '
' The page loads so much faster!')
def _GetSampleUser(self):
return twitter.User(id=673483,
name='DeWitt',
screen_name='dewitt',
description=u'Indeterminate things',
location='San Francisco, CA',
url='http://unto.net/',
profile_image_url='http://twitter.com/system/user/prof'
'ile_image/673483/normal/me.jpg',
status=self._GetSampleStatus())
def testInit(self):
'''Test the twitter.User constructor'''
user = twitter.User(id=673483,
name='DeWitt',
screen_name='dewitt',
description=u'Indeterminate things',
url='http://twitter.com/dewitt',
profile_image_url='http://twitter.com/system/user/prof'
'ile_image/673483/normal/me.jpg',
status=self._GetSampleStatus())
def testGettersAndSetters(self):
'''Test all of the twitter.User getters and setters'''
user = twitter.User()
user.SetId(673483)
self.assertEqual(673483, user.GetId())
user.SetName('DeWitt')
self.assertEqual('DeWitt', user.GetName())
user.SetScreenName('dewitt')
self.assertEqual('dewitt', user.GetScreenName())
user.SetDescription('Indeterminate things')
self.assertEqual('Indeterminate things', user.GetDescription())
user.SetLocation('San Francisco, CA')
self.assertEqual('San Francisco, CA', user.GetLocation())
user.SetProfileImageUrl('http://twitter.com/system/user/profile_im'
'age/673483/normal/me.jpg')
self.assertEqual('http://twitter.com/system/user/profile_image/673'
'483/normal/me.jpg', user.GetProfileImageUrl())
user.SetStatus(self._GetSampleStatus())
self.assertEqual(4212713, user.GetStatus().id)
def testProperties(self):
'''Test all of the twitter.User properties'''
user = twitter.User()
user.id = 673483
self.assertEqual(673483, user.id)
user.name = 'DeWitt'
self.assertEqual('DeWitt', user.name)
user.screen_name = 'dewitt'
self.assertEqual('dewitt', user.screen_name)
user.description = 'Indeterminate things'
self.assertEqual('Indeterminate things', user.description)
user.location = 'San Francisco, CA'
self.assertEqual('San Francisco, CA', user.location)
user.profile_image_url = 'http://twitter.com/system/user/profile_i' \
'mage/673483/normal/me.jpg'
self.assertEqual('http://twitter.com/system/user/profile_image/6734'
'83/normal/me.jpg', user.profile_image_url)
    user.status = self._GetSampleStatus()
    self.assertEqual(4212713, user.status.id)
def testAsJsonString(self):
'''Test the twitter.User AsJsonString method'''
self.assertEqual(UserTest.SAMPLE_JSON,
self._GetSampleUser().AsJsonString())
def testAsDict(self):
'''Test the twitter.User AsDict method'''
user = self._GetSampleUser()
data = user.AsDict()
self.assertEqual(673483, data['id'])
self.assertEqual('DeWitt', data['name'])
self.assertEqual('dewitt', data['screen_name'])
self.assertEqual('Indeterminate things', data['description'])
self.assertEqual('San Francisco, CA', data['location'])
self.assertEqual('http://twitter.com/system/user/profile_image/6734'
'83/normal/me.jpg', data['profile_image_url'])
self.assertEqual('http://unto.net/', data['url'])
self.assertEqual(4212713, data['status']['id'])
def testEq(self):
'''Test the twitter.User __eq__ method'''
user = twitter.User()
user.id = 673483
user.name = 'DeWitt'
user.screen_name = 'dewitt'
user.description = 'Indeterminate things'
user.location = 'San Francisco, CA'
user.profile_image_url = 'http://twitter.com/system/user/profile_image/67' \
'3483/normal/me.jpg'
user.url = 'http://unto.net/'
user.status = self._GetSampleStatus()
self.assertEqual(user, self._GetSampleUser())
def testNewFromJsonDict(self):
'''Test the twitter.User NewFromJsonDict method'''
data = simplejson.loads(UserTest.SAMPLE_JSON)
user = twitter.User.NewFromJsonDict(data)
self.assertEqual(self._GetSampleUser(), user)
class FileCacheTest(unittest.TestCase):
def testInit(self):
"""Test the twitter._FileCache constructor"""
cache = twitter._FileCache()
self.assert_(cache is not None, 'cache is None')
def testSet(self):
"""Test the twitter._FileCache.Set method"""
cache = twitter._FileCache()
cache.Set("foo",'Hello World!')
cache.Remove("foo")
def testRemove(self):
"""Test the twitter._FileCache.Remove method"""
cache = twitter._FileCache()
cache.Set("foo",'Hello World!')
cache.Remove("foo")
data = cache.Get("foo")
self.assertEqual(data, None, 'data is not None')
def testGet(self):
"""Test the twitter._FileCache.Get method"""
cache = twitter._FileCache()
cache.Set("foo",'Hello World!')
data = cache.Get("foo")
self.assertEqual('Hello World!', data)
cache.Remove("foo")
def testGetCachedTime(self):
"""Test the twitter._FileCache.GetCachedTime method"""
now = time.time()
cache = twitter._FileCache()
cache.Set("foo",'Hello World!')
cached_time = cache.GetCachedTime("foo")
delta = cached_time - now
self.assert_(delta <= 1,
'Cached time differs from clock time by more than 1 second.')
cache.Remove("foo")
class ApiTest(unittest.TestCase):
def setUp(self):
self._urllib = MockUrllib()
api = twitter.Api(username='test', password='test')
api.SetCache(NullCache())
api.SetUrllib(self._urllib)
self._api = api
def testGetPublicTimeline(self):
'''Test the twitter.Api GetPublicTimeline method'''
self._AddHandler('http://twitter.com/statuses/public_timeline.json?since_id=12345',
curry(self._OpenTestData, 'public_timeline.json'))
statuses = self._api.GetPublicTimeline(since_id=12345)
# This is rather arbitrary, but spot checking is better than nothing
self.assertEqual(20, len(statuses))
self.assertEqual(89497702, statuses[0].id)
def testGetUserTimeline(self):
'''Test the twitter.Api GetUserTimeline method'''
self._AddHandler('http://twitter.com/statuses/user_timeline/kesuke.json?count=1&since=Tue%2C+27+Mar+2007+22%3A55%3A48+GMT',
curry(self._OpenTestData, 'user_timeline-kesuke.json'))
statuses = self._api.GetUserTimeline('kesuke', count=1, since='Tue, 27 Mar 2007 22:55:48 GMT')
# This is rather arbitrary, but spot checking is better than nothing
self.assertEqual(89512102, statuses[0].id)
self.assertEqual(718443, statuses[0].user.id)
def testGetFriendsTimeline(self):
'''Test the twitter.Api GetFriendsTimeline method'''
self._AddHandler('http://twitter.com/statuses/friends_timeline/kesuke.json',
curry(self._OpenTestData, 'friends_timeline-kesuke.json'))
statuses = self._api.GetFriendsTimeline('kesuke')
# This is rather arbitrary, but spot checking is better than nothing
self.assertEqual(20, len(statuses))
self.assertEqual(718443, statuses[0].user.id)
def testGetStatus(self):
'''Test the twitter.Api GetStatus method'''
self._AddHandler('http://twitter.com/statuses/show/89512102.json',
curry(self._OpenTestData, 'show-89512102.json'))
status = self._api.GetStatus(89512102)
self.assertEqual(89512102, status.id)
self.assertEqual(718443, status.user.id)
def testDestroyStatus(self):
'''Test the twitter.Api DestroyStatus method'''
self._AddHandler('http://twitter.com/statuses/destroy/103208352.json',
curry(self._OpenTestData, 'status-destroy.json'))
status = self._api.DestroyStatus(103208352)
self.assertEqual(103208352, status.id)
def testPostUpdate(self):
'''Test the twitter.Api PostUpdate method'''
self._AddHandler('http://twitter.com/statuses/update.json',
curry(self._OpenTestData, 'update.json'))
status = self._api.PostUpdate(u'Моё судно на воздушной подушке полно угрей')
# This is rather arbitrary, but spot checking is better than nothing
self.assertEqual(u'Моё судно на воздушной подушке полно угрей', status.text)
def testGetReplies(self):
'''Test the twitter.Api GetReplies method'''
self._AddHandler('http://twitter.com/statuses/replies.json',
curry(self._OpenTestData, 'replies.json'))
statuses = self._api.GetReplies()
self.assertEqual(36657062, statuses[0].id)
def testGetFriends(self):
'''Test the twitter.Api GetFriends method'''
self._AddHandler('http://twitter.com/statuses/friends.json',
curry(self._OpenTestData, 'friends.json'))
users = self._api.GetFriends()
buzz = [u.status for u in users if u.screen_name == 'buzz']
self.assertEqual(89543882, buzz[0].id)
def testGetFollowers(self):
'''Test the twitter.Api GetFollowers method'''
self._AddHandler('http://twitter.com/statuses/followers.json',
curry(self._OpenTestData, 'followers.json'))
users = self._api.GetFollowers()
# This is rather arbitrary, but spot checking is better than nothing
alexkingorg = [u.status for u in users if u.screen_name == 'alexkingorg']
self.assertEqual(89554432, alexkingorg[0].id)
def testGetFeatured(self):
'''Test the twitter.Api GetFeatured method'''
self._AddHandler('http://twitter.com/statuses/featured.json',
curry(self._OpenTestData, 'featured.json'))
users = self._api.GetFeatured()
# This is rather arbitrary, but spot checking is better than nothing
stevenwright = [u.status for u in users if u.screen_name == 'stevenwright']
self.assertEqual(86991742, stevenwright[0].id)
def testGetDirectMessages(self):
'''Test the twitter.Api GetDirectMessages method'''
self._AddHandler('http://twitter.com/direct_messages.json',
curry(self._OpenTestData, 'direct_messages.json'))
statuses = self._api.GetDirectMessages()
self.assertEqual(u'A légpárnás hajóm tele van angolnákkal.', statuses[0].text)
def testPostDirectMessage(self):
'''Test the twitter.Api PostDirectMessage method'''
self._AddHandler('http://twitter.com/direct_messages/new.json',
curry(self._OpenTestData, 'direct_messages-new.json'))
status = self._api.PostDirectMessage('test', u'Моё судно на воздушной подушке полно угрей')
# This is rather arbitrary, but spot checking is better than nothing
self.assertEqual(u'Моё судно на воздушной подушке полно угрей', status.text)
def testDestroyDirectMessage(self):
'''Test the twitter.Api DestroyDirectMessage method'''
self._AddHandler('http://twitter.com/direct_messages/destroy/3496342.json',
curry(self._OpenTestData, 'direct_message-destroy.json'))
status = self._api.DestroyDirectMessage(3496342)
# This is rather arbitrary, but spot checking is better than nothing
self.assertEqual(673483, status.sender_id)
def testCreateFriendship(self):
'''Test the twitter.Api CreateFriendship method'''
self._AddHandler('http://twitter.com/friendships/create/dewitt.json',
curry(self._OpenTestData, 'friendship-create.json'))
user = self._api.CreateFriendship('dewitt')
# This is rather arbitrary, but spot checking is better than nothing
self.assertEqual(673483, user.id)
def testDestroyFriendship(self):
'''Test the twitter.Api DestroyFriendship method'''
self._AddHandler('http://twitter.com/friendships/destroy/dewitt.json',
curry(self._OpenTestData, 'friendship-destroy.json'))
user = self._api.DestroyFriendship('dewitt')
# This is rather arbitrary, but spot checking is better than nothing
self.assertEqual(673483, user.id)
def testGetUser(self):
'''Test the twitter.Api GetUser method'''
self._AddHandler('http://twitter.com/users/show/dewitt.json',
curry(self._OpenTestData, 'show-dewitt.json'))
user = self._api.GetUser('dewitt')
self.assertEqual('dewitt', user.screen_name)
self.assertEqual(89586072, user.status.id)
def _AddHandler(self, url, callback):
self._urllib.AddHandler(url, callback)
def _GetTestDataPath(self, filename):
directory = os.path.dirname(os.path.abspath(__file__))
test_data_dir = os.path.join(directory, 'testdata')
return os.path.join(test_data_dir, filename)
def _OpenTestData(self, filename):
return open(self._GetTestDataPath(filename))
class MockUrllib(object):
'''A mock replacement for urllib that hardcodes specific responses.'''
def __init__(self):
self._handlers = {}
self.HTTPBasicAuthHandler = MockHTTPBasicAuthHandler
def AddHandler(self, url, callback):
self._handlers[url] = callback
def build_opener(self, *handlers):
return MockOpener(self._handlers)
class MockOpener(object):
'''A mock opener for urllib'''
def __init__(self, handlers):
self._handlers = handlers
def open(self, url, data=None):
if url in self._handlers:
return self._handlers[url]()
else:
raise Exception('Unexpected URL %s' % url)
class MockHTTPBasicAuthHandler(object):
'''A mock replacement for HTTPBasicAuthHandler'''
def add_password(self, realm, uri, user, passwd):
# TODO(dewitt): Add verification that the proper args are passed
pass
class NullCache(object):
'''A no-op replacement for the cache class'''
def Get(self, key):
return None
def Set(self, key, data):
pass
def Remove(self, key):
pass
def GetCachedTime(self, key):
return None
class curry:
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52549
def __init__(self, fun, *args, **kwargs):
self.fun = fun
self.pending = args[:]
self.kwargs = kwargs.copy()
def __call__(self, *args, **kwargs):
if kwargs and self.kwargs:
kw = self.kwargs.copy()
kw.update(kwargs)
else:
kw = kwargs or self.kwargs
return self.fun(*(self.pending + args), **kw)
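# Usage sketch: the handlers registered in ApiTest rely on this helper, e.g.
#   fetch = curry(self._OpenTestData, 'update.json')
#   fetch()  # equivalent to self._OpenTestData('update.json')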
def suite():
suite = unittest.TestSuite()
suite.addTests(unittest.makeSuite(FileCacheTest))
suite.addTests(unittest.makeSuite(StatusTest))
suite.addTests(unittest.makeSuite(UserTest))
suite.addTests(unittest.makeSuite(ApiTest))
return suite
if __name__ == '__main__':
unittest.main()
# --- end of file: libs/external_libs/python-twitter-0.5/twitter_test.py | repo: google-code-export/django-hotclub | license: mit ---
import logging
import pickle
from ray.rllib.utils.annotations import PublicAPI
logger = logging.getLogger(__name__)
try:
import requests # `requests` is not part of stdlib.
except ImportError:
requests = None
logger.warning(
"Couldn't import `requests` library. Be sure to install it on"
" the client side.")
@PublicAPI
class PolicyClient:
"""REST client to interact with a RLlib policy server."""
START_EPISODE = "START_EPISODE"
GET_ACTION = "GET_ACTION"
LOG_ACTION = "LOG_ACTION"
LOG_RETURNS = "LOG_RETURNS"
END_EPISODE = "END_EPISODE"
@PublicAPI
def __init__(self, address):
self._address = address
@PublicAPI
def start_episode(self, episode_id=None, training_enabled=True):
"""Record the start of an episode.
Arguments:
episode_id (str): Unique string id for the episode or None for
it to be auto-assigned.
training_enabled (bool): Whether to use experiences for this
episode to improve the policy.
Returns:
episode_id (str): Unique string id for the episode.
"""
return self._send({
"episode_id": episode_id,
"command": PolicyClient.START_EPISODE,
"training_enabled": training_enabled,
})["episode_id"]
@PublicAPI
def get_action(self, episode_id, observation):
"""Record an observation and get the on-policy action.
Arguments:
episode_id (str): Episode id returned from start_episode().
observation (obj): Current environment observation.
Returns:
action (obj): Action from the env action space.
"""
return self._send({
"command": PolicyClient.GET_ACTION,
"observation": observation,
"episode_id": episode_id,
})["action"]
@PublicAPI
def log_action(self, episode_id, observation, action):
"""Record an observation and (off-policy) action taken.
Arguments:
episode_id (str): Episode id returned from start_episode().
observation (obj): Current environment observation.
action (obj): Action for the observation.
"""
self._send({
"command": PolicyClient.LOG_ACTION,
"observation": observation,
"action": action,
"episode_id": episode_id,
})
@PublicAPI
def log_returns(self, episode_id, reward, info=None):
"""Record returns from the environment.
The reward will be attributed to the previous action taken by the
episode. Rewards accumulate until the next action. If no reward is
logged before the next action, a reward of 0.0 is assumed.
Arguments:
episode_id (str): Episode id returned from start_episode().
reward (float): Reward from the environment.
"""
self._send({
"command": PolicyClient.LOG_RETURNS,
"reward": reward,
"info": info,
"episode_id": episode_id,
})
@PublicAPI
def end_episode(self, episode_id, observation):
"""Record the end of an episode.
Arguments:
episode_id (str): Episode id returned from start_episode().
observation (obj): Current environment observation.
"""
self._send({
"command": PolicyClient.END_EPISODE,
"observation": observation,
"episode_id": episode_id,
})
def _send(self, data):
payload = pickle.dumps(data)
response = requests.post(self._address, data=payload)
if response.status_code != 200:
logger.error("Request failed {}: {}".format(response.text, data))
response.raise_for_status()
parsed = pickle.loads(response.content)
return parsed
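# Minimal client-side sketch (assumes a compatible policy server is listening
# at the given address; the URL and the obs/reward variables are illustrative):
#
#   client = PolicyClient("http://localhost:9900")
#   episode_id = client.start_episode()
#   action = client.get_action(episode_id, obs)
#   client.log_returns(episode_id, reward)
#   client.end_episode(episode_id, obs)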
# --- end of file: rllib/utils/policy_client.py | repo: stephanie-wang/ray | license: apache-2.0 ---
from __future__ import absolute_import
from django.utils.datastructures import SortedDict
from cloud.api import neutron
neutronclient = neutron.neutronclient
class Vip(neutron.NeutronAPIDictWrapper):
"""Wrapper for neutron load balancer vip."""
def __init__(self, apiresource):
super(Vip, self).__init__(apiresource)
class Pool(neutron.NeutronAPIDictWrapper):
"""Wrapper for neutron load balancer pool."""
def __init__(self, apiresource):
if 'provider' not in apiresource:
apiresource['provider'] = None
super(Pool, self).__init__(apiresource)
class Member(neutron.NeutronAPIDictWrapper):
"""Wrapper for neutron load balancer member."""
def __init__(self, apiresource):
super(Member, self).__init__(apiresource)
class PoolStats(neutron.NeutronAPIDictWrapper):
"""Wrapper for neutron load balancer pool stats."""
def __init__(self, apiresource):
super(PoolStats, self).__init__(apiresource)
class PoolMonitor(neutron.NeutronAPIDictWrapper):
"""Wrapper for neutron load balancer pool health monitor."""
def __init__(self, apiresource):
super(PoolMonitor, self).__init__(apiresource)
def vip_create(request, **kwargs):
"""Create a vip for a specified pool.
:param request: request context
:param address: virtual IP address
:param name: name for vip
:param description: description for vip
:param subnet_id: subnet_id for subnet of vip
:param protocol_port: transport layer port number for vip
:returns: Vip object
"""
body = {'vip': {'name': kwargs['name'],
'description': kwargs['description'],
'subnet_id': kwargs['subnet_id'],
'protocol_port': kwargs['protocol_port'],
'protocol': kwargs['protocol'],
'pool_id': kwargs['pool_id'],
'session_persistence': kwargs['session_persistence'],
'admin_state_up': kwargs['admin_state_up']
}}
if kwargs.get('connection_limit'):
body['vip']['connection_limit'] = kwargs['connection_limit']
if kwargs.get('address'):
body['vip']['address'] = kwargs['address']
vip = neutronclient(request).create_vip(body).get('vip')
return Vip(vip)
def vip_list(request, **kwargs):
vips = neutronclient(request).list_vips(**kwargs).get('vips')
return [Vip(v) for v in vips]
def vip_get(request, vip_id):
return _vip_get(request, vip_id, expand_resource=True)
def _vip_get(request, vip_id, expand_resource=False):
vip = neutronclient(request).show_vip(vip_id).get('vip')
if expand_resource:
vip['subnet'] = neutron.subnet_get(request, vip['subnet_id'])
vip['port'] = neutron.port_get(request, vip['port_id'])
vip['pool'] = _pool_get(request, vip['pool_id'])
return Vip(vip)
def vip_update(request, vip_id, **kwargs):
vip = neutronclient(request).update_vip(vip_id, kwargs).get('vip')
return Vip(vip)
def vip_delete(request, vip_id):
neutronclient(request).delete_vip(vip_id)
def pool_create(request, **kwargs):
"""Create a pool for specified protocol
:param request: request context
:param name: name for pool
:param description: description for pool
:param subnet_id: subnet_id for subnet of pool
:param protocol: load balanced protocol
:param lb_method: load balancer method
:param admin_state_up: admin state (default on)
"""
body = {'pool': {'name': kwargs['name'],
'description': kwargs['description'],
'subnet_id': kwargs['subnet_id'],
'protocol': kwargs['protocol'],
'lb_method': kwargs['lb_method'],
'admin_state_up': kwargs['admin_state_up'],
'provider': kwargs['provider'],
}}
pool = neutronclient(request).create_pool(body).get('pool')
return Pool(pool)
def _get_vip(request, pool, vip_dict, expand_name_only=False):
if pool['vip_id'] is not None:
try:
if vip_dict:
vip = vip_dict.get(pool['vip_id'])
else:
vip = _vip_get(request, pool['vip_id'])
except Exception:
vip = Vip({'id': pool['vip_id'], 'name': ''})
if expand_name_only:
vip = vip.name_or_id
return vip
else:
return None
def pool_list(request, **kwargs):
return _pool_list(request, expand_subnet=True, expand_vip=True, **kwargs)
def _pool_list(request, expand_subnet=False, expand_vip=False, **kwargs):
pools = neutronclient(request).list_pools(**kwargs).get('pools')
if expand_subnet:
subnets = neutron.subnet_list(request)
subnet_dict = SortedDict((s.id, s) for s in subnets)
for p in pools:
subnet = subnet_dict.get(p['subnet_id'])
p['subnet_name'] = subnet.cidr if subnet else None
if expand_vip:
vips = vip_list(request)
vip_dict = SortedDict((v.id, v) for v in vips)
for p in pools:
p['vip_name'] = _get_vip(request, p, vip_dict,
expand_name_only=True)
return [Pool(p) for p in pools]
def pool_get(request, pool_id):
return _pool_get(request, pool_id, expand_resource=True)
def _pool_get(request, pool_id, expand_resource=False):
pool = neutronclient(request).show_pool(pool_id).get('pool')
if expand_resource:
pool['subnet'] = neutron.subnet_get(request, pool['subnet_id'])
pool['vip'] = _get_vip(request, pool, vip_dict=None,
expand_name_only=False)
pool['members'] = _member_list(request, expand_pool=False,
pool_id=pool_id)
pool['health_monitors'] = pool_health_monitor_list(
request, id=pool['health_monitors'])
return Pool(pool)
def pool_update(request, pool_id, **kwargs):
pool = neutronclient(request).update_pool(pool_id, kwargs).get('pool')
return Pool(pool)
def pool_delete(request, pool):
neutronclient(request).delete_pool(pool)
# not linked to UI yet
def pool_stats(request, pool_id, **kwargs):
stats = neutronclient(request).retrieve_pool_stats(pool_id, **kwargs)
return PoolStats(stats)
def pool_health_monitor_create(request, **kwargs):
"""Create a health monitor
:param request: request context
:param type: type of monitor
:param delay: delay of monitor
:param timeout: timeout of monitor
:param max_retries: max retries [1..10]
:param http_method: http method
:param url_path: url path
:param expected_codes: http return code
:param admin_state_up: admin state
"""
monitor_type = kwargs['type'].upper()
body = {'health_monitor': {'type': monitor_type,
'delay': kwargs['delay'],
'timeout': kwargs['timeout'],
'max_retries': kwargs['max_retries'],
'admin_state_up': kwargs['admin_state_up']
}}
if monitor_type in ['HTTP', 'HTTPS']:
body['health_monitor']['http_method'] = kwargs['http_method']
body['health_monitor']['url_path'] = kwargs['url_path']
body['health_monitor']['expected_codes'] = kwargs['expected_codes']
mon = neutronclient(request).create_health_monitor(body).get(
'health_monitor')
return PoolMonitor(mon)
def pool_health_monitor_list(request, **kwargs):
monitors = neutronclient(request).list_health_monitors(
**kwargs).get('health_monitors')
return [PoolMonitor(m) for m in monitors]
def pool_health_monitor_get(request, monitor_id):
return _pool_health_monitor_get(request, monitor_id, expand_resource=True)
def _pool_health_monitor_get(request, monitor_id, expand_resource=False):
    monitor = neutronclient(request).show_health_monitor(
        monitor_id).get('health_monitor')
if expand_resource:
pool_ids = [p['pool_id'] for p in monitor['pools']]
monitor['pools'] = _pool_list(request, id=pool_ids)
return PoolMonitor(monitor)
def pool_health_monitor_update(request, monitor_id, **kwargs):
monitor = neutronclient(request).update_health_monitor(monitor_id, kwargs)
return PoolMonitor(monitor)
def pool_health_monitor_delete(request, mon_id):
neutronclient(request).delete_health_monitor(mon_id)
def member_create(request, **kwargs):
"""Create a load balance member
:param request: request context
:param pool_id: pool_id of pool for member
:param address: IP address
:param protocol_port: transport layer port number
:param weight: weight for member
:param admin_state_up: admin_state
"""
body = {'member': {'pool_id': kwargs['pool_id'],
'address': kwargs['address'],
'protocol_port': kwargs['protocol_port'],
'admin_state_up': kwargs['admin_state_up']
}}
if kwargs.get('weight'):
body['member']['weight'] = kwargs['weight']
member = neutronclient(request).create_member(body).get('member')
return Member(member)
def member_list(request, **kwargs):
return _member_list(request, expand_pool=True, **kwargs)
def _member_list(request, expand_pool, **kwargs):
members = neutronclient(request).list_members(**kwargs).get('members')
if expand_pool:
pools = _pool_list(request)
pool_dict = SortedDict((p.id, p) for p in pools)
for m in members:
m['pool_name'] = pool_dict.get(m['pool_id']).name_or_id
return [Member(m) for m in members]
def member_get(request, member_id):
return _member_get(request, member_id, expand_pool=True)
def _member_get(request, member_id, expand_pool):
member = neutronclient(request).show_member(member_id).get('member')
if expand_pool:
member['pool'] = _pool_get(request, member['pool_id'])
return Member(member)
def member_update(request, member_id, **kwargs):
member = neutronclient(request).update_member(member_id, kwargs)
return Member(member)
def member_delete(request, mem_id):
neutronclient(request).delete_member(mem_id)
def pool_monitor_association_create(request, **kwargs):
"""Associate a health monitor with pool
:param request: request context
:param monitor_id: id of monitor
:param pool_id: id of pool
"""
body = {'health_monitor': {'id': kwargs['monitor_id'], }}
neutronclient(request).associate_health_monitor(
kwargs['pool_id'], body)
def pool_monitor_association_delete(request, **kwargs):
"""Disassociate a health monitor from pool
:param request: request context
:param monitor_id: id of monitor
:param pool_id: id of pool
"""
neutronclient(request).disassociate_health_monitor(
kwargs['pool_id'], kwargs['monitor_id'])
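# Typical call flow (illustrative; request, sn and monitor are assumed to
# exist): create a pool, add a member, then associate a health monitor.
#
#   pool = pool_create(request, name='web', description='', subnet_id=sn,
#                      protocol='HTTP', lb_method='ROUND_ROBIN',
#                      provider=None, admin_state_up=True)
#   member_create(request, pool_id=pool.id, address='10.0.0.5',
#                 protocol_port=80, admin_state_up=True)
#   pool_monitor_association_create(request, pool_id=pool.id,
#                                   monitor_id=monitor.id)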
# --- end of file: eoncloud_web/cloud/api/lbaas.py | repo: eoncloud-dev/eonboard | license: apache-2.0 ---
import copy
import simplejson as json
from gettext import gettext as _
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from django.forms.util import ErrorList, ErrorDict
from django.shortcuts import render
from django.http import Http404
import reversion
from mozdns.address_record.models import AddressRecord
from mozdns.address_record.forms import AddressRecordFQDNForm
from mozdns.address_record.forms import AddressRecordForm
from mozdns.ptr.models import PTR
from mozdns.ptr.forms import PTRForm
from mozdns.srv.models import SRV
from mozdns.srv.forms import SRVForm, FQDNSRVForm
from mozdns.sshfp.models import SSHFP
from mozdns.sshfp.forms import SSHFPForm, FQDNSSHFPForm
from mozdns.txt.models import TXT
from mozdns.txt.forms import TXTForm, FQDNTXTForm
from mozdns.mx.models import MX
from mozdns.mx.forms import MXForm, FQDNMXForm
from mozdns.cname.models import CNAME
from mozdns.cname.forms import CNAMEFQDNForm, CNAMEForm
from mozdns.soa.models import SOA
from mozdns.soa.forms import SOAForm
from mozdns.domain.models import Domain
from mozdns.domain.forms import DomainForm
from mozdns.nameserver.models import Nameserver
from mozdns.nameserver.forms import NameserverForm
from mozdns.utils import ensure_label_domain, prune_tree
class RecordView(object):
form_template = 'record/ajax_form.html'
def get_context_data(self, context):
return context
def get(self, request, record_type, record_pk):
if not record_pk:
object_ = None
else:
try:
object_ = self.Klass.objects.get(pk=record_pk)
except self.Klass.DoesNotExist:
raise Http404
return self._display_object(request, object_, record_type, record_pk)
def _display_object(self, request, object_, record_type, record_pk):
domains = Domain.objects.filter(is_reverse=False)
if not object_:
form = self.DisplayForm()
else:
form = self.DisplayForm(instance=object_)
return render(request, self.form_template, {
'form': form,
'object_': object_,
'record_type': record_type if record_type else '',
'record_pk': record_pk if record_pk else '',
'domains': json.dumps([domain.name for domain in domains]),
})
def post(self, request, record_type, record_pk):
try:
object_ = self.Klass.objects.get(pk=str(record_pk))
except (ObjectDoesNotExist, ValueError):
# We will try to create an object
object_ = None
new_object, errors = self.post_handler(object_, record_type,
request.POST.copy())
if object_:
verb = "update"
else:
verb = "create"
object_ = new_object
if errors:
# Reload the object.
if object_:
object_ = self.Klass.objects.get(pk=object_.pk)
return_form = self.DisplayForm(request.POST)
return_form._errors = errors
message = "Errors during {0}. Commit Aborted.".format(verb)
else:
message = "Succesful {0}".format(verb)
return_form = self.DisplayForm(instance=new_object)
record_pk = new_object.pk
basic_context = {
'form': return_form,
'message': message,
'record_type': record_type,
'record_pk': record_pk,
'object_': object_
}
# Allow overrides
context = self.get_context_data(basic_context)
return render(request, self.form_template, context)
def modify_qd(self, qd):
fqdn = qd.pop('fqdn', [''])[0]
domain = None
# if record_type not in ('PTR', 'NS', 'DOMAIN', 'SOA'):
try:
label, domain = ensure_label_domain(fqdn)
            # If something goes bad later on, you must call prune_tree on
            # the domain. If you don't, there will be a domain leak.
except ValidationError, e:
errors = ErrorDict()
errors['fqdn'] = e.messages
return None, errors
qd['label'], qd['domain'] = label, str(domain.pk)
return qd, None
def post_handler(self, object_, record_type, orig_qd):
"""Create or update object_. qd is a QueryDict."""
        # If there are ever errors, we have to preserve the original qd.
        qd = copy.deepcopy(orig_qd)
comment = qd.pop('comment', [''])[0].strip()
# This little chunk of code could be factored out, but I think it's
# more clear when you see which objects don't need to call this in one
# spot.
qd, errors = self.modify_qd(qd)
if errors:
return None, errors
# Create a save-able form to create/update the object
if object_:
object_form = self.form(qd, instance=object_)
else:
object_form = self.form(qd)
if object_form.is_valid():
try:
object_ = object_form.save()
reversion.set_comment(comment)
except ValidationError, e:
if 'domain' in qd:
prune_tree(Domain.objects.get(pk=qd['domain']))
e_dict = ErrorDict()
e_dict['__all__'] = ErrorList(e.messages)
return None, e_dict
return object_, None
else:
if 'domain' in qd:
prune_tree(Domain.objects.get(pk=qd['domain']))
return None, object_form._errors
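# Subclass contract (see the concrete views below): each RecordView subclass
# supplies Klass (the model), form (the form used to save), and DisplayForm
# (the form rendered back to the client).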
def make_rdtype_tagger(tagged_klasses):
def tag(Klass):
tagged_klasses[Klass.__name__.strip('_')] = Klass
return Klass
return tag
obj_meta = {}
tag_rdtype = make_rdtype_tagger(obj_meta)
def get_obj_meta(record_type):
return obj_meta[record_type]
"""
Name the class the same as the rdtype it's standing for.
"""
@tag_rdtype
class A_(RecordView):
Klass = AddressRecord
form = AddressRecordForm
DisplayForm = AddressRecordFQDNForm
@tag_rdtype
class AAAA_(A_):
pass
@tag_rdtype
class CNAME_(RecordView):
Klass = CNAME
form = CNAMEForm
DisplayForm = CNAMEFQDNForm
@tag_rdtype
class DOMAIN_(RecordView):
Klass = Domain
form = DomainForm
DisplayForm = DomainForm
@tag_rdtype
class MX_(RecordView):
Klass = MX
form = MXForm
DisplayForm = FQDNMXForm
@tag_rdtype
class NS_(RecordView):
Klass = Nameserver
form = NameserverForm
DisplayForm = NameserverForm
def modify_qd(self, qd):
domain_pk = qd.pop('domain', '')[0]
try:
domain = Domain.objects.get(pk=domain_pk)
qd['domain'] = str(domain.pk)
except Domain.DoesNotExist:
error_message = _("Could not find domain with pk "
"'{0}'".format(domain_pk))
errors = ErrorDict()
errors['domain'] = [error_message]
return None, errors
return qd, None
@tag_rdtype
class PTR_(RecordView):
Klass = PTR
form = PTRForm
DisplayForm = PTRForm
def modify_qd(self, qd):
return qd, None
@tag_rdtype
class TXT_(RecordView):
Klass = TXT
form = TXTForm
DisplayForm = FQDNTXTForm
@tag_rdtype
class SSHFP_(RecordView):
Klass = SSHFP
form = SSHFPForm
DisplayForm = FQDNSSHFPForm
@tag_rdtype
class SOA_(RecordView):
Klass = SOA
form = SOAForm
DisplayForm = SOAForm
def modify_qd(self, qd):
return qd, None
@tag_rdtype
class SRV_(RecordView):
Klass = SRV
form = SRVForm
DisplayForm = FQDNSRVForm
# --- end of file: mozdns/record/utils.py | repo: rtucker-mozilla/mozilla_inventory | license: bsd-3-clause ---
from views_generator import ViewsStyleGenerator
import unittest
class ViewsStyleGeneratorTest(unittest.TestCase):
def setUp(self):
self.generator = ViewsStyleGenerator()
def assertEqualToFile(self, value, filename):
with open(filename) as f:
contents = f.read()
self.assertEqual(
value, contents,
'\n>>>>>\n%s<<<<<\n\ndoes not match\n\n>>>>>\n%s<<<<<' %
(value, contents))
def testColorTestJSON(self):
self.generator.AddJSONFileToModel('colors_test.json5')
self.generator.out_file_path = (
'tools/style_variable_generator/colors_test_expected.h')
self.assertEqualToFile(self.generator.Render(),
'colors_test_expected.h')
if __name__ == '__main__':
unittest.main()
# --- end of file: tools/style_variable_generator/views_generator_test.py | repo: endlessm/chromium-browser | license: bsd-3-clause ---
"""Support for Recollect Waste curbside collection pickup."""
from datetime import timedelta
import logging
import recollect_waste
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTR_PICKUP_TYPES = "pickup_types"
ATTR_AREA_NAME = "area_name"
CONF_PLACE_ID = "place_id"
CONF_SERVICE_ID = "service_id"
DEFAULT_NAME = "recollect_waste"
ICON = "mdi:trash-can-outline"
SCAN_INTERVAL = timedelta(days=1)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_PLACE_ID): cv.string,
vol.Required(CONF_SERVICE_ID): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
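# Example configuration.yaml entry (place_id and service_id values are
# illustrative):
#
#   sensor:
#     - platform: recollect_waste
#       place_id: "12345"
#       service_id: "678"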
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Recollect Waste platform."""
client = recollect_waste.RecollectWasteClient(
config[CONF_PLACE_ID], config[CONF_SERVICE_ID]
)
# Ensure the client can connect to the API successfully
# with given place_id and service_id.
try:
client.get_next_pickup()
except recollect_waste.RecollectWasteException as ex:
_LOGGER.error("Recollect Waste platform error. %s", ex)
return
add_entities([RecollectWasteSensor(config.get(CONF_NAME), client)], True)
class RecollectWasteSensor(Entity):
"""Recollect Waste Sensor."""
def __init__(self, name, client):
"""Initialize the sensor."""
self._attributes = {}
self._name = name
self._state = None
self.client = client
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return f"{self.client.place_id}{self.client.service_id}"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
@property
def icon(self):
"""Icon to use in the frontend."""
return ICON
def update(self):
"""Update device state."""
try:
pickup_event = self.client.get_next_pickup()
self._state = pickup_event.event_date
self._attributes.update(
{
ATTR_PICKUP_TYPES: pickup_event.pickup_types,
ATTR_AREA_NAME: pickup_event.area_name,
}
)
except recollect_waste.RecollectWasteException as ex:
_LOGGER.error("Recollect Waste platform error. %s", ex)
# --- end of file: homeassistant/components/recollect_waste/sensor.py | repo: tchellomello/home-assistant | license: apache-2.0 ---
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
if __name__ == '__main__':
setup(name='zahpeeapi',
version='0.0.16',
description='Zahpee API Python Client',
long_description=read('README.md'),
author="Zahpee Dev Team",
author_email="[email protected]",
license='MIT',
url="http://www.zahpee.com",
scripts=[],
packages=find_packages('src/main/python'),
package_dir={'': 'src/main/python'},
classifiers=['Development Status :: 3 - Alpha', 'Programming Language :: Python'],
entry_points={
'console_scripts': []
},
zip_safe=True)
# --- end of file: setup.py | repo: hekima/zahpee-api-python-client | license: mit ---
from .ar_model import AR
from .arima_model import ARMA, ARIMA
from . import vector_ar as var
from .arima_process import arma_generate_sample, ArmaProcess
from .vector_ar.var_model import VAR
from .vector_ar.svar_model import SVAR
from .vector_ar.dynamic import DynamicVAR
from .filters import api as filters
from . import tsatools
from .tsatools import (add_trend, detrend, lagmat, lagmat2ds, add_lag)
from . import interp
from . import stattools
from .stattools import *
from .base import datetools
from .seasonal import seasonal_decompose
from ..graphics import tsaplots as graphics
from .x13 import x13_arima_select_order
from .x13 import x13_arima_analysis
from .statespace import api as statespace
from .statespace.sarimax import SARIMAX
from .statespace.structural import UnobservedComponents
from .statespace.varmax import VARMAX
from .statespace.dynamic_factor import DynamicFactor
| {
"content_hash": "1b4b6d6ba21b45fe270f342aff569a28",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 70,
"avg_line_length": 38.69565217391305,
"alnum_prop": 0.8134831460674158,
"repo_name": "gef756/statsmodels",
"id": "3b8335949e753843028ca883af98c9981dfaf89d",
"size": "890",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "statsmodels/tsa/api.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AGS Script",
"bytes": "457842"
},
{
"name": "Assembly",
"bytes": "10509"
},
{
"name": "Batchfile",
"bytes": "351"
},
{
"name": "C",
"bytes": "12088"
},
{
"name": "HTML",
"bytes": "148470"
},
{
"name": "Matlab",
"bytes": "1383"
},
{
"name": "Python",
"bytes": "8609450"
},
{
"name": "R",
"bytes": "34228"
},
{
"name": "Stata",
"bytes": "41179"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lots_admin', '0025_auto_20161129_0909'),
]
operations = [
migrations.AlterField(
model_name='address',
name='street_dir',
field=models.CharField(max_length=15, null=True),
),
migrations.AlterField(
model_name='address',
name='street_type',
field=models.CharField(max_length=15, null=True),
),
migrations.AlterField(
model_name='address',
name='ward',
field=models.CharField(max_length=15, null=True),
),
migrations.AlterField(
model_name='address',
name='zip_code',
field=models.CharField(max_length=15, null=True),
),
]
| {
"content_hash": "2aa11aee326d4d36c49acfe0a54746c9",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 61,
"avg_line_length": 27.181818181818183,
"alnum_prop": 0.5518394648829431,
"repo_name": "datamade/large-lots",
"id": "f5121fcdf1d95223a7496ee944c417ccedd69487",
"size": "968",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lots_admin/migrations/0026_auto_20161129_0925.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "64596"
},
{
"name": "HTML",
"bytes": "284715"
},
{
"name": "Java",
"bytes": "504834"
},
{
"name": "JavaScript",
"bytes": "99708"
},
{
"name": "Makefile",
"bytes": "45589"
},
{
"name": "Python",
"bytes": "274413"
},
{
"name": "Shell",
"bytes": "339"
}
],
"symlink_target": ""
} |
from os import path
import pandas as pd
from econtools.metrics.api import reg, ivreg
class TestOLS_savemem(object):
@classmethod
def setup_class(cls):
"""Stata reg output from `sysuse auto; reg price mpg`"""
test_path = path.split(path.relpath(__file__))[0]
auto_path = path.join(test_path, 'data', 'auto.dta')
autodata = pd.read_stata(auto_path)
y = 'price'
x = ['mpg', 'length']
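        # save_mem=True should drop the large per-observation result
        # attributes (sample, resid, yhat) that the tests below assert are absent.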
cls.result = reg(autodata, y, x, addcons=True, save_mem=True)
def test_sample(self):
assert not hasattr(self.result, 'sample')
def test_resid(self):
assert not hasattr(self.result, 'resid')
def test_yhat(self):
assert not hasattr(self.result, 'yhat')
class TestTsls_savemem(object):
@classmethod
def setup_class(cls):
"""Stata reg output from `sysuse auto; reg price mpg`"""
test_path = path.split(path.relpath(__file__))[0]
auto_path = path.join(test_path, 'data', 'auto.dta')
autodata = pd.read_stata(auto_path)
y = 'price'
x = ['mpg', 'length']
z = ['trunk', 'weight']
w = []
cls.result = ivreg(autodata, y, x, z, w, addcons=True, save_mem=True)
def test_sample(self):
assert not hasattr(self.result, 'sample')
def test_resid(self):
assert not hasattr(self.result, 'resid')
def test_yhat(self):
assert not hasattr(self.result, 'yhat')
if __name__ == '__main__':
import pytest
pytest.main()
| {
"content_hash": "4c12de58418190c3afafa3c4c2c91adc",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 77,
"avg_line_length": 27.035714285714285,
"alnum_prop": 0.5898282694848085,
"repo_name": "dmsul/econtools",
"id": "3d1dbfef0b22fe043ec3a1fa192e9521252b2cbe",
"size": "1514",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "econtools/metrics/tests/test_savemem.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "165646"
},
{
"name": "Stata",
"bytes": "5107"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models
from .constants import nutrition_endpoints
from kolibri.core.auth.models import FacilityUser
from kolibri.core.auth.permissions.general import IsOwn
from kolibri.core.fields import JSONField
class PingbackNotification(models.Model):
id = models.CharField(max_length=50, primary_key=True)
version_range = models.CharField(max_length=50)
timestamp = models.DateField()
link_url = models.CharField(max_length=150, blank=True)
    i18n = JSONField(default=dict)
active = models.BooleanField(default=True)
source = models.CharField(max_length=20, choices=nutrition_endpoints.choices)
class PingbackNotificationDismissed(models.Model):
permissions = IsOwn()
user = models.ForeignKey(FacilityUser)
notification = models.ForeignKey(PingbackNotification)
class Meta:
unique_together = (("user", "notification"),)
| {
"content_hash": "53327d364f51c0fe7c16dc15c86d25e7",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 81,
"avg_line_length": 30.966666666666665,
"alnum_prop": 0.7545748116254036,
"repo_name": "mrpau/kolibri",
"id": "b09ba8709594d5d2a8a682678b7183234c7df704",
"size": "953",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "kolibri/core/analytics/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "601"
},
{
"name": "CSS",
"bytes": "1716299"
},
{
"name": "Dockerfile",
"bytes": "7303"
},
{
"name": "Gherkin",
"bytes": "278074"
},
{
"name": "HTML",
"bytes": "26440"
},
{
"name": "JavaScript",
"bytes": "1537923"
},
{
"name": "Makefile",
"bytes": "13308"
},
{
"name": "Python",
"bytes": "2298911"
},
{
"name": "Shell",
"bytes": "11777"
},
{
"name": "Vue",
"bytes": "1558714"
}
],
"symlink_target": ""
} |
"""Support for Wink binary sensors."""
import logging
import pywink
from homeassistant.components.binary_sensor import BinarySensorDevice
from . import DOMAIN, WinkDevice
_LOGGER = logging.getLogger(__name__)
# These are the available sensors mapped to binary_sensor class
SENSOR_TYPES = {
"brightness": "light",
"capturing_audio": "sound",
"capturing_video": None,
"co_detected": "gas",
"liquid_detected": "moisture",
"loudness": "sound",
"motion": "motion",
"noise": "sound",
"opened": "opening",
"presence": "occupancy",
"smoke_detected": "smoke",
"vibration": "vibration",
}
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Wink binary sensor platform."""
for sensor in pywink.get_sensors():
_id = sensor.object_id() + sensor.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
if sensor.capability() in SENSOR_TYPES:
add_entities([WinkBinarySensorDevice(sensor, hass)])
for key in pywink.get_keys():
_id = key.object_id() + key.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkBinarySensorDevice(key, hass)])
for sensor in pywink.get_smoke_and_co_detectors():
_id = sensor.object_id() + sensor.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkSmokeDetector(sensor, hass)])
for hub in pywink.get_hubs():
_id = hub.object_id() + hub.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkHub(hub, hass)])
for remote in pywink.get_remotes():
_id = remote.object_id() + remote.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkRemote(remote, hass)])
for button in pywink.get_buttons():
_id = button.object_id() + button.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkButton(button, hass)])
for gang in pywink.get_gangs():
_id = gang.object_id() + gang.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkGang(gang, hass)])
for door_bell_sensor in pywink.get_door_bells():
_id = door_bell_sensor.object_id() + door_bell_sensor.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkBinarySensorDevice(door_bell_sensor, hass)])
for camera_sensor in pywink.get_cameras():
_id = camera_sensor.object_id() + camera_sensor.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
try:
if camera_sensor.capability() in SENSOR_TYPES:
add_entities([WinkBinarySensorDevice(camera_sensor, hass)])
except AttributeError:
_LOGGER.info("Device isn't a sensor, skipping")
class WinkBinarySensorDevice(WinkDevice, BinarySensorDevice):
"""Representation of a Wink binary sensor."""
def __init__(self, wink, hass):
"""Initialize the Wink binary sensor."""
super().__init__(wink, hass)
if hasattr(self.wink, "unit"):
self._unit_of_measurement = self.wink.unit()
else:
self._unit_of_measurement = None
if hasattr(self.wink, "capability"):
self.capability = self.wink.capability()
else:
self.capability = None
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self.hass.data[DOMAIN]["entities"]["binary_sensor"].append(self)
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self.wink.state()
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return SENSOR_TYPES.get(self.capability)
@property
def device_state_attributes(self):
"""Return the device state attributes."""
return super().device_state_attributes
class WinkSmokeDetector(WinkBinarySensorDevice):
"""Representation of a Wink Smoke detector."""
@property
def device_state_attributes(self):
"""Return the device state attributes."""
_attributes = super().device_state_attributes
_attributes["test_activated"] = self.wink.test_activated()
return _attributes
class WinkHub(WinkBinarySensorDevice):
"""Representation of a Wink Hub."""
@property
def device_state_attributes(self):
"""Return the device state attributes."""
_attributes = super().device_state_attributes
_attributes["update_needed"] = self.wink.update_needed()
_attributes["firmware_version"] = self.wink.firmware_version()
_attributes["pairing_mode"] = self.wink.pairing_mode()
_kidde_code = self.wink.kidde_radio_code()
if _kidde_code is not None:
# The service call to set the Kidde code
# takes a string of 1s and 0s so it makes
# sense to display it to the user that way
_formatted_kidde_code = f"{_kidde_code:b}".zfill(8)
_attributes["kidde_radio_code"] = _formatted_kidde_code
return _attributes
class WinkRemote(WinkBinarySensorDevice):
"""Representation of a Wink Lutron Connected bulb remote."""
@property
def device_state_attributes(self):
"""Return the state attributes."""
_attributes = super().device_state_attributes
_attributes["button_on_pressed"] = self.wink.button_on_pressed()
_attributes["button_off_pressed"] = self.wink.button_off_pressed()
_attributes["button_up_pressed"] = self.wink.button_up_pressed()
_attributes["button_down_pressed"] = self.wink.button_down_pressed()
return _attributes
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return None
class WinkButton(WinkBinarySensorDevice):
"""Representation of a Wink Relay button."""
@property
def device_state_attributes(self):
"""Return the device state attributes."""
_attributes = super().device_state_attributes
_attributes["pressed"] = self.wink.pressed()
_attributes["long_pressed"] = self.wink.long_pressed()
return _attributes
class WinkGang(WinkBinarySensorDevice):
"""Representation of a Wink Relay gang."""
@property
def is_on(self):
"""Return true if the gang is connected."""
return self.wink.state()
| {
"content_hash": "3ea7068a2cfeaa0dddc79aa7caefd415",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 79,
"avg_line_length": 35.11290322580645,
"alnum_prop": 0.6239473281273924,
"repo_name": "qedi-r/home-assistant",
"id": "6dd22a3f7b8fd8686a5d8d1ccf8e9009d6ecdea3",
"size": "6531",
"binary": false,
"copies": "5",
"ref": "refs/heads/dev",
"path": "homeassistant/components/wink/binary_sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "18564720"
},
{
"name": "Shell",
"bytes": "6846"
}
],
"symlink_target": ""
} |
"""
Description:
- Simple Factory Pattern implementation
    - To illustrate the main idea, a base class App was created along with two page subclasses
@author: Paul Bodean
@date: 10/08/2017
"""
from typing import Union
from selenium.webdriver import Chrome, Firefox
from src.factory.pages.menu import Menu
from src.factory.pages.search import Search
class App(object):
"""
"""
def __init__(self, driver: Union[Chrome, Firefox]):
"""
:param driver: browser driver
:type driver: object
"""
self.__driver = driver
def factory(self, page: str):
"""
        The access method which handles page selection
:type page: str
"""
if page == 'Menu':
return Menu(self.__driver)
elif page == 'Search':
return Search(self.__driver)
else:
            raise NotImplementedError('Unknown page: %s' % page)
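# Hedged usage sketch (not part of the original module); assumes a local
# Chrome driver and the Menu/Search page objects imported above:
#
#     app = App(Chrome())
#     menu = app.factory('Menu')      # -> Menu page object
#     search = app.factory('Search')  # -> Search page object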
| {
"content_hash": "2c2d04a4f967d9d78448b0547c3d1372",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 80,
"avg_line_length": 21.536585365853657,
"alnum_prop": 0.6058890147225368,
"repo_name": "paulbodean88/automation-design-patterns",
"id": "4226d5d6d64c3e5f2b4d46ac45f94d3a7e879f41",
"size": "883",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/factory/simple_factory.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "40634"
}
],
"symlink_target": ""
} |
"""
The modelgen module provides classes for specifying designs for individual
subject analysis of task-based fMRI experiments. In particular it also includes
algorithms for generating regressors for sparse and sparse-clustered acquisition
experiments.
These functions include:
* SpecifyModel: allows specification of sparse and non-sparse models
Change directory to provide relative paths for doctests
>>> import os
>>> filepath = os.path.dirname( os.path.realpath( __file__ ) )
>>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data'))
>>> os.chdir(datadir)
"""
from copy import deepcopy
import os
from nibabel import load
import numpy as np
from scipy.special import gammaln
from nipype.interfaces.base import (BaseInterface, TraitedSpec, InputMultiPath,
traits, File, Bunch, BaseInterfaceInputSpec,
isdefined)
from nipype.utils.filemanip import filename_to_list
from .. import config, logging
from nipype.external import six
iflogger = logging.getLogger('interface')
def gcd(a, b):
"""Returns the greatest common divisor of two integers
uses Euclid's algorithm
>>> gcd(4, 5)
1
>>> gcd(4, 8)
4
>>> gcd(22, 55)
11
"""
    while b > 0:
        a, b = b, a % b
return a
def spm_hrf(RT, P=None, fMRI_T=16):
""" python implementation of spm_hrf
see spm_hrf for implementation details
% RT - scan repeat time
% p - parameters of the response function (two gamma
% functions)
% defaults (seconds)
% p(0) - delay of response (relative to onset) 6
% p(1) - delay of undershoot (relative to onset) 16
% p(2) - dispersion of response 1
% p(3) - dispersion of undershoot 1
% p(4) - ratio of response to undershoot 6
% p(5) - onset (seconds) 0
% p(6) - length of kernel (seconds) 32
%
% hrf - hemodynamic response function
% p - parameters of the response function
the following code using scipy.stats.distributions.gamma
doesn't return the same result as the spm_Gpdf function ::
hrf = gamma.pdf(u, p[0]/p[2], scale=dt/p[2]) -
gamma.pdf(u, p[1]/p[3], scale=dt/p[3])/p[4]
>>> print spm_hrf(2)
[ 0.00000000e+00 8.65660810e-02 3.74888236e-01 3.84923382e-01
2.16117316e-01 7.68695653e-02 1.62017720e-03 -3.06078117e-02
-3.73060781e-02 -3.08373716e-02 -2.05161334e-02 -1.16441637e-02
-5.82063147e-03 -2.61854250e-03 -1.07732374e-03 -4.10443522e-04
-1.46257507e-04]
"""
p = np.array([6, 16, 1, 1, 6, 0, 32], dtype=float)
if P is not None:
p[0:len(P)] = P
_spm_Gpdf = lambda x, h, l: np.exp(h * np.log(l) + (h - 1) * np.log(x) - (l * x) - gammaln(h))
# modelled hemodynamic response function - {mixture of Gammas}
dt = RT / float(fMRI_T)
u = np.arange(0, int(p[6] / dt + 1)) - p[5] / dt
hrf = _spm_Gpdf(u, p[0] / p[2], dt / p[2]) - _spm_Gpdf(u, p[1] / p[3],
dt / p[3]) / p[4]
idx = np.arange(0, int((p[6] / RT) + 1)) * fMRI_T
hrf = hrf[idx]
hrf = hrf / np.sum(hrf)
return hrf
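# Hedged usage note (assumption, not part of the original module): passing a
# partial parameter list only overrides the leading entries, e.g.
#     hrf = spm_hrf(2.0, P=[7])
# changes the response delay p(0) to 7 s and keeps the remaining defaults.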
def orth(x_in, y_in):
"""Orthoganlize y_in with respect to x_in
>>> orth_expected = np.array([1.7142857142857144, 0.42857142857142883, \
-0.85714285714285676])
>>> err = np.abs(np.array(orth([1, 2, 3],[4, 5, 6]) - orth_expected))
>>> all(err < np.finfo(float).eps)
True
"""
x = np.array(x_in)[:, None]
y = np.array(y_in)[:, None]
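    # Least-squares projection: y_orth = y - x (x'x)^{-1} x'y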
y = y - np.dot(x, np.dot(np.linalg.inv(np.dot(x.T, x)), np.dot(x.T, y)))
if np.linalg.norm(y, 1) > np.exp(-32):
y = y[:, 0].tolist()
else:
y = y_in
return y
def scale_timings(timelist, input_units, output_units, time_repetition):
"""Scales timings given input and output units (scans/secs)
Parameters
----------
timelist: list of times to scale
input_units: 'secs' or 'scans'
output_units: Ibid.
time_repetition: float in seconds
"""
    if input_units == output_units:
_scalefactor = 1.
if (input_units == 'scans') and (output_units == 'secs'):
_scalefactor = time_repetition
if (input_units == 'secs') and (output_units == 'scans'):
_scalefactor = 1./time_repetition
timelist = [np.max([0., _scalefactor * t]) for t in timelist]
return timelist
def gen_info(run_event_files):
"""Generate subject_info structure from a list of event files
"""
info = []
for i, event_files in enumerate(run_event_files):
runinfo = Bunch(conditions=[], onsets=[], durations=[], amplitudes=[])
for event_file in event_files:
_, name = os.path.split(event_file)
if '.run' in name:
name, _ = name.split('.run%03d' % (i+1))
elif '.txt' in name:
name, _ = name.split('.txt')
runinfo.conditions.append(name)
event_info = np.atleast_2d(np.loadtxt(event_file))
runinfo.onsets.append(event_info[:, 0].tolist())
if event_info.shape[1] > 1:
runinfo.durations.append(event_info[:, 1].tolist())
else:
runinfo.durations.append([0])
if event_info.shape[1] > 2:
runinfo.amplitudes.append(event_info[:, 2].tolist())
else:
delattr(runinfo, 'amplitudes')
info.append(runinfo)
return info
class SpecifyModelInputSpec(BaseInterfaceInputSpec):
subject_info = InputMultiPath(Bunch, mandatory=True, xor=['subject_info',
'event_files'],
desc=("Bunch or List(Bunch) subject specific condition information. "
"see :ref:`SpecifyModel` or SpecifyModel.__doc__ for details"))
event_files = InputMultiPath(traits.List(File(exists=True)), mandatory=True,
xor=['subject_info', 'event_files'],
                desc=('list of event description files in 1, 2 or 3 column format '
'corresponding to onsets, durations and amplitudes'))
realignment_parameters = InputMultiPath(File(exists=True),
desc="Realignment parameters returned by motion correction algorithm",
copyfile=False)
outlier_files = InputMultiPath(File(exists=True),
desc="Files containing scan outlier indices that should be tossed",
copyfile=False)
functional_runs = InputMultiPath(traits.Either(traits.List(File(exists=True)),
File(exists=True)),
mandatory=True,
desc=("Data files for model. List of 4D files or list of list of 3D "
"files per session"), copyfile=False)
input_units = traits.Enum('secs', 'scans', mandatory=True,
desc=("Units of event onsets and durations (secs or scans). Output "
"units are always in secs"))
high_pass_filter_cutoff = traits.Float(mandatory=True,
desc="High-pass filter cutoff in secs")
time_repetition = traits.Float(mandatory=True,
desc=("Time between the start of one volume to the start of "
"the next image volume."))
# Not implemented yet
#polynomial_order = traits.Range(0, low=0,
# desc ="Number of polynomial functions to model high pass filter.")
class SpecifyModelOutputSpec(TraitedSpec):
session_info = traits.Any(desc="session info for level1designs")
class SpecifyModel(BaseInterface):
"""Makes a model specification compatible with spm/fsl designers.
The subject_info field should contain paradigm information in the form of
a Bunch or a list of Bunch. The Bunch should contain the following
information::
[Mandatory]
- conditions : list of names
- onsets : lists of onsets corresponding to each condition
- durations : lists of durations corresponding to each condition. Should be
left to a single 0 if all events are being modelled as impulses.
[Optional]
- regressor_names : list of str
list of names corresponding to each column. Should be None if
automatically assigned.
- regressors : list of lists
values for each regressor - must correspond to the number of
volumes in the functional run
- amplitudes : lists of amplitudes for each event. This will be ignored by
SPM's Level1Design.
The following two (tmod, pmod) will be ignored by any Level1Design class
other than SPM:
- tmod : lists of conditions that should be temporally modulated. Should
default to None if not being used.
- pmod : list of Bunch corresponding to conditions
- name : name of parametric modulator
- param : values of the modulator
- poly : degree of modulation
Alternatively, you can provide information through event files.
The event files have to be in 1, 2 or 3 column format with the columns
corresponding to Onsets, Durations and Amplitudes and they have to have the
name event_name.runXXX... e.g.: Words.run001.txt. The event_name part will
be used to create the condition names.
Examples
--------
>>> from nipype.interfaces.base import Bunch
>>> s = SpecifyModel()
>>> s.inputs.input_units = 'secs'
>>> s.inputs.functional_runs = ['functional2.nii', 'functional3.nii']
>>> s.inputs.time_repetition = 6
>>> s.inputs.high_pass_filter_cutoff = 128.
>>> info = [Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]],\
durations=[[1]]), \
Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], \
durations=[[1]])]
>>> s.inputs.subject_info = info
Using pmod:
>>> info = [Bunch(conditions=['cond1', 'cond2'], \
onsets=[[2, 50],[100, 180]], durations=[[0],[0]], \
pmod=[Bunch(name=['amp'], poly=[2], param=[[1, 2]]),\
None]), \
Bunch(conditions=['cond1', 'cond2'], \
onsets=[[20, 120],[80, 160]], durations=[[0],[0]], \
pmod=[Bunch(name=['amp'], poly=[2], param=[[1, 2]]), \
None])]
>>> s.inputs.subject_info = info
"""
input_spec = SpecifyModelInputSpec
output_spec = SpecifyModelOutputSpec
def _generate_standard_design(self, infolist,
functional_runs=None,
realignment_parameters=None,
outliers=None):
""" Generates a standard design matrix paradigm given information about
each run
"""
sessinfo = []
output_units = 'secs'
if 'output_units' in self.inputs.traits():
output_units = self.inputs.output_units
for i, info in enumerate(infolist):
sessinfo.insert(i, dict(cond=[]))
if isdefined(self.inputs.high_pass_filter_cutoff):
sessinfo[i]['hpf'] = \
np.float(self.inputs.high_pass_filter_cutoff)
if hasattr(info, 'conditions') and info.conditions is not None:
for cid, cond in enumerate(info.conditions):
sessinfo[i]['cond'].insert(cid, dict())
sessinfo[i]['cond'][cid]['name'] = info.conditions[cid]
scaled_onset = scale_timings(info.onsets[cid],
self.inputs.input_units,
output_units,
self.inputs.time_repetition)
sessinfo[i]['cond'][cid]['onset'] = scaled_onset
scaled_duration = scale_timings(info.durations[cid],
self.inputs.input_units,
output_units,
self.inputs.time_repetition)
sessinfo[i]['cond'][cid]['duration'] = scaled_duration
if hasattr(info, 'amplitudes') and info.amplitudes:
sessinfo[i]['cond'][cid]['amplitudes'] = \
info.amplitudes[cid]
if hasattr(info, 'tmod') and info.tmod and \
len(info.tmod) > cid:
sessinfo[i]['cond'][cid]['tmod'] = info.tmod[cid]
if hasattr(info, 'pmod') and info.pmod and \
len(info.pmod) > cid:
if info.pmod[cid]:
sessinfo[i]['cond'][cid]['pmod'] = []
for j, name in enumerate(info.pmod[cid].name):
sessinfo[i]['cond'][cid]['pmod'].insert(j, {})
sessinfo[i]['cond'][cid]['pmod'][j]['name'] = \
name
sessinfo[i]['cond'][cid]['pmod'][j]['poly'] = \
info.pmod[cid].poly[j]
sessinfo[i]['cond'][cid]['pmod'][j]['param'] = \
info.pmod[cid].param[j]
            sessinfo[i]['regress'] = []
if hasattr(info, 'regressors') and info.regressors is not None:
for j, r in enumerate(info.regressors):
sessinfo[i]['regress'].insert(j, dict(name='', val=[]))
if hasattr(info, 'regressor_names') and \
info.regressor_names is not None:
sessinfo[i]['regress'][j]['name'] = \
info.regressor_names[j]
else:
sessinfo[i]['regress'][j]['name'] = 'UR%d' % (j+1)
sessinfo[i]['regress'][j]['val'] = info.regressors[j]
sessinfo[i]['scans'] = functional_runs[i]
if realignment_parameters is not None:
for i, rp in enumerate(realignment_parameters):
mc = realignment_parameters[i]
for col in range(mc.shape[1]):
colidx = len(sessinfo[i]['regress'])
sessinfo[i]['regress'].insert(colidx, dict(name='', val=[]))
sessinfo[i]['regress'][colidx]['name'] = 'Realign%d' % (col + 1)
sessinfo[i]['regress'][colidx]['val'] = mc[:, col].tolist()
if outliers is not None:
for i, out in enumerate(outliers):
numscans = 0
for f in filename_to_list(sessinfo[i]['scans']):
shape = load(f).get_shape()
if len(shape) == 3 or shape[3] == 1:
iflogger.warning(("You are using 3D instead of 4D "
"files. Are you sure this was "
"intended?"))
numscans += 1
else:
numscans += shape[3]
for j, scanno in enumerate(out):
colidx = len(sessinfo[i]['regress'])
sessinfo[i]['regress'].insert(colidx, dict(name='', val=[]))
sessinfo[i]['regress'][colidx]['name'] = 'Outlier%d'%(j+1)
sessinfo[i]['regress'][colidx]['val'] = \
np.zeros((1, numscans))[0].tolist()
sessinfo[i]['regress'][colidx]['val'][int(scanno)] = 1
return sessinfo
def _generate_design(self, infolist=None):
"""Generate design specification for a typical fmri paradigm
"""
realignment_parameters = []
if isdefined(self.inputs.realignment_parameters):
for parfile in self.inputs.realignment_parameters:
realignment_parameters.append(np.loadtxt(parfile))
outliers = []
if isdefined(self.inputs.outlier_files):
for filename in self.inputs.outlier_files:
try:
outindices = np.loadtxt(filename, dtype=int)
except IOError:
outliers.append([])
else:
if outindices.size == 1:
outliers.append([outindices.tolist()])
else:
outliers.append(outindices.tolist())
if infolist is None:
if isdefined(self.inputs.subject_info):
infolist = self.inputs.subject_info
else:
infolist = gen_info(self.inputs.event_files)
self._sessinfo = self._generate_standard_design(infolist,
functional_runs=self.inputs.functional_runs,
realignment_parameters=realignment_parameters,
outliers=outliers)
def _run_interface(self, runtime):
"""
"""
self._sessioninfo = None
self._generate_design()
return runtime
def _list_outputs(self):
outputs = self._outputs().get()
if not hasattr(self, '_sessinfo'):
self._generate_design()
outputs['session_info'] = self._sessinfo
return outputs
class SpecifySPMModelInputSpec(SpecifyModelInputSpec):
concatenate_runs = traits.Bool(False, usedefault=True,
desc="Concatenate all runs to look like a single session.")
output_units = traits.Enum('secs', 'scans', usedefault=True,
desc="Units of design event onsets and durations (secs or scans)")
class SpecifySPMModel(SpecifyModel):
"""Adds SPM specific options to SpecifyModel
adds:
- concatenate_runs
- output_units
Examples
--------
>>> from nipype.interfaces.base import Bunch
>>> s = SpecifySPMModel()
>>> s.inputs.input_units = 'secs'
>>> s.inputs.output_units = 'scans'
>>> s.inputs.high_pass_filter_cutoff = 128.
>>> s.inputs.functional_runs = ['functional2.nii', 'functional3.nii']
>>> s.inputs.time_repetition = 6
>>> s.inputs.concatenate_runs = True
>>> info = [Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]], \
durations=[[1]]), \
Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], \
durations=[[1]])]
>>> s.inputs.subject_info = info
"""
input_spec = SpecifySPMModelInputSpec
def _concatenate_info(self, infolist):
nscans = []
for i, f in enumerate(self.inputs.functional_runs):
if isinstance(f, list):
numscans = len(f)
elif isinstance(f, six.string_types):
img = load(f)
numscans = img.get_shape()[3]
else:
raise Exception('Functional input not specified correctly')
nscans.insert(i, numscans)
# now combine all fields into 1
# names, onsets, durations, amplitudes, pmod, tmod, regressor_names,
# regressors
infoout = infolist[0]
for i, info in enumerate(infolist[1:]):
#info.[conditions, tmod] remain the same
if info.onsets:
for j, val in enumerate(info.onsets):
if self.inputs.input_units == 'secs':
onsets = np.array(info.onsets[j]) +\
self.inputs.time_repetition * \
sum(nscans[0:(i + 1)])
infoout.onsets[j].extend(onsets.tolist())
else:
onsets = np.array(info.onsets[j]) + \
sum(nscans[0:(i + 1)])
infoout.onsets[j].extend(onsets.tolist())
for j, val in enumerate(info.durations):
if len(val) > 1:
infoout.durations[j].extend(info.durations[j])
if hasattr(info, 'amplitudes') and info.amplitudes:
for j, val in enumerate(info.amplitudes):
infoout.amplitudes[j].extend(info.amplitudes[j])
if hasattr(info, 'pmod') and info.pmod:
for j, val in enumerate(info.pmod):
if val:
for key, data in enumerate(val.param):
infoout.pmod[j].param[key].extend(data)
if hasattr(info, 'regressors') and info.regressors:
#assumes same ordering of regressors across different
#runs and the same names for the regressors
for j, v in enumerate(info.regressors):
infoout.regressors[j].extend(info.regressors[j])
#insert session regressors
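            # (each 'onelist' below is an indicator column: 1 over one run's
            # scans and 0 elsewhere, absorbing per-run baseline shifts)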
if not hasattr(infoout, 'regressors') or not infoout.regressors:
infoout.regressors = []
onelist = np.zeros((1, sum(nscans)))
onelist[0, sum(nscans[0:i]):sum(nscans[0:(i + 1)])] = 1
infoout.regressors.insert(len(infoout.regressors),
onelist.tolist()[0])
return [infoout], nscans
def _generate_design(self, infolist=None):
if not isdefined(self.inputs.concatenate_runs) or \
not self.inputs.concatenate_runs:
super(SpecifySPMModel, self)._generate_design(infolist=infolist)
return
if isdefined(self.inputs.subject_info):
infolist = self.inputs.subject_info
else:
infolist = gen_info(self.inputs.event_files)
concatlist, nscans = self._concatenate_info(infolist)
functional_runs = [filename_to_list(self.inputs.functional_runs)]
realignment_parameters = []
if isdefined(self.inputs.realignment_parameters):
realignment_parameters = []
for parfile in self.inputs.realignment_parameters:
mc = np.loadtxt(parfile)
if not realignment_parameters:
realignment_parameters.insert(0, mc)
else:
realignment_parameters[0] = \
np.concatenate((realignment_parameters[0], mc))
outliers = []
if isdefined(self.inputs.outlier_files):
outliers = [[]]
for i, filename in enumerate(self.inputs.outlier_files):
try:
out = np.loadtxt(filename, dtype=int)
except IOError:
out = np.array([])
if out.size > 0:
if out.size == 1:
outliers[0].extend([(np.array(out) +
sum(nscans[0:i])).tolist()])
else:
outliers[0].extend((np.array(out) +
sum(nscans[0:i])).tolist())
self._sessinfo = self._generate_standard_design(concatlist,
functional_runs=functional_runs,
realignment_parameters=realignment_parameters,
outliers=outliers)
class SpecifySparseModelInputSpec(SpecifyModelInputSpec):
time_acquisition = traits.Float(0, mandatory=True,
desc="Time in seconds to acquire a single image volume")
    volumes_in_cluster = traits.Range(1, usedefault=True,
desc="Number of scan volumes in a cluster")
model_hrf = traits.Bool(desc="model sparse events with hrf")
stimuli_as_impulses = traits.Bool(True,
desc="Treat each stimulus to be impulse like.",
usedefault=True)
use_temporal_deriv = traits.Bool(requires=['model_hrf'],
desc="Create a temporal derivative in addition to regular regressor")
scale_regressors = traits.Bool(True, desc="Scale regressors by the peak",
usedefault=True)
scan_onset = traits.Float(0.0,
desc="Start of scanning relative to onset of run in secs",
usedefault=True)
save_plot = traits.Bool(desc=('save plot of sparse design calculation '
'(Requires matplotlib)'))
class SpecifySparseModelOutputSpec(SpecifyModelOutputSpec):
sparse_png_file = File(desc='PNG file showing sparse design')
sparse_svg_file = File(desc='SVG file showing sparse design')
class SpecifySparseModel(SpecifyModel):
""" Specify a sparse model that is compatible with spm/fsl designers
References
----------
.. [1] Perrachione TK and Ghosh SS (2013) Optimized design and analysis of
sparse-sampling fMRI experiments. Front. Neurosci. 7:55
http://journal.frontiersin.org/Journal/10.3389/fnins.2013.00055/abstract
Examples
--------
>>> from nipype.interfaces.base import Bunch
>>> s = SpecifySparseModel()
>>> s.inputs.input_units = 'secs'
>>> s.inputs.functional_runs = ['functional2.nii', 'functional3.nii']
>>> s.inputs.time_repetition = 6
>>> s.inputs.time_acquisition = 2
>>> s.inputs.high_pass_filter_cutoff = 128.
>>> s.inputs.model_hrf = True
>>> info = [Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]], \
durations=[[1]]), \
Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], \
durations=[[1]])]
>>> s.inputs.subject_info = info
"""
input_spec = SpecifySparseModelInputSpec
output_spec = SpecifySparseModelOutputSpec
def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans):
"""Generates a regressor for a sparse/clustered-sparse acquisition
"""
bplot = False
if isdefined(self.inputs.save_plot) and self.inputs.save_plot:
bplot=True
import matplotlib
matplotlib.use(config.get("execution", "matplotlib_backend"))
import matplotlib.pyplot as plt
TR = np.round(self.inputs.time_repetition * 1000) # in ms
if self.inputs.time_acquisition:
TA = np.round(self.inputs.time_acquisition * 1000) # in ms
else:
TA = TR # in ms
nvol = self.inputs.volumes_in_cluster
SCANONSET = np.round(self.inputs.scan_onset * 1000)
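        # (nscans - nvol) / nvol == number of clusters - 1, so total_time is
        # (nclusters - 1) * TR plus the final cluster's acquisition (TA * nvol)
        # plus the scan-onset offset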
total_time = TR * (nscans - nvol) / nvol + TA * nvol + SCANONSET
SILENCE = TR - TA * nvol
dt = TA / 10.0
durations = np.round(np.array(i_durations) * 1000)
if len(durations) == 1:
durations = durations*np.ones((len(i_onsets)))
onsets = np.round(np.array(i_onsets) * 1000)
dttemp = gcd(TA, gcd(SILENCE, TR))
if dt < dttemp:
if dttemp % dt != 0:
dt = float(gcd(dttemp, dt))
if dt < 1:
raise Exception("Time multiple less than 1 ms")
iflogger.info("Setting dt = %d ms\n" % dt)
npts = int(np.ceil(total_time / dt))
times = np.arange(0, total_time, dt) * 1e-3
timeline = np.zeros((npts))
timeline2 = np.zeros((npts))
if isdefined(self.inputs.model_hrf) and self.inputs.model_hrf:
hrf = spm_hrf(dt * 1e-3)
reg_scale = 1.0
if self.inputs.scale_regressors:
            # use integer sample counts/indices (float indexing fails on
            # modern numpy)
            boxcar = np.zeros(int(50.0 * 1e3 / dt))
            if self.inputs.stimuli_as_impulses:
                boxcar[int(1.0 * 1e3 / dt)] = 1.0
                reg_scale = float(TA / dt)
            else:
                boxcar[int(1.0 * 1e3 / dt):int(2.0 * 1e3 / dt)] = 1.0
if isdefined(self.inputs.model_hrf) and self.inputs.model_hrf:
response = np.convolve(boxcar, hrf)
reg_scale = 1.0 / response.max()
iflogger.info('response sum: %.4f max: %.4f' % (response.sum(),
response.max()))
iflogger.info('reg_scale: %.4f' % reg_scale)
for i, t in enumerate(onsets):
idx = int(np.round(t / dt))
if i_amplitudes:
if len(i_amplitudes) > 1:
timeline2[idx] = i_amplitudes[i]
else:
timeline2[idx] = i_amplitudes[0]
else:
timeline2[idx] = 1
if bplot:
plt.subplot(4, 1, 1)
plt.plot(times, timeline2)
if not self.inputs.stimuli_as_impulses:
if durations[i] == 0:
durations[i] = TA * nvol
stimdur = np.ones((int(durations[i] / dt)))
timeline2 = np.convolve(timeline2, stimdur)[0:len(timeline2)]
timeline += timeline2
timeline2[:] = 0
if bplot:
plt.subplot(4, 1, 2)
plt.plot(times, timeline)
if isdefined(self.inputs.model_hrf) and self.inputs.model_hrf:
timeline = np.convolve(timeline, hrf)[0:len(timeline)]
if isdefined(self.inputs.use_temporal_deriv) and \
self.inputs.use_temporal_deriv:
#create temporal deriv
timederiv = np.concatenate(([0], np.diff(timeline)))
if bplot:
plt.subplot(4, 1, 3)
plt.plot(times, timeline)
if isdefined(self.inputs.use_temporal_deriv) and \
self.inputs.use_temporal_deriv:
plt.plot(times, timederiv)
# sample timeline
timeline2 = np.zeros((npts))
reg = []
regderiv = []
        for i, trial in enumerate(np.arange(nscans) // nvol):  # integer cluster index
scanstart = int((SCANONSET + trial * TR + (i % nvol) * TA) / dt)
scanidx = scanstart+np.arange(int(TA/dt))
timeline2[scanidx] = np.max(timeline)
reg.insert(i, np.mean(timeline[scanidx]) * reg_scale)
if isdefined(self.inputs.use_temporal_deriv) and \
self.inputs.use_temporal_deriv:
regderiv.insert(i, np.mean(timederiv[scanidx]) * reg_scale)
if isdefined(self.inputs.use_temporal_deriv) and \
self.inputs.use_temporal_deriv:
            iflogger.info('orthogonalizing derivative w.r.t. main regressor')
regderiv = orth(reg, regderiv)
if bplot:
plt.subplot(4, 1, 3)
plt.plot(times, timeline2)
plt.subplot(4, 1, 4)
plt.bar(np.arange(len(reg)), reg, width=0.5)
plt.savefig('sparse.png')
plt.savefig('sparse.svg')
if regderiv:
return [reg, regderiv]
else:
return reg
def _cond_to_regress(self, info, nscans):
"""Converts condition information to full regressors
"""
reg = []
regnames = []
for i, cond in enumerate(info.conditions):
if hasattr(info, 'amplitudes') and info.amplitudes:
amplitudes = info.amplitudes[i]
else:
amplitudes = None
regnames.insert(len(regnames), cond)
scaled_onsets = scale_timings(info.onsets[i],
self.inputs.input_units,
'secs',
self.inputs.time_repetition)
scaled_durations = scale_timings(info.durations[i],
self.inputs.input_units,
'secs',
self.inputs.time_repetition)
regressor = self._gen_regress(scaled_onsets,
scaled_durations,
amplitudes,
nscans)
if isdefined(self.inputs.use_temporal_deriv) and \
self.inputs.use_temporal_deriv:
reg.insert(len(reg), regressor[0])
regnames.insert(len(regnames), cond + '_D')
reg.insert(len(reg), regressor[1])
else:
reg.insert(len(reg), regressor)
# need to deal with temporal and parametric modulators
# for sparse-clustered acquisitions enter T1-effect regressors
nvol = self.inputs.volumes_in_cluster
if nvol > 1:
for i in range(nvol-1):
                treg = np.zeros((nscans // nvol, nvol))
treg[:, i] = 1
reg.insert(len(reg), treg.ravel().tolist())
regnames.insert(len(regnames), 'T1effect_%d' % i)
return reg, regnames
def _generate_clustered_design(self, infolist):
"""Generates condition information for sparse-clustered
designs.
"""
infoout = deepcopy(infolist)
for i, info in enumerate(infolist):
infoout[i].conditions = None
infoout[i].onsets = None
infoout[i].durations = None
if info.conditions:
img = load(self.inputs.functional_runs[i])
nscans = img.get_shape()[3]
reg, regnames = self._cond_to_regress(info, nscans)
if hasattr(infoout[i], 'regressors') and infoout[i].regressors:
if not infoout[i].regressor_names:
infoout[i].regressor_names = \
['R%d'%j for j in range(len(infoout[i].regressors))]
else:
infoout[i].regressors = []
infoout[i].regressor_names = []
for j, r in enumerate(reg):
regidx = len(infoout[i].regressors)
infoout[i].regressor_names.insert(regidx, regnames[j])
infoout[i].regressors.insert(regidx, r)
return infoout
def _generate_design(self, infolist=None):
if isdefined(self.inputs.subject_info):
infolist = self.inputs.subject_info
else:
infolist = gen_info(self.inputs.event_files)
sparselist = self._generate_clustered_design(infolist)
        super(SpecifySparseModel, self)._generate_design(infolist=sparselist)
def _list_outputs(self):
outputs = self._outputs().get()
if not hasattr(self, '_sessinfo'):
self._generate_design()
outputs['session_info'] = self._sessinfo
if isdefined(self.inputs.save_plot) and self.inputs.save_plot:
outputs['sparse_png_file'] = os.path.join(os.getcwd(), 'sparse.png')
outputs['sparse_svg_file'] = os.path.join(os.getcwd(), 'sparse.svg')
return outputs
| {
"content_hash": "c0050708e50264d431e2c053b7b0ec8c",
"timestamp": "",
"source": "github",
"line_count": 795,
"max_line_length": 98,
"avg_line_length": 43.59496855345912,
"alnum_prop": 0.5355762017427433,
"repo_name": "wanderine/nipype",
"id": "4114aa308347abfa41825e2b995ad8e402dd56aa",
"size": "34772",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nipype/algorithms/modelgen.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "9823"
},
{
"name": "KiCad",
"bytes": "3797"
},
{
"name": "Makefile",
"bytes": "1854"
},
{
"name": "Matlab",
"bytes": "1999"
},
{
"name": "Python",
"bytes": "4796302"
},
{
"name": "Shell",
"bytes": "380"
},
{
"name": "Tcl",
"bytes": "43408"
}
],
"symlink_target": ""
} |
import os
import sys
from unittest import defaultTestLoader, TextTestRunner, TestSuite
TESTS = ('form', 'fields', 'validators', 'widgets', 'webob_wrapper', 'csrf', 'ext_csrf', 'i18n')
OPTIONAL_TESTS = ('ext_django.tests', 'ext_sqlalchemy', 'ext_dateutil', 'locale_babel')
def make_suite(prefix='', extra=(), force_all=False):
tests = TESTS + extra
test_names = list(prefix + x for x in tests)
suite = TestSuite()
suite.addTest(defaultTestLoader.loadTestsFromNames(test_names))
for name in OPTIONAL_TESTS:
test_name = prefix + name
try:
suite.addTest(defaultTestLoader.loadTestsFromName(test_name))
except (ImportError, AttributeError):
if force_all:
# If force_all, don't let us skip tests
raise ImportError('Could not load test module %s and force_all is enabled.' % test_name)
sys.stderr.write("### Disabled test '%s', dependency not found\n" % name)
return suite
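# Note: with force_all=True, make_suite raises instead of skipping when an
# optional extension test (e.g. ext_sqlalchemy) cannot be imported.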
def additional_tests():
"""
This is called automatically by setup.py test
"""
return make_suite('tests.')
def main():
my_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, os.path.abspath(os.path.join(my_dir, '..')))
from optparse import OptionParser
parser = OptionParser()
parser.add_option('--with-pep8', action='store_true', dest='with_pep8', default=False)
parser.add_option('--force-all', action='store_true', dest='force_all', default=False)
parser.add_option('-v', '--verbose', action='count', dest='verbosity', default=0)
parser.add_option('-q', '--quiet', action='count', dest='quietness', default=0)
options, extra_args = parser.parse_args()
has_pep8 = False
try:
import pep8
has_pep8 = True
except ImportError:
if options.with_pep8:
            sys.stderr.write('# Could not find pep8 library.\n')
sys.exit(1)
    if options.with_pep8 and has_pep8:
guide_main = pep8.StyleGuide(
ignore=[],
paths=['wtforms/'],
exclude=[],
max_line_length=130,
)
guide_tests = pep8.StyleGuide(
ignore=['E221'],
paths=['tests/'],
max_line_length=150,
)
for guide in (guide_main, guide_tests):
report = guide.check_files()
if report.total_errors:
sys.exit(1)
suite = make_suite('', tuple(extra_args), options.force_all)
runner = TextTestRunner(verbosity=options.verbosity - options.quietness + 1)
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
if __name__ == '__main__':
main()
| {
"content_hash": "30c8fb2d5aceaddf4ae61327f6ff9b11",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 104,
"avg_line_length": 34.08974358974359,
"alnum_prop": 0.6039864610755923,
"repo_name": "pawl/wtforms",
"id": "c65ecd52a65165ce37d64264541ff9af9c16cfe1",
"size": "2681",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/runtests.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2798"
},
{
"name": "Makefile",
"bytes": "2342"
},
{
"name": "Python",
"bytes": "312846"
},
{
"name": "Shell",
"bytes": "2985"
}
],
"symlink_target": ""
} |
"""
Provides a command container for additional tox commands, used in "tox.ini".
COMMANDS:
* copytree
* copy
* py2to3
REQUIRES:
* argparse
"""
from glob import glob
import argparse
import inspect
import os.path
import shutil
import sys
import collections
__author__ = "Jens Engel"
__copyright__ = "(c) 2013 by Jens Engel"
__license__ = "BSD"
# -----------------------------------------------------------------------------
# CONSTANTS:
# -----------------------------------------------------------------------------
VERSION = "0.1.0"
FORMATTER_CLASS = argparse.RawDescriptionHelpFormatter
# -----------------------------------------------------------------------------
# SUBCOMMAND: copytree
# -----------------------------------------------------------------------------
def command_copytree(args):
"""
Copy one or more source directory(s) below a destination directory.
Parts of the destination directory path are created if needed.
Similar to the UNIX command: 'cp -R srcdir destdir'
"""
for srcdir in args.srcdirs:
basename = os.path.basename(srcdir)
destdir2 = os.path.normpath(os.path.join(args.destdir, basename))
if os.path.exists(destdir2):
shutil.rmtree(destdir2)
sys.stdout.write("copytree: %s => %s\n" % (srcdir, destdir2))
shutil.copytree(srcdir, destdir2)
return 0
def setup_parser_copytree(parser):
parser.add_argument("srcdirs", nargs="+", help="Source directory(s)")
parser.add_argument("destdir", help="Destination directory")
command_copytree.usage = "%(prog)s srcdir... destdir"
command_copytree.short = "Copy source dir(s) below a destination directory."
command_copytree.setup_parser = setup_parser_copytree
# -----------------------------------------------------------------------------
# SUBCOMMAND: copy
# -----------------------------------------------------------------------------
def command_copy(args):
"""
    Copy one or more source file(s) to a destpath (destfile or destdir).
Destdir mode is used if:
* More than one srcfile is provided
* Last parameter ends with a slash ("/").
* Last parameter is an existing directory
Destination directory path is created if needed.
Similar to the UNIX command: 'cp srcfile... destpath'
"""
sources = args.sources
destpath = args.destpath
source_files = []
for file_ in sources:
if "*" in file_:
selected = glob(file_)
source_files.extend(selected)
elif os.path.isfile(file_):
source_files.append(file_)
if destpath.endswith("/") or os.path.isdir(destpath) or len(sources) > 1:
# -- DESTDIR-MODE: Last argument is a directory.
destdir = destpath
else:
# -- DESTFILE-MODE: Copy (and rename) one file.
assert len(source_files) == 1
destdir = os.path.dirname(destpath)
# -- WORK-HORSE: Copy one or more files to destpath.
if not os.path.isdir(destdir):
sys.stdout.write("copy: Create dir %s\n" % destdir)
os.makedirs(destdir)
for source in source_files:
destname = os.path.join(destdir, os.path.basename(source))
sys.stdout.write("copy: %s => %s\n" % (source, destname))
shutil.copy(source, destname)
return 0
def setup_parser_copy(parser):
parser.add_argument("sources", nargs="+", help="Source files.")
parser.add_argument("destpath", help="Destination path")
command_copy.usage = "%(prog)s sources... destpath"
command_copy.short = "Copy one or more source files to a destination."
command_copy.setup_parser = setup_parser_copy
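# Example invocations (hypothetical paths):
#   toxcmd3.py copy README.rst build/docs/        -> destdir mode (trailing slash)
#   toxcmd3.py copy README.rst build/readme.txt   -> destfile mode (copy + rename)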
# -----------------------------------------------------------------------------
# SUBCOMMAND: mkdir
# -----------------------------------------------------------------------------
def command_mkdir(args):
"""
    Create a non-existing directory (or several).
If the directory exists, the step is skipped.
Similar to the UNIX command: 'mkdir -p dir'
"""
errors = 0
for directory in args.dirs:
if os.path.exists(directory):
if not os.path.isdir(directory):
# -- SANITY CHECK: directory exists, but as file...
sys.stdout.write("mkdir: %s\n" % directory)
sys.stdout.write("ERROR: Exists already, but as file...\n")
errors += 1
else:
            # -- NORMAL CASE: Directory does not exist yet.
assert not os.path.isdir(directory)
sys.stdout.write("mkdir: %s\n" % directory)
os.makedirs(directory)
return errors
def setup_parser_mkdir(parser):
parser.add_argument("dirs", nargs="+", help="Directory(s)")
command_mkdir.usage = "%(prog)s dir..."
command_mkdir.short = "Create non-existing directory (or more...)."
command_mkdir.setup_parser = setup_parser_mkdir
# -----------------------------------------------------------------------------
# SUBCOMMAND: py2to3
# -----------------------------------------------------------------------------
command_py2to4_work_around3k = True
def command_py2to3(args):
"""
    Apply the '2to3' (Python 2 to Python 3) conversion tool to Python sources.
"""
from lib2to3.main import main
args2 = []
if command_py2to4_work_around3k:
if args.no_diffs:
args2.append("--no-diffs")
if args.write:
args2.append("-w")
if args.nobackups:
args2.append("-n")
args2.extend(args.sources)
sys.exit(main("lib2to3.fixes", args=args2))
def setup_parser4py2to3(parser):
if command_py2to4_work_around3k:
parser.add_argument("--no-diffs", action="store_true",
help="Don't show diffs of the refactoring")
parser.add_argument("-w", "--write", action="store_true",
help="Write back modified files")
parser.add_argument("-n", "--nobackups", action="store_true", default=False,
help="Don't write backups for modified files.")
parser.add_argument("sources", nargs="+", help="Source files.")
command_py2to3.name = "2to3"
command_py2to3.usage = "%(prog)s sources..."
command_py2to3.short = "Apply python's 2to3 tool to Python sources."
command_py2to3.setup_parser = setup_parser4py2to3
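# Example invocation (hypothetical sources):
#   toxcmd3.py 2to3 -w -n --no-diffs tools/*.py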
# -----------------------------------------------------------------------------
# COMMAND HELPERS/UTILS:
# -----------------------------------------------------------------------------
def discover_commands():
commands = []
for name, func in inspect.getmembers(inspect.getmodule(toxcmd_main)):
if name.startswith("__"):
continue
if name.startswith("command_") and isinstance(func, collections.Callable):
command_name0 = name.replace("command_", "")
command_name = getattr(func, "name", command_name0)
commands.append(Command(command_name, func))
return commands
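# A subcommand's CLI name defaults to the function name minus the "command_"
# prefix; a function may override it via a .name attribute, as command_py2to3
# does with "2to3".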
class Command(object):
def __init__(self, name, func):
assert isinstance(name, str)
assert isinstance(func, collections.Callable)
self.name = name
self.func = func
self.parser = None
def setup_parser(self, command_parser):
setup_parser = getattr(self.func, "setup_parser", None)
if setup_parser and isinstance(setup_parser, collections.Callable):
setup_parser(command_parser)
else:
command_parser.add_argument("args", nargs="*")
@property
def usage(self):
usage = getattr(self.func, "usage", None)
return usage
@property
def short_description(self):
short_description = getattr(self.func, "short", "")
return short_description
@property
def description(self):
return inspect.getdoc(self.func)
def __call__(self, args):
return self.func(args)
# -----------------------------------------------------------------------------
# MAIN-COMMAND:
# -----------------------------------------------------------------------------
def toxcmd_main(args=None):
"""Command util with subcommands for tox environments."""
usage = "USAGE: %(prog)s [OPTIONS] COMMAND args..."
if args is None:
args = sys.argv[1:]
# -- STEP: Build command-line parser.
parser = argparse.ArgumentParser(description=inspect.getdoc(toxcmd_main),
formatter_class=FORMATTER_CLASS)
common_parser = parser.add_argument_group("Common options")
common_parser.add_argument("--version", action="version", version=VERSION)
subparsers = parser.add_subparsers(help="commands")
for command in discover_commands():
command_parser = subparsers.add_parser(command.name,
usage=command.usage,
description=command.description,
help=command.short_description,
formatter_class=FORMATTER_CLASS)
command_parser.set_defaults(func=command)
command.setup_parser(command_parser)
command.parser = command_parser
# -- STEP: Process command-line and run command.
options = parser.parse_args(args)
command_function = options.func
return command_function(options)
# -----------------------------------------------------------------------------
# MAIN:
# -----------------------------------------------------------------------------
if __name__ == "__main__":
sys.exit(toxcmd_main())
| {
"content_hash": "5341459a265c52aebc968197e1bead77",
"timestamp": "",
"source": "github",
"line_count": 268,
"max_line_length": 84,
"avg_line_length": 35.61194029850746,
"alnum_prop": 0.5420159262363788,
"repo_name": "connorsml/behave",
"id": "c7b99f773444f36ddc45d0299eb05324e23f498e",
"size": "9591",
"binary": false,
"copies": "17",
"ref": "refs/heads/master",
"path": "bin/toxcmd3.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "272"
},
{
"name": "Cucumber",
"bytes": "589239"
},
{
"name": "Python",
"bytes": "758721"
},
{
"name": "Shell",
"bytes": "856"
}
],
"symlink_target": ""
} |
from var_plots import plot_forecast
plot_forecast()
| {
"content_hash": "d9e7255fa048624bdb4ea098070c0d5f",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 35,
"avg_line_length": 17.666666666666668,
"alnum_prop": 0.7924528301886793,
"repo_name": "josef-pkt/statsmodels",
"id": "596890ffe3e3cc8b3be9c28d0dbf0315b87054e0",
"size": "53",
"binary": false,
"copies": "6",
"ref": "refs/heads/main",
"path": "docs/source/plots/var_plot_forecast.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AGS Script",
"bytes": "457842"
},
{
"name": "Assembly",
"bytes": "10035"
},
{
"name": "Batchfile",
"bytes": "625"
},
{
"name": "C",
"bytes": "381"
},
{
"name": "Cython",
"bytes": "225838"
},
{
"name": "Fortran",
"bytes": "16671"
},
{
"name": "HTML",
"bytes": "148470"
},
{
"name": "MATLAB",
"bytes": "100525"
},
{
"name": "Python",
"bytes": "14428857"
},
{
"name": "R",
"bytes": "106569"
},
{
"name": "Shell",
"bytes": "25322"
},
{
"name": "Stata",
"bytes": "50129"
}
],
"symlink_target": ""
} |
"""
Implementations of several key algorithms (TSP, Graph, SuperMap, Linear Programming, ML, etc.) used by other modules.
"""
from jcvi.apps.base import dmain
if __name__ == "__main__":
dmain(__file__)
| {
"content_hash": "c3010e7cac04de97c39d0edbfb8c53be",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 125,
"avg_line_length": 24.11111111111111,
"alnum_prop": 0.6682027649769585,
"repo_name": "tanghaibao/jcvi",
"id": "baf6ccd7ed9226058421e2262c2233a92e16a404",
"size": "263",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "jcvi/algorithms/__main__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Cython",
"bytes": "10467"
},
{
"name": "Dockerfile",
"bytes": "1150"
},
{
"name": "Makefile",
"bytes": "445"
},
{
"name": "Python",
"bytes": "2635155"
}
],
"symlink_target": ""
} |
import pytest
from stix2.datastore import (
CompositeDataSource, DataSink, DataSource, DataStoreMixin,
)
from stix2.datastore.filters import Filter
from .constants import CAMPAIGN_MORE_KWARGS
def test_datasource_abstract_class_raises_error():
with pytest.raises(TypeError):
DataSource()
def test_datasink_abstract_class_raises_error():
with pytest.raises(TypeError):
DataSink()
def test_datastore_smoke():
assert DataStoreMixin() is not None
def test_datastore_get_raises():
with pytest.raises(AttributeError) as excinfo:
DataStoreMixin().get("indicator--00000000-0000-4000-8000-000000000001")
assert "DataStoreMixin has no data source to query" == str(excinfo.value)
def test_datastore_all_versions_raises():
with pytest.raises(AttributeError) as excinfo:
DataStoreMixin().all_versions("indicator--00000000-0000-4000-8000-000000000001")
assert "DataStoreMixin has no data source to query" == str(excinfo.value)
def test_datastore_query_raises():
with pytest.raises(AttributeError) as excinfo:
DataStoreMixin().query([Filter("type", "=", "indicator")])
assert "DataStoreMixin has no data source to query" == str(excinfo.value)
def test_datastore_creator_of_raises():
with pytest.raises(AttributeError) as excinfo:
DataStoreMixin().creator_of(CAMPAIGN_MORE_KWARGS)
assert "DataStoreMixin has no data source to query" == str(excinfo.value)
def test_datastore_relationships_raises():
with pytest.raises(AttributeError) as excinfo:
DataStoreMixin().relationships(
obj="indicator--00000000-0000-4000-8000-000000000001",
target_only=True,
)
assert "DataStoreMixin has no data source to query" == str(excinfo.value)
def test_datastore_related_to_raises():
with pytest.raises(AttributeError) as excinfo:
DataStoreMixin().related_to(
obj="indicator--00000000-0000-4000-8000-000000000001",
target_only=True,
)
assert "DataStoreMixin has no data source to query" == str(excinfo.value)
def test_datastore_add_raises():
with pytest.raises(AttributeError) as excinfo:
DataStoreMixin().add(CAMPAIGN_MORE_KWARGS)
assert "DataStoreMixin has no data sink to put objects in" == str(excinfo.value)
def test_composite_datastore_get_raises_error():
with pytest.raises(AttributeError) as excinfo:
CompositeDataSource().get("indicator--00000000-0000-4000-8000-000000000001")
assert "CompositeDataSource has no data sources" == str(excinfo.value)
def test_composite_datastore_all_versions_raises_error():
with pytest.raises(AttributeError) as excinfo:
CompositeDataSource().all_versions("indicator--00000000-0000-4000-8000-000000000001")
assert "CompositeDataSource has no data sources" == str(excinfo.value)
def test_composite_datastore_query_raises_error():
with pytest.raises(AttributeError) as excinfo:
CompositeDataSource().query([Filter("type", "=", "indicator")])
assert "CompositeDataSource has no data sources" == str(excinfo.value)
def test_composite_datastore_relationships_raises_error():
with pytest.raises(AttributeError) as excinfo:
CompositeDataSource().relationships(
obj="indicator--00000000-0000-4000-8000-000000000001",
target_only=True,
)
assert "CompositeDataSource has no data sources" == str(excinfo.value)
def test_composite_datastore_related_to_raises_error():
with pytest.raises(AttributeError) as excinfo:
CompositeDataSource().related_to(
obj="indicator--00000000-0000-4000-8000-000000000001",
target_only=True,
)
assert "CompositeDataSource has no data sources" == str(excinfo.value)
def test_composite_datastore_add_data_source_raises_error():
with pytest.raises(TypeError) as excinfo:
ind = "indicator--00000000-0000-4000-8000-000000000001"
CompositeDataSource().add_data_source(ind)
assert "DataSource (to be added) is not of type stix2.DataSource. DataSource type is '{}'".format(type(ind)) == str(excinfo.value)
def test_composite_datastore_add_data_sources_raises_error():
with pytest.raises(TypeError) as excinfo:
ind = "indicator--00000000-0000-4000-8000-000000000001"
CompositeDataSource().add_data_sources(ind)
assert "DataSource (to be added) is not of type stix2.DataSource. DataSource type is '{}'".format(type(ind)) == str(excinfo.value)
def test_composite_datastore_no_datasource():
cds = CompositeDataSource()
with pytest.raises(AttributeError) as excinfo:
cds.get("indicator--00000000-0000-4000-8000-000000000001")
assert 'CompositeDataSource has no data source' in str(excinfo.value)
| {
"content_hash": "d799225360f5cc528108d6148a3a5934",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 134,
"avg_line_length": 37.51968503937008,
"alnum_prop": 0.7164742917103882,
"repo_name": "oasis-open/cti-python-stix2",
"id": "8bb5494c48181e5c69dcf8b07e39330c06215d2f",
"size": "4765",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "stix2/test/v20/test_datastore.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "1737742"
}
],
"symlink_target": ""
} |
from copy import deepcopy
from typing import Any, TYPE_CHECKING
from azure.core.rest import HttpRequest, HttpResponse
from azure.mgmt.core import ARMPipelineClient
from . import models
from ._configuration import ServiceFabricManagementClientConfiguration
from ._serialization import Deserializer, Serializer
from .operations import (
ApplicationTypeVersionsOperations,
ApplicationTypesOperations,
ApplicationsOperations,
ClusterVersionsOperations,
ClustersOperations,
Operations,
ServicesOperations,
)
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
class ServiceFabricManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes
"""Service Fabric Management Client.
:ivar clusters: ClustersOperations operations
:vartype clusters: azure.mgmt.servicefabric.operations.ClustersOperations
:ivar cluster_versions: ClusterVersionsOperations operations
:vartype cluster_versions: azure.mgmt.servicefabric.operations.ClusterVersionsOperations
:ivar operations: Operations operations
:vartype operations: azure.mgmt.servicefabric.operations.Operations
:ivar application_types: ApplicationTypesOperations operations
:vartype application_types: azure.mgmt.servicefabric.operations.ApplicationTypesOperations
:ivar application_type_versions: ApplicationTypeVersionsOperations operations
:vartype application_type_versions:
azure.mgmt.servicefabric.operations.ApplicationTypeVersionsOperations
:ivar applications: ApplicationsOperations operations
:vartype applications: azure.mgmt.servicefabric.operations.ApplicationsOperations
:ivar services: ServicesOperations operations
:vartype services: azure.mgmt.servicefabric.operations.ServicesOperations
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: The customer subscription identifier. Required.
:type subscription_id: str
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
:keyword api_version: Api Version. Default value is "2021-06-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
"""
def __init__(
self,
credential: "TokenCredential",
subscription_id: str,
base_url: str = "https://management.azure.com",
**kwargs: Any
) -> None:
self._config = ServiceFabricManagementClientConfiguration(
credential=credential, subscription_id=subscription_id, **kwargs
)
self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.clusters = ClustersOperations(self._client, self._config, self._serialize, self._deserialize)
self.cluster_versions = ClusterVersionsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
self.application_types = ApplicationTypesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.application_type_versions = ApplicationTypeVersionsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.applications = ApplicationsOperations(self._client, self._config, self._serialize, self._deserialize)
self.services = ServicesOperations(self._client, self._config, self._serialize, self._deserialize)
def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = client._send_request(request)
<HttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.HttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)
def close(self):
# type: () -> None
self._client.close()
def __enter__(self):
# type: () -> ServiceFabricManagementClient
self._client.__enter__()
return self
def __exit__(self, *exc_details):
# type: (Any) -> None
self._client.__exit__(*exc_details)
| {
"content_hash": "8d27ce6cb474a1fba9ad2b4a9f2ffff8",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 119,
"avg_line_length": 46.42857142857143,
"alnum_prop": 0.7154751131221719,
"repo_name": "Azure/azure-sdk-for-python",
"id": "7b2941cf9f5d2b688aeb423c3df36df979d44c48",
"size": "5993",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/servicefabric/azure-mgmt-servicefabric/azure/mgmt/servicefabric/_service_fabric_management_client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
import os
import numpy as np
from numpy.testing import assert_array_almost_equal
from nose.tools import assert_true, assert_equal, assert_raises
try:
from nose.tools import assert_list_equal
except ImportError:
from landlab.testing.tools import assert_list_equal
from landlab.testing.tools import cdtemp
from landlab.io import write_esri_ascii, read_esri_ascii
from landlab import RasterModelGrid
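# Each test does its file I/O inside a scratch directory via cdtemp(), so no
# output files leak between cases.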
def test_grid_with_no_fields():
grid = RasterModelGrid((4, 5), spacing=(2., 2.))
with cdtemp() as _:
assert_raises(ValueError, write_esri_ascii, 'test.asc', grid)
def test_grid_with_one_field():
grid = RasterModelGrid((4, 5), spacing=(2., 2.))
grid.add_field('node', 'air__temperature', np.arange(20.))
with cdtemp() as _:
files = write_esri_ascii('test.asc', grid)
assert_list_equal(files, ['test.asc'])
for fname in files:
assert_true(os.path.isfile(fname))
def test_grid_with_two_fields():
grid = RasterModelGrid((4, 5), spacing=(2., 2.))
grid.add_field('node', 'air__temperature', np.arange(20.))
grid.add_field('node', 'land_surface__elevation', np.arange(20.))
with cdtemp() as _:
files = write_esri_ascii('test.asc', grid)
files.sort()
assert_list_equal(files, ['test_air__temperature.asc',
'test_land_surface__elevation.asc'])
for fname in files:
assert_true(os.path.isfile(fname))
def test_names_keyword_as_str_or_list():
grid = RasterModelGrid((4, 5), spacing=(2., 2.))
grid.add_field('node', 'air__temperature', np.arange(20.))
grid.add_field('node', 'land_surface__elevation', np.arange(20.))
with cdtemp() as _:
files = write_esri_ascii('test.asc', grid, names='air__temperature')
assert_list_equal(files, ['test.asc'])
assert_true(os.path.isfile('test.asc'))
with cdtemp() as _:
files = write_esri_ascii('test.asc', grid, names=['air__temperature'])
assert_list_equal(files, ['test.asc'])
assert_true(os.path.isfile('test.asc'))
def test_names_keyword_multiple_names():
grid = RasterModelGrid((4, 5), spacing=(2., 2.))
grid.add_field('node', 'air__temperature', np.arange(20.))
grid.add_field('node', 'land_surface__elevation', np.arange(20.))
with cdtemp() as _:
files = write_esri_ascii('test.asc', grid,
names=['air__temperature',
'land_surface__elevation'])
files.sort()
assert_list_equal(files, ['test_air__temperature.asc',
'test_land_surface__elevation.asc'])
for fname in files:
assert_true(os.path.isfile(fname))
def test_names_keyword_with_bad_name():
grid = RasterModelGrid((4, 5), spacing=(2., 2.))
grid.add_field('node', 'air__temperature', np.arange(20.))
with cdtemp() as _:
assert_raises(ValueError, write_esri_ascii, 'test.asc', grid,
names='not_a_name')
def test_clobber_keyword():
grid = RasterModelGrid((4, 5), spacing=(2., 2.))
grid.add_field('node', 'air__temperature', np.arange(20.))
with cdtemp() as _:
write_esri_ascii('test.asc', grid)
assert_raises(ValueError, write_esri_ascii, 'test.asc', grid)
assert_raises(ValueError, write_esri_ascii, 'test.asc', grid,
clobber=False)
write_esri_ascii('test.asc', grid, clobber=True)
def test_write_then_read():
grid = RasterModelGrid((4, 5), spacing=(2., 2.))
grid.add_field('node', 'air__temperature', np.arange(20.))
with cdtemp() as _:
write_esri_ascii('test.asc', grid)
new_grid, field = read_esri_ascii('test.asc')
assert_equal(grid.number_of_node_columns, new_grid.number_of_node_columns)
assert_equal(grid.number_of_node_rows, new_grid.number_of_node_rows)
assert_equal(grid.dx, new_grid.dx)
assert_array_almost_equal(grid.node_x, new_grid.node_x)
assert_array_almost_equal(grid.node_y, new_grid.node_y)
assert_array_almost_equal(field, grid.at_node['air__temperature'])
| {
"content_hash": "0b88888c6c57901d125c84e7c0ad8245",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 78,
"avg_line_length": 37.32432432432432,
"alnum_prop": 0.6162201303403331,
"repo_name": "SiccarPoint/landlab",
"id": "c0a02f888c30783b7426a43c0840fd7debd3a742",
"size": "4166",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "landlab/io/tests/test_write_esri_ascii.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1452"
},
{
"name": "PowerShell",
"bytes": "7128"
},
{
"name": "Python",
"bytes": "2619353"
},
{
"name": "Shell",
"bytes": "3132"
}
],
"symlink_target": ""
} |
import time
import unittest
from autothreadharness.harness_case import HarnessCase
class SED_6_5_1(HarnessCase):
role = HarnessCase.ROLE_SED
case = '6 5 1'
golden_devices_required = 1
def on_dialog(self, dialog, title):
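        # When the Harness prompts to reset the DUT, wait out the SED child
        # timeout, then power-cycle the device under test.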
if title.startswith('Reset DUT'):
time.sleep(self.child_timeout + 5)
self.dut.stop()
time.sleep(1)
self.dut.start()
return False
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "d3ab7c476da91637951d644bc2cafda8",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 54,
"avg_line_length": 22.09090909090909,
"alnum_prop": 0.5946502057613169,
"repo_name": "bukepo/openthread",
"id": "f50f4565070a1da3730af4cbccbdf27abbe2ef38",
"size": "2067",
"binary": false,
"copies": "18",
"ref": "refs/heads/master",
"path": "tools/harness-automation/cases_R140/sed_6_5_1.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "50"
},
{
"name": "C",
"bytes": "1080565"
},
{
"name": "C++",
"bytes": "5839893"
},
{
"name": "CMake",
"bytes": "95509"
},
{
"name": "Dockerfile",
"bytes": "6286"
},
{
"name": "M4",
"bytes": "36443"
},
{
"name": "Makefile",
"bytes": "161153"
},
{
"name": "Python",
"bytes": "3379923"
},
{
"name": "Shell",
"bytes": "134708"
}
],
"symlink_target": ""
} |
__all__ = ['Report', 'TestHelperTMP']
from storm.twisted.transact import transact
from storm.locals import *
from oonib.report.db import getStore, transactor
class OModel(object):
transactor = transactor
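    # @transact runs the decorated method inside a Storm transaction on the
    # transactor's thread pool (the transactor configured above).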
@transact
    def create(self, query):
        store = getStore()
store.execute(query)
store.commit()
@transact
def save(self):
store = getStore()
store.add(self)
store.commit()
class Report(OModel):
"""
This represents an OONI Report as stored in the database.
report_id: this is generated by the backend and is used by the client to
reference a previous report and append to it. It should be
treated as a shared secret between the probe and backend.
software_name: this indicates the name of the software performing the test
(this will default to ooniprobe)
software_version: this is the version number of the software running the
test.
test_name: the name of the test on which the report is being created.
test_version: indicates the version of the test
progress: what is the current progress of the report. This allows clients
               to report even partial reports up to a certain percentage of
progress. Once the report is complete progress will be 100.
content: what is the content of the report. If the current progress is less
than 100 we should append to the YAML data structure that is
             currently stored in that field.
"""
__storm_table__ = 'reports'
createQuery = "CREATE TABLE " + __storm_table__ +\
"(id INTEGER PRIMARY KEY, report_id VARCHAR, software_name VARCHAR,"\
"software_version VARCHAR, test_name VARCHAR, test_version VARCHAR,"\
"progress VARCHAR, content VARCHAR)"
id = Int(primary=True)
report_id = Unicode()
software_name = Unicode()
software_version = Unicode()
test_name = Unicode()
test_version = Unicode()
progress = Unicode()
content = Unicode()
class TestHelperTMP(OModel):
__storm_table__ = 'testhelpertmp'
createQuery = "CREATE TABLE " + __storm_table__ +\
"(id INTEGER PRIMARY KEY, report_id VARCHAR, test_helper VARCHAR,"\
" client_ip VARCHAR, creation_time VARCHAR)"
id = Int(primary=True)
report_id = Unicode()
test_helper = Unicode()
client_ip = Unicode()
creation_time = Date()
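# A minimal bootstrap sketch (an illustration, not part of the original
# module): the createQuery strings above could be used to create the schema
# before the first save(), e.g.:
#
#     store = getStore()
#     store.execute(Report.createQuery)
#     store.execute(TestHelperTMP.createQuery)
#     store.commit()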
| {
"content_hash": "3dd5f118cd62ddbba076cac5ee7bb907",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 87,
"avg_line_length": 30.337349397590362,
"alnum_prop": 0.6350277998411438,
"repo_name": "hackerberry/ooni-probe",
"id": "21e60eb9d1dd45cb7d3df9adada8ccef06af4ba4",
"size": "2518",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oonib/report/db/models.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "519235"
},
{
"name": "Shell",
"bytes": "10418"
}
],
"symlink_target": ""
} |
import rospy
from sensor_msgs.msg import PointCloud2, Image
from cv_bridge import CvBridge
import cv, cv2
import numpy
import matplotlib
import copy
from rosCV import rosCV as rcv
# import code # FOR TESTING
class Bound():
def __init__(self, topic):
# self.defaultManip = rospy.get_param('boundingBox/defaultManip')
rospy.Subscriber(topic, Image, self.image_callback)
rospy.Subscriber('camera/depth/image', Image, self.depth_callback)
self.rcv = rcv()
self.pub = rospy.Publisher(topic + '_filtered', Image)
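    # depth_callback caches the most recent depth frame; image_callback reads
    # it to estimate the board's distance from the camera.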
def depth_callback(self, image):
self.depth_image = self.rcv.depthToCv2(image)
def image_callback(self, image_in):
""" Get image to which we're subscribed. """
# Import and convert
image_cv2 = self.rcv.toCv2(image_in)
image_hsv = cv2.cvtColor(image_cv2, cv2.COLOR_BGR2HSV)
try:
pink_lowerb = numpy.array((140, 100,100))
pink_upperb = numpy.array((170,255, 255))
pink_x, pink_y, pink_area = self.rcv.find_marker(image_cv2, pink_lowerb, pink_upperb)
green_lowerb = numpy.array((50, 100,100))
green_upperb = numpy.array((80,255, 255))
green_x, green_y, green_area = self.rcv.find_marker(image_cv2, green_lowerb, green_upperb)
special_area = image_hsv[pink_y:green_y, pink_x:green_x]
markings = image_cv2[pink_y:green_y, pink_x:green_x]
markings = cv2.cvtColor(markings, cv2.COLOR_BGR2GRAY)
edges = cv2.Canny(markings, 10, 20)
img_height = len(image_cv2)
img_width = len(image_cv2[0])
mask = numpy.zeros((img_height, img_width), dtype=numpy.uint8)
mask[pink_y:green_y, pink_x:green_x] = edges
kernel = numpy.ones((5,5),'uint8')
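            # Dilate the Canny edges so the inpainting mask fully covers the
            # marker strokes.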
mask = cv2.dilate(mask, kernel)
# mask = cv2.erode(mask, kernel)
board_depth = self.depth_image[pink_y, pink_x]
# print "board depth = {0}".format(board_depth)
# print self.depth_image
# print numpy.where(self.depth_image <= board_depth - 0.2)
# http://stackoverflow.com/questions/432112/is-there-a-numpy-function-to-return-the-first-index-of-something-in-an-array
# for i in range(img_height):
# for j in range(img_width):
# if self.depth_image[i][j] <= board_depth - 0.25:
# mask[i][j] = 0
image_cv2 = cv2.inpaint(image_cv2, mask, 5, cv2.INPAINT_TELEA)
# cv2.rectangle(image_cv2, (green_x, green_y), (pink_x, pink_y), (0, 0, 0), 3)
except(ZeroDivisionError):
pass
# except(ZeroDivisionError, TypeError, AttributeError):
self.rcv.imshow(self.depth_image)
# self.rcv.imshow(image_cv2)
# Convert back to ROS Image msg
image_out = self.rcv.toRos(image_cv2)
self.pub.publish(image_out)
if __name__ == '__main__':
rospy.init_node('boundingBox')
boundingBox = Bound('/camera/rgb/image_color')
rospy.spin()
| {
"content_hash": "f0f19842b572786b05e00437fc5c44bc",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 132,
"avg_line_length": 39.392405063291136,
"alnum_prop": 0.5909383033419023,
"repo_name": "OSUrobotics/privacy-interfaces",
"id": "9ff07f676dee9c1243b30a7d0ad552c6ee4d1097",
"size": "3135",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "filtering/privacy/scripts/magic_board.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "83936"
},
{
"name": "C++",
"bytes": "1360235"
},
{
"name": "CMake",
"bytes": "46381"
},
{
"name": "Matlab",
"bytes": "2021"
},
{
"name": "Objective-C",
"bytes": "316"
},
{
"name": "Python",
"bytes": "364838"
}
],
"symlink_target": ""
} |
from nosuch.midiutil import *
from nosuch.midifile import *
from nosuch.oscutil import *
from nosuch.midiosc import *
from traceback import format_exc
from time import sleep
import sys
time0 = time.time()
def mycallback(ev,outfile):
global time0
tm = time.time()-time0
line = "[%.6f"%tm
for m in ev.oscmsg:
line = line + "," + str(m)
line = line + "]\n"
outfile.write(line)
if __name__ == '__main__':
if len(sys.argv) < 3:
print "Usage: oscrecord {port@addr} {outputfile}"
sys.exit(1)
input_name = sys.argv[1]
output_name = sys.argv[2]
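    # input_name has the form "port@host": the first regex grabs everything
    # before the '@' (the port), the second everything after it (the host).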
port = re.compile(".*@").search(input_name).group()[:-1]
host = re.compile("@.*").search(input_name).group()[1:]
print "host=",host," port=",port," outputfile=",output_name
outfile = open(output_name,"w")
oscmon = OscMonitor(host,port)
oscmon.setcallback(mycallback,outfile)
sleep(3600) # an hour
| {
"content_hash": "152a19cc8df72412908ee76e1b0d4235",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 60,
"avg_line_length": 23.35135135135135,
"alnum_prop": 0.6678240740740741,
"repo_name": "nosuchtim/MultiMultiTouchTouch",
"id": "ae5f0c663284eb388385f4effd2580e31e523618",
"size": "864",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/oscutil/oscrecord.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "6267"
},
{
"name": "C",
"bytes": "20934"
},
{
"name": "C++",
"bytes": "497843"
},
{
"name": "HTML",
"bytes": "61977"
},
{
"name": "JavaScript",
"bytes": "8557"
},
{
"name": "Makefile",
"bytes": "694"
},
{
"name": "Processing",
"bytes": "24705"
},
{
"name": "Python",
"bytes": "37325"
}
],
"symlink_target": ""
} |
from OvmCommonModule import *
from OVSSiteRMServer import get_master_ip, register_server
from OVSCommons import *
from OVSXMonitor import xen_get_xm_info
from OVSXSysInfo import get_agent_version
from OVSSiteRMServer import get_srv_agent_status
from OVSXMonitor import sys_perf_info
from OVSDB import db_get_vm
from OvmStoragePoolModule import OvmStoragePool
from OvmHaHeartBeatModule import OvmHaHeartBeat
import re
logger = OvmLogger('OvmHost')
class OvmHostEncoder(json.JSONEncoder):
def default(self, obj):
if not isinstance(obj, OvmHost): raise Exception("%s is not instance of OvmHost"%type(obj))
dct = {}
safeDictSet(obj, dct, 'masterIp')
safeDictSet(obj, dct, 'cpuNum')
safeDictSet(obj, dct, 'cpuSpeed')
safeDictSet(obj, dct, 'totalMemory')
safeDictSet(obj, dct, 'freeMemory')
safeDictSet(obj, dct, 'dom0Memory')
safeDictSet(obj, dct, 'agentVersion')
safeDictSet(obj, dct, 'name')
safeDictSet(obj, dct, 'dom0KernelVersion')
safeDictSet(obj, dct, 'hypervisorVersion')
return dct
def fromOvmHost(host):
return normalizeToGson(json.dumps(host, cls=OvmHostEncoder))
class OvmHost(OvmObject):
masterIp = ''
cpuNum = 0
cpuSpeed = 0
totalMemory = 0
freeMemory = 0
dom0Memory = 0
agentVersion = ''
name = ''
dom0KernelVersion = ''
hypervisorVersion = ''
def _getVmPathFromPrimaryStorage(self, vmName):
        '''
        We don't have a database to store vm states, so there is no way to
        retrieve information about a vm once it has stopped. The trick is to
        find the vm path on primary storage and then read the information
        from its configuration file.
        '''
mps = OvmStoragePool()._getAllMountPoints()
vmPath = None
for p in mps:
vmPath = join(p, 'running_pool', vmName)
if exists(vmPath): break
if not vmPath:
logger.error(self._getVmPathFromPrimaryStorage, "Cannot find link for %s in any primary storage, the vm was really gone!"%vmName)
raise Exception("Cannot find link for %s in any primary storage, the vm was really gone!"%vmName)
return vmPath
def _vmNameToPath(self, vmName):
# the xen_get_vm_path always sucks!!!
#return successToMap((vmName))['path']
return self._getVmPathFromPrimaryStorage(vmName)
def _getAllDomains(self):
stdout = timeout_command(["xm", "list"])
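        # Parse "xm list" output: keep the first two columns (name, domain id)
        # and drop the header row and Domain-0.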
l = [ line.split()[:2] for line in stdout.splitlines() ]
l = [ (name, id) for (name, id) in l if name not in ("Name", "Domain-0") ]
return l
def _getDomainIdByName(self, vmName):
l = self._getAllDomains()
for name, id in l:
if vmName == name: return id
raise NoVmFoundException("No domain id for %s found"%vmName)
@staticmethod
def registerAsMaster(hostname, username="oracle", password="password", port=8899, isSsl=False):
try:
logger.debug(OvmHost.registerAsMaster, "ip=%s, username=%s, password=%s, port=%s, isSsl=%s"%(hostname, username, password, port, isSsl))
exceptionIfNoSuccess(register_server(hostname, 'site', False, username, password, port, isSsl),
"Register %s as site failed"%hostname)
exceptionIfNoSuccess(register_server(hostname, 'utility', False, username, password, port, isSsl),
"Register %s as utility failed"%hostname)
rs = SUCC()
return rs
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.registerAsMaster, errmsg)
raise XmlRpcFault(toErrCode(OvmHost, OvmHost.registerAsMaster), errmsg)
@staticmethod
def registerAsVmServer(hostname, username="oracle", password="password", port=8899, isSsl=False):
try:
logger.debug(OvmHost.registerAsVmServer, "ip=%s, username=%s, password=%s, port=%s, isSsl=%s"%(hostname, username, password, port, isSsl))
exceptionIfNoSuccess(register_server(hostname, 'xen', False, username, password, port, isSsl),
"Register %s as site failed"%hostname)
rs = SUCC()
return rs
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.registerAsVmServer, errmsg)
raise XmlRpcFault(toErrCode(OvmHost, OvmHost.registerAsVmServer), errmsg)
@staticmethod
def ping(hostname):
try:
logger.debug(OvmHost.ping, "ping %s"%hostname)
exceptionIfNoSuccess(get_srv_agent_status(hostname), "Ovs agent is down")
rs = SUCC()
return rs
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.ping, errmsg)
            raise XmlRpcFault(toErrCode(OvmHost, OvmHost.ping), errmsg)
@staticmethod
def getDetails():
try:
obj = OvmHost()
masterIp = successToMap(get_master_ip())
safeSetAttr(obj, 'masterIp', masterIp['ip'])
xmInfo = successToMap(xen_get_xm_info())
totalMemory = MtoBytes(long(xmInfo['total_memory']))
safeSetAttr(obj, 'totalMemory', totalMemory)
freeMemory = MtoBytes(long(xmInfo['free_memory']))
safeSetAttr(obj, 'freeMemory', freeMemory)
dom0Memory = totalMemory - freeMemory
safeSetAttr(obj, 'dom0Memory', dom0Memory)
cpuNum = int(xmInfo['nr_cpus'])
safeSetAttr(obj, 'cpuNum', cpuNum)
cpuSpeed = int(xmInfo['cpu_mhz'])
safeSetAttr(obj, 'cpuSpeed', cpuSpeed)
name = xmInfo['host']
safeSetAttr(obj, 'name', name)
dom0KernelVersion = xmInfo['release']
safeSetAttr(obj, 'dom0KernelVersion', dom0KernelVersion)
hypervisorVersion = xmInfo['xen_major'] + '.' + xmInfo['xen_minor'] + xmInfo['xen_extra']
safeSetAttr(obj, 'hypervisorVersion', hypervisorVersion)
agtVersion = successToMap(get_agent_version())
safeSetAttr(obj, 'agentVersion', agtVersion['agent_version'])
res = fromOvmHost(obj)
logger.debug(OvmHost.getDetails, res)
return res
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.getDetails, errmsg)
raise XmlRpcFault(toErrCode(OvmHost, OvmHost.getDetails), errmsg)
@staticmethod
def getPerformanceStats(bridgeName):
try:
rxBytesPath = join("/sys/class/net/", bridgeName, "statistics/rx_bytes")
txBytesPath = join("/sys/class/net/", bridgeName, "statistics/tx_bytes")
if not exists(rxBytesPath): raise Exception("Cannot find %s"%rxBytesPath)
if not exists(txBytesPath): raise Exception("Cannot find %s"%txBytesPath)
rxBytes = long(doCmd(['cat', rxBytesPath])) / 1000
txBytes = long(doCmd(['cat', txBytesPath])) / 1000
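            # The /sys statistics counters are raw byte counts; they are
            # scaled down by 1000 (presumably to kB) before being reported.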
sysPerf = successToMap(sys_perf_info())
cpuUtil = float(100 - float(sysPerf['cpu_idle']) * 100)
freeMemory = MtoBytes(long(sysPerf['mem_free']))
xmInfo = successToMap(xen_get_xm_info())
totalMemory = MtoBytes(long(xmInfo['total_memory']))
rs = toGson({"cpuUtil":cpuUtil, "totalMemory":totalMemory, "freeMemory":freeMemory, "rxBytes":rxBytes, "txBytes":txBytes})
logger.info(OvmHost.getPerformanceStats, rs)
return rs
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.getPerformanceStats, errmsg)
raise XmlRpcFault(toErrCode(OvmHost, OvmHost.getPerformanceStats), errmsg)
@staticmethod
def getAllVms():
def scanStoppedVmOnPrimaryStorage(vms):
def isMyVmDirLink(path):
return (islink(path) and exists(join(path, 'vm.cfg')) and ('-' in basename(path)) and (exists(join(path, makeOwnerFileName()))))
mps = OvmStoragePool()._getAllMountPoints()
for mountPoint in mps:
runningPool = join(mountPoint, 'running_pool')
if not exists(runningPool):
logger.debug(OvmHost.getAllVms, "Primary storage %s not existing, skip it. this should be first getAllVms() called from Ovm resource configure"%runningPool)
continue
for dir in os.listdir(runningPool):
vmDir = join(runningPool, dir)
if not isMyVmDirLink(vmDir):
logger.debug(OvmHost.getAllVms, "%s is not our vm directory, skip it"%vmDir)
continue
if vms.has_key(dir):
logger.debug(OvmHost.getAllVms, "%s is already in running list, skip it"%dir)
continue
logger.debug(OvmHost.getAllVms, "Found a stopped vm %s on primary storage %s, report it to management server" % (dir, mountPoint))
vms[dir] = "DOWN"
try:
l = OvmHost()._getAllDomains()
dct = {}
host = OvmHost()
for name, id in l:
try:
vmPath = host._getVmPathFromPrimaryStorage(name)
vmStatus = db_get_vm(vmPath)
dct[name] = vmStatus['status']
except Exception, e:
logger.debug(OvmHost.getAllVms, "Cannot find link for %s on primary storage, treat it as Error"%name)
dct[name] = 'ERROR'
scanStoppedVmOnPrimaryStorage(dct)
rs = toGson(dct)
logger.info(OvmHost.getAllVms, rs)
return rs
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.getAllVms, errmsg)
raise XmlRpcFault(toErrCode(OvmHost, OvmHost.getAllVms), errmsg)
@staticmethod
def fence(ip):
        # try 3 times to avoid a race condition where we read while the heartbeat file is being written
def getTimeStamp(hbFile):
for i in range(1, 3):
f = open(hbFile, 'r')
str = f.readline()
items = re.findall(HEARTBEAT_TIMESTAMP_PATTERN, str)
if len(items) == 0:
logger.debug(OvmHost.fence, "Get an incorrect heartbeat data %s, will retry %s times" % (str, 3-i))
f.close()
time.sleep(5)
else:
f.close()
timestamp = items[0]
return timestamp.lstrip('<timestamp>').rstrip('</timestamp>')
# totally check in 6 mins, the update frequency is 2 mins
def check(hbFile):
for i in range(1, 6):
ts = getTimeStamp(hbFile)
time.sleep(60)
nts = getTimeStamp(hbFile)
if ts != nts: return True
else: logger.debug(OvmHost.fence, '%s is not updated, old value=%s, will retry %s times'%(hbFile, ts, 6-i))
return False
try:
mountpoints = OvmStoragePool()._getAllMountPoints()
hbFile = None
for m in mountpoints:
p = join(m, HEARTBEAT_DIR, ipToHeartBeatFileName(ip))
if exists(p):
hbFile = p
break
if not hbFile: raise Exception('Can not find heartbeat file for %s in pools %s'%(ip, mountpoints))
rs = toGson({"isLive":check(hbFile)})
logger.debug(OvmHost.fence, rs)
return rs
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.fence, errmsg)
raise XmlRpcFault(toErrCode(OvmHost, OvmHost.fence), errmsg)
@staticmethod
def setupHeartBeat(poolUuid, ip):
try:
sr = OvmStoragePool()._getSrByNameLable(poolUuid)
OvmHaHeartBeat.start(sr.mountpoint, ip)
return SUCC()
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.setupHeartBeat, errmsg)
raise XmlRpcFault(toErrCode(OvmHost, OvmHost.setupHeartBeat), errmsg)
@staticmethod
def pingAnotherHost(ip):
try:
doCmd(['ping', '-c', '1', '-n', '-q', ip])
return SUCC()
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.pingAnotherHost, errmsg)
raise XmlRpcFault(toErrCode(OvmHost, OvmHost.pingAnotherHost), errmsg)
if __name__ == "__main__":
    print OvmHost.getAllVms()
| {
"content_hash": "e68181413b3cf0c088bbe04ce5601981",
"timestamp": "",
"source": "github",
"line_count": 290,
"max_line_length": 176,
"avg_line_length": 44.5,
"alnum_prop": 0.578767919411081,
"repo_name": "cinderella/incubator-cloudstack",
"id": "3c61500630d7aef191c5cc95d41a5be784740748",
"size": "13693",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "plugins/hypervisors/ovm/scripts/vm/hypervisor/ovm/OvmHostModule.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "30051196"
},
{
"name": "JavaScript",
"bytes": "2530360"
},
{
"name": "Perl",
"bytes": "184903"
},
{
"name": "Python",
"bytes": "2076305"
},
{
"name": "Shell",
"bytes": "471280"
}
],
"symlink_target": ""
} |
from horizon import views
class IndexView(views.APIView):
# A very simple class-based view...
template_name = 'monitor/regulation/index.html'
def get_data(self, request, context, *args, **kwargs):
# Add data to the context here...
return context
| {
"content_hash": "f30f7281c2a7339d88346105b1eebf59",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 58,
"avg_line_length": 27.7,
"alnum_prop": 0.6678700361010831,
"repo_name": "zouyapeng/horizon_change",
"id": "d7b11a100ae9d260617d3bf716ae9b58eaff9c2f",
"size": "277",
"binary": false,
"copies": "2",
"ref": "refs/heads/juno",
"path": "openstack_dashboard/dashboards/monitor/regulation/views.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2270222"
},
{
"name": "HTML",
"bytes": "427249"
},
{
"name": "JavaScript",
"bytes": "270670"
},
{
"name": "Makefile",
"bytes": "588"
},
{
"name": "Python",
"bytes": "4048852"
},
{
"name": "Shell",
"bytes": "17483"
}
],
"symlink_target": ""
} |
from django.apps import AppConfig
class RentalsConfig(AppConfig):
name = 'rentals'
| {
"content_hash": "5c5d330e9abc2e6ee514130c9c40fbda",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 33,
"avg_line_length": 17.8,
"alnum_prop": 0.7528089887640449,
"repo_name": "s-take/myequipment",
"id": "282370175c336a7f49c4791edf522754bf4d7718",
"size": "89",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rentals/apps.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1744"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Python",
"bytes": "12880"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import sys
import os
import tempfile
import time
import multiprocessing as mp
import unittest
import random
import mxnet as mx
import numpy as np
import math
from nose.tools import assert_raises
from mxnet.test_utils import check_consistency, set_default_context, assert_almost_equal
from mxnet.base import MXNetError
from mxnet import autograd
from numpy.testing import assert_allclose
from mxnet.test_utils import rand_ndarray
curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
sys.path.insert(0, os.path.join(curr_path, '../unittest'))
from common import setup_module, with_seed, teardown, assert_raises_cudnn_not_satisfied
from test_gluon import *
from test_loss import *
from test_gluon_rnn import *
set_default_context(mx.gpu(0))
def check_rnn_layer(layer):
layer.collect_params().initialize(ctx=[mx.cpu(0), mx.gpu(0)])
with mx.gpu(0):
x = mx.nd.ones((10, 16, 30))
states = layer.begin_state(16)
go, gs = layer(x, states)
with mx.cpu(0):
x = mx.nd.ones((10, 16, 30))
states = layer.begin_state(16)
co, cs = layer(x, states)
# atol of 1e-6 required, as exposed by seed 2124685726
assert_almost_equal(go.asnumpy(), co.asnumpy(), rtol=1e-2, atol=1e-6)
for g, c in zip(gs, cs):
assert_almost_equal(g.asnumpy(), c.asnumpy(), rtol=1e-2, atol=1e-6)
@with_seed()
def check_rnn_layer_w_rand_inputs(layer):
layer.collect_params().initialize(ctx=[mx.cpu(0), mx.gpu(0)])
x = mx.nd.uniform(shape=(10, 16, 30))
with mx.gpu(0):
x = x.copyto(mx.gpu(0))
states = layer.begin_state(16)
go, gs = layer(x, states)
with mx.cpu(0):
x = x.copyto(mx.cpu(0))
states = layer.begin_state(16)
co, cs = layer(x, states)
assert_almost_equal(go.asnumpy(), co.asnumpy(), rtol=1e-2, atol=1e-6)
for g, c in zip(gs, cs):
assert_almost_equal(g.asnumpy(), c.asnumpy(), rtol=1e-2, atol=1e-6)
@with_seed()
@assert_raises_cudnn_not_satisfied(min_version='7.2.1')
def test_lstmp():
hidden_size, projection_size = 3, 2
rtol, atol = 1e-2, 1e-2
batch_size, seq_len = 7, 11
input_size = 5
lstm_input = mx.nd.uniform(shape=(seq_len, batch_size, input_size), ctx=mx.gpu(0))
shapes = {'i2h_weight': (hidden_size*4, input_size),
'h2h_weight': (hidden_size*4, projection_size),
'i2h_bias': (hidden_size*4,),
'h2h_bias': (hidden_size*4,),
'h2r_weight': (projection_size, hidden_size)}
weights = {k: rand_ndarray(v) for k, v in shapes.items()}
lstm_layer = gluon.rnn.LSTM(hidden_size, projection_size=projection_size,
input_size=input_size, prefix='lstm0_')
lstm_cell = gluon.contrib.rnn.LSTMPCell(hidden_size=hidden_size,
projection_size=projection_size,
input_size=input_size,
prefix='lstm0_l0_')
lstm_layer.initialize(ctx=mx.gpu(0))
lstm_cell.initialize(ctx=mx.gpu(0))
layer_params = lstm_layer.collect_params()
cell_params = lstm_cell.collect_params()
for k, v in weights.items():
layer_params['lstm0_l0_'+k].set_data(v.copy())
cell_params['lstm0_l0_'+k].set_data(v.copy())
with autograd.record():
layer_output = lstm_layer(lstm_input.copy())
cell_output = lstm_cell.unroll(seq_len, lstm_input.copy(), layout='TNC',
merge_outputs=True)[0]
assert_almost_equal(layer_output.asnumpy(), cell_output.asnumpy(), rtol=rtol, atol=atol)
layer_output.backward()
cell_output.backward()
for k, v in weights.items():
layer_grad = layer_params['lstm0_l0_'+k].grad()
cell_grad = cell_params['lstm0_l0_'+k].grad()
print('checking gradient for {}'.format('lstm0_l0_'+k))
assert_almost_equal(layer_grad.asnumpy(), cell_grad.asnumpy(),
rtol=rtol, atol=atol)
check_rnn_layer_forward(gluon.rnn.LSTM(10, 2, projection_size=5), mx.nd.ones((8, 3, 20)))
check_rnn_layer_forward(gluon.rnn.LSTM(10, 2, projection_size=5, bidirectional=True), mx.nd.ones((8, 3, 20)), [mx.nd.ones((4, 3, 5)), mx.nd.ones((4, 3, 10))])
check_rnn_layer_forward(gluon.rnn.LSTM(10, 2, dropout=0.5, projection_size=5), mx.nd.ones((8, 3, 20)),
run_only=True)
check_rnn_layer_forward(gluon.rnn.LSTM(10, 2, bidirectional=True, dropout=0.5, projection_size=5),
mx.nd.ones((8, 3, 20)),
[mx.nd.ones((4, 3, 5)), mx.nd.ones((4, 3, 10))], run_only=True)
@with_seed()
@assert_raises_cudnn_not_satisfied(min_version='7.2.1')
def test_lstm_clip():
hidden_size, projection_size = 4096, 2048
batch_size, seq_len = 32, 80
input_size = 50
clip_min, clip_max, clip_nan = -5, 5, True
lstm_input = mx.nd.uniform(shape=(seq_len, batch_size, input_size), ctx=mx.gpu(0))
lstm_states = [mx.nd.uniform(shape=(2, batch_size, projection_size), ctx=mx.gpu(0)),
mx.nd.uniform(shape=(2, batch_size, hidden_size), ctx=mx.gpu(0))]
lstm_layer = gluon.rnn.LSTM(hidden_size, projection_size=projection_size,
input_size=input_size, prefix='lstm0_',
bidirectional=True,
state_clip_min=clip_min,
state_clip_max=clip_max,
state_clip_nan=clip_nan)
lstm_layer.initialize(ctx=mx.gpu(0))
with autograd.record():
_, layer_output_states = lstm_layer(lstm_input, lstm_states)
cell_states = layer_output_states[0].asnumpy()
assert (cell_states >= clip_min).all() and (cell_states <= clip_max).all()
assert not np.isnan(cell_states).any()
@with_seed()
@assert_raises_cudnn_not_satisfied(min_version='5.1.10')
def test_rnn_layer():
check_rnn_layer(gluon.rnn.RNN(100, num_layers=3))
check_rnn_layer(gluon.rnn.RNN(100, activation='tanh', num_layers=3))
check_rnn_layer(gluon.rnn.LSTM(100, num_layers=3))
check_rnn_layer(gluon.rnn.GRU(100, num_layers=3))
check_rnn_layer(gluon.rnn.LSTM(100, num_layers=3, bidirectional=True))
check_rnn_layer_w_rand_inputs(gluon.rnn.LSTM(100, num_layers=3, bidirectional=True))
def check_layer_bidirectional(size, in_size, proj_size):
class RefBiLSTM(gluon.Block):
def __init__(self, size, proj_size, **kwargs):
super(RefBiLSTM, self).__init__(**kwargs)
with self.name_scope():
self._lstm_fwd = gluon.rnn.LSTM(size, projection_size=proj_size, bidirectional=False, prefix='l0')
self._lstm_bwd = gluon.rnn.LSTM(size, projection_size=proj_size, bidirectional=False, prefix='r0')
def forward(self, inpt):
fwd = self._lstm_fwd(inpt)
bwd_inpt = nd.flip(inpt, 0)
bwd = self._lstm_bwd(bwd_inpt)
bwd = nd.flip(bwd, 0)
return nd.concat(fwd, bwd, dim=2)
weights = {}
for d in ['l', 'r']:
weights['lstm_{}0_i2h_weight'.format(d)] = mx.random.uniform(shape=(size*4, in_size))
if proj_size:
weights['lstm_{}0_h2h_weight'.format(d)] = mx.random.uniform(shape=(size*4, proj_size))
weights['lstm_{}0_h2r_weight'.format(d)] = mx.random.uniform(shape=(proj_size, size))
else:
weights['lstm_{}0_h2h_weight'.format(d)] = mx.random.uniform(shape=(size*4, size))
weights['lstm_{}0_i2h_bias'.format(d)] = mx.random.uniform(shape=(size*4,))
weights['lstm_{}0_h2h_bias'.format(d)] = mx.random.uniform(shape=(size*4,))
net = gluon.rnn.LSTM(size, projection_size=proj_size, bidirectional=True, prefix='lstm_')
ref_net = RefBiLSTM(size, proj_size, prefix='lstm_')
net.initialize()
ref_net.initialize()
net_params = net.collect_params()
ref_net_params = ref_net.collect_params()
for k in weights:
net_params[k].set_data(weights[k])
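        # The reference net wraps each direction in its own single-layer LSTM,
        # so its parameter names gain an extra layer prefix ('l0' -> 'l0l0',
        # 'r0' -> 'r0l0').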
ref_net_params[k.replace('l0', 'l0l0').replace('r0', 'r0l0')].set_data(weights[k])
data = mx.random.uniform(shape=(11, 10, in_size))
assert_allclose(net(data).asnumpy(), ref_net(data).asnumpy())
@with_seed()
@assert_raises_cudnn_not_satisfied(min_version='5.1.10')
def test_layer_bidirectional():
check_layer_bidirectional(7, 5, 0)
@with_seed()
@assert_raises_cudnn_not_satisfied(min_version='7.2.1')
def test_layer_bidirectional_proj():
check_layer_bidirectional(7, 5, 3)
@with_seed()
@assert_raises_cudnn_not_satisfied(min_version='5.1.10')
def test_rnn_layer_begin_state_type():
fake_data = nd.random.uniform(shape=(3, 5, 7), dtype='float16')
modeling_layer = gluon.rnn.LSTM(hidden_size=11, num_layers=2, dropout=0.2, bidirectional=True)
modeling_layer.cast('float16')
modeling_layer.initialize()
modeling_layer(fake_data)
def test_gluon_ctc_consistency():
loss = mx.gluon.loss.CTCLoss()
data = mx.nd.arange(0, 4, repeat=40, ctx=mx.gpu(0)).reshape((2,20,4)).flip(axis=0)
cpu_label = mx.nd.array([[2,1,-1,-1],[3,2,2,-1]], ctx=mx.cpu(0))
gpu_label = mx.nd.array([[2,1,-1,-1],[3,2,2,-1]], ctx=mx.gpu(0))
cpu_data = data.copy().as_in_context(mx.cpu(0))
cpu_data.attach_grad()
with mx.autograd.record():
l_cpu = loss(cpu_data, cpu_label)
l_cpu.backward()
gpu_data = data.copyto(mx.gpu(0))
gpu_data.attach_grad()
with mx.autograd.record():
l_gpu = loss(gpu_data, gpu_label)
l_gpu.backward()
assert_almost_equal(cpu_data.grad.asnumpy(), gpu_data.grad.asnumpy(), atol=1e-3, rtol=1e-3)
@with_seed()
def test_global_norm_clip_multi_device():
for check_isfinite in [True, False]:
x1 = mx.nd.ones((3,3), ctx=mx.gpu(0))
x2 = mx.nd.ones((4,4), ctx=mx.cpu(0))
norm = gluon.utils.clip_global_norm([x1, x2], 1.0, check_isfinite=check_isfinite)
if check_isfinite:
assert norm == 5.0
else:
assert norm.asscalar() == 5.0
assert_almost_equal(x1.asnumpy(), np.ones((3, 3)) / 5)
assert_almost_equal(x2.asnumpy(), np.ones((4, 4)) / 5)
def _check_batchnorm_result(input, num_devices=1, cuda=False):
from mxnet.gluon.utils import split_and_load
def _find_bn(module):
if isinstance(module, (mx.gluon.nn.BatchNorm, mx.gluon.contrib.nn.SyncBatchNorm)):
return module
elif isinstance(module.module, (mx.gluon.nn.BatchNorm, mx.gluon.contrib.nn.SyncBatchNorm)):
return module.module
raise RuntimeError('BN not found')
def _syncParameters(bn1, bn2, ctx):
ctx = input.context
bn2.gamma.set_data(bn1.gamma.data(ctx))
bn2.beta.set_data(bn1.beta.data(ctx))
bn2.running_mean.set_data(bn1.running_mean.data(ctx))
bn2.running_var.set_data(bn1.running_var.data(ctx))
input1 = input.copy()
input2 = input.copy()
if cuda:
input1 = input.as_in_context(mx.gpu(0))
ctx_list = [mx.gpu(i) for i in range(num_devices)]
else:
ctx_list = [mx.cpu(0) for _ in range(num_devices)]
nch = input.shape[1]
bn1 = mx.gluon.nn.BatchNorm(in_channels=nch)
bn2 = mx.gluon.contrib.nn.SyncBatchNorm(in_channels=nch, num_devices=num_devices)
bn1.initialize(ctx=ctx_list[0])
bn2.initialize(ctx=ctx_list)
# using the same values for gamma and beta
#_syncParameters(_find_bn(bn1), _find_bn(bn2), ctx_list[0])
input1.attach_grad()
inputs2 = split_and_load(input2, ctx_list, batch_axis=0)
for xi in inputs2:
xi.attach_grad()
with mx.autograd.record():
output1 = bn1(input1)
output2 = [bn2(xi) for xi in inputs2]
loss1 = (output1 ** 2).sum()
loss2 = [(output ** 2).sum() for output in output2]
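        # A sum-of-squares loss makes d(loss)/d(output) = 2*output, so
        # comparing the resulting input gradients directly exercises the
        # BatchNorm backward pass.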
mx.autograd.backward(loss1)
mx.autograd.backward(loss2)
output2 = mx.nd.concat(*[output.as_in_context(input.context) for output in output2], dim=0)
# assert forwarding
assert_almost_equal(input1.asnumpy(), input2.asnumpy(), atol=1e-3, rtol=1e-3)
assert_almost_equal(output1.asnumpy(), output2.asnumpy(), atol=1e-3, rtol=1e-3)
assert_almost_equal(_find_bn(bn1).running_mean.data(ctx_list[0]).asnumpy(),
_find_bn(bn2).running_mean.data(ctx_list[0]).asnumpy(),
atol=1e-3, rtol=1e-3)
assert_almost_equal(_find_bn(bn1).running_var.data(ctx_list[0]).asnumpy(),
_find_bn(bn2).running_var.data(ctx_list[0]).asnumpy(),
atol=1e-3, rtol=1e-3)
input2grad = mx.nd.concat(*[output.grad.as_in_context(input.context) for output in inputs2], dim=0)
assert_almost_equal(input1.grad.asnumpy(), input2grad.asnumpy(), atol=1e-3, rtol=1e-3)
@with_seed()
def test_sync_batchnorm():
def get_num_devices():
for i in range(100):
try:
mx.nd.zeros((1,), ctx=mx.gpu(i))
except:
return i
# no need to use SyncBN with 1 gpu
if get_num_devices() < 2:
return
ndev = 2
# check with unsync version
for i in range(10):
_check_batchnorm_result(mx.nd.random.uniform(shape=(4, 1, 4, 4)),
num_devices=ndev, cuda=True)
@with_seed()
def test_symbol_block_fp16():
    # Test case to verify that a SymbolBlock can be initialized from a model
    # whose params use a dtype other than fp32.
# 1. Load a resnet model, cast it to fp16 and export
tmp = tempfile.mkdtemp()
tmpfile = os.path.join(tmp, 'resnet34_fp16')
ctx = mx.gpu(0)
net_fp32 = mx.gluon.model_zoo.vision.resnet34_v2(pretrained=True, ctx=ctx, root=tmp)
net_fp32.cast('float16')
net_fp32.hybridize()
data = mx.nd.zeros((1,3,224,224), dtype='float16', ctx=ctx)
net_fp32.forward(data)
net_fp32.export(tmpfile, 0)
    # 2. Load the saved model and verify that all the params are loaded
    # correctly, then choose one of the params to verify that its dtype is fp16.
sm = mx.sym.load(tmpfile + '-symbol.json')
inputs = mx.sym.var('data', dtype='float16')
net_fp16 = mx.gluon.SymbolBlock(sm, inputs)
net_fp16.collect_params().load(tmpfile + '-0000.params', ctx=ctx)
    # 3. Get a conv layer's weight parameter name. The conv layer's weight
    # param is expected to be of the casted dtype, fp16.
for param_name in net_fp16.params.keys():
if 'conv' in param_name and 'weight' in param_name:
break
assert np.dtype(net_fp16.params[param_name].dtype) == np.dtype(np.float16)
@with_seed()
def test_large_models():
ctx = default_context()
# Create model
net = gluon.nn.HybridSequential()
largest_num_features = 256
with net.name_scope():
net.add(nn.Conv2D(largest_num_features, 3))
net.hybridize()
net.initialize(mx.init.Normal(sigma=0.01), ctx=ctx)
# Compute the height (=width) of the square tensor of the given size in bytes
def tensor_size(big_tensor_bytes):
bytes_per_float = 4
sz = int(math.sqrt(big_tensor_bytes / largest_num_features / bytes_per_float))
return (sz // 100) * 100
# The idea is to create models with large tensors of (say) 20% of the total memory.
# This in the past has given cudnnFind() trouble when it needed to allocate similar I/O's
# from the area carved out by the MXNET_GPU_MEM_POOL_RESERVE setting (by default 5%).
(free_mem_bytes, total_mem_bytes) = mx.context.gpu_memory_info(ctx.device_id)
start_size = tensor_size(0.20 * total_mem_bytes)
num_trials = 10
sys.stderr.write(' testing global memory of size {} ... '.format(total_mem_bytes))
sys.stderr.flush()
for i in range(num_trials):
sz = start_size - 10 * i
(height, width) = (sz,sz)
sys.stderr.write(" {}x{} ".format(height,width))
sys.stderr.flush()
data_in = nd.random_uniform(low=0, high=255, shape=(1, 3, height, width),
ctx=ctx, dtype="float32")
# Evaluate model
net(data_in).asnumpy()
if __name__ == '__main__':
import nose
nose.runmodule()
| {
"content_hash": "8d0a62bc1c4badf73aeb8fdb2f1934b6",
"timestamp": "",
"source": "github",
"line_count": 397,
"max_line_length": 162,
"avg_line_length": 40.72544080604534,
"alnum_prop": 0.6147946561108362,
"repo_name": "ptrendx/mxnet",
"id": "54bfcee4734778f68ecb6cc9c2da8730536ca55b",
"size": "16954",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/python/gpu/test_gluon_gpu.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1731"
},
{
"name": "Batchfile",
"bytes": "13130"
},
{
"name": "C",
"bytes": "174781"
},
{
"name": "C++",
"bytes": "6495727"
},
{
"name": "CMake",
"bytes": "90332"
},
{
"name": "Clojure",
"bytes": "503982"
},
{
"name": "Cuda",
"bytes": "838222"
},
{
"name": "Dockerfile",
"bytes": "61391"
},
{
"name": "Groovy",
"bytes": "97961"
},
{
"name": "HTML",
"bytes": "40277"
},
{
"name": "Java",
"bytes": "181373"
},
{
"name": "Julia",
"bytes": "427299"
},
{
"name": "Jupyter Notebook",
"bytes": "3613882"
},
{
"name": "MATLAB",
"bytes": "34903"
},
{
"name": "Makefile",
"bytes": "178070"
},
{
"name": "Perl",
"bytes": "1535873"
},
{
"name": "Perl 6",
"bytes": "7280"
},
{
"name": "PowerShell",
"bytes": "11478"
},
{
"name": "Python",
"bytes": "6375321"
},
{
"name": "R",
"bytes": "357740"
},
{
"name": "Scala",
"bytes": "1256104"
},
{
"name": "Shell",
"bytes": "397784"
},
{
"name": "Smalltalk",
"bytes": "3497"
}
],
"symlink_target": ""
} |
from copy import deepcopy
from typing import Any, Awaitable, TYPE_CHECKING
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from .. import models
from ..._serialization import Deserializer, Serializer
from ._configuration import ComputeManagementClientConfiguration
from .operations import (
AvailabilitySetsOperations,
CapacityReservationGroupsOperations,
CapacityReservationsOperations,
DedicatedHostGroupsOperations,
DedicatedHostsOperations,
ImagesOperations,
LogAnalyticsOperations,
Operations,
ProximityPlacementGroupsOperations,
RestorePointCollectionsOperations,
RestorePointsOperations,
SshPublicKeysOperations,
UsageOperations,
VirtualMachineExtensionImagesOperations,
VirtualMachineExtensionsOperations,
VirtualMachineImagesEdgeZoneOperations,
VirtualMachineImagesOperations,
VirtualMachineRunCommandsOperations,
VirtualMachineScaleSetExtensionsOperations,
VirtualMachineScaleSetRollingUpgradesOperations,
VirtualMachineScaleSetVMExtensionsOperations,
VirtualMachineScaleSetVMRunCommandsOperations,
VirtualMachineScaleSetVMsOperations,
VirtualMachineScaleSetsOperations,
VirtualMachineSizesOperations,
VirtualMachinesOperations,
)
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class ComputeManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes
"""Compute Client.
:ivar operations: Operations operations
:vartype operations: azure.mgmt.compute.v2022_08_01.aio.operations.Operations
:ivar usage: UsageOperations operations
:vartype usage: azure.mgmt.compute.v2022_08_01.aio.operations.UsageOperations
:ivar virtual_machine_sizes: VirtualMachineSizesOperations operations
:vartype virtual_machine_sizes:
azure.mgmt.compute.v2022_08_01.aio.operations.VirtualMachineSizesOperations
:ivar virtual_machine_scale_sets: VirtualMachineScaleSetsOperations operations
:vartype virtual_machine_scale_sets:
azure.mgmt.compute.v2022_08_01.aio.operations.VirtualMachineScaleSetsOperations
:ivar virtual_machine_scale_set_extensions: VirtualMachineScaleSetExtensionsOperations
operations
:vartype virtual_machine_scale_set_extensions:
azure.mgmt.compute.v2022_08_01.aio.operations.VirtualMachineScaleSetExtensionsOperations
:ivar virtual_machine_scale_set_rolling_upgrades:
VirtualMachineScaleSetRollingUpgradesOperations operations
:vartype virtual_machine_scale_set_rolling_upgrades:
azure.mgmt.compute.v2022_08_01.aio.operations.VirtualMachineScaleSetRollingUpgradesOperations
:ivar virtual_machine_scale_set_vm_extensions: VirtualMachineScaleSetVMExtensionsOperations
operations
:vartype virtual_machine_scale_set_vm_extensions:
azure.mgmt.compute.v2022_08_01.aio.operations.VirtualMachineScaleSetVMExtensionsOperations
:ivar virtual_machine_scale_set_vms: VirtualMachineScaleSetVMsOperations operations
:vartype virtual_machine_scale_set_vms:
azure.mgmt.compute.v2022_08_01.aio.operations.VirtualMachineScaleSetVMsOperations
:ivar virtual_machine_extensions: VirtualMachineExtensionsOperations operations
:vartype virtual_machine_extensions:
azure.mgmt.compute.v2022_08_01.aio.operations.VirtualMachineExtensionsOperations
:ivar virtual_machines: VirtualMachinesOperations operations
:vartype virtual_machines:
azure.mgmt.compute.v2022_08_01.aio.operations.VirtualMachinesOperations
:ivar virtual_machine_images: VirtualMachineImagesOperations operations
:vartype virtual_machine_images:
azure.mgmt.compute.v2022_08_01.aio.operations.VirtualMachineImagesOperations
:ivar virtual_machine_images_edge_zone: VirtualMachineImagesEdgeZoneOperations operations
:vartype virtual_machine_images_edge_zone:
azure.mgmt.compute.v2022_08_01.aio.operations.VirtualMachineImagesEdgeZoneOperations
:ivar virtual_machine_extension_images: VirtualMachineExtensionImagesOperations operations
:vartype virtual_machine_extension_images:
azure.mgmt.compute.v2022_08_01.aio.operations.VirtualMachineExtensionImagesOperations
:ivar availability_sets: AvailabilitySetsOperations operations
:vartype availability_sets:
azure.mgmt.compute.v2022_08_01.aio.operations.AvailabilitySetsOperations
:ivar proximity_placement_groups: ProximityPlacementGroupsOperations operations
:vartype proximity_placement_groups:
azure.mgmt.compute.v2022_08_01.aio.operations.ProximityPlacementGroupsOperations
:ivar dedicated_host_groups: DedicatedHostGroupsOperations operations
:vartype dedicated_host_groups:
azure.mgmt.compute.v2022_08_01.aio.operations.DedicatedHostGroupsOperations
:ivar dedicated_hosts: DedicatedHostsOperations operations
:vartype dedicated_hosts:
azure.mgmt.compute.v2022_08_01.aio.operations.DedicatedHostsOperations
:ivar ssh_public_keys: SshPublicKeysOperations operations
:vartype ssh_public_keys: azure.mgmt.compute.v2022_08_01.aio.operations.SshPublicKeysOperations
:ivar images: ImagesOperations operations
:vartype images: azure.mgmt.compute.v2022_08_01.aio.operations.ImagesOperations
:ivar restore_point_collections: RestorePointCollectionsOperations operations
:vartype restore_point_collections:
azure.mgmt.compute.v2022_08_01.aio.operations.RestorePointCollectionsOperations
:ivar restore_points: RestorePointsOperations operations
:vartype restore_points: azure.mgmt.compute.v2022_08_01.aio.operations.RestorePointsOperations
:ivar capacity_reservation_groups: CapacityReservationGroupsOperations operations
:vartype capacity_reservation_groups:
azure.mgmt.compute.v2022_08_01.aio.operations.CapacityReservationGroupsOperations
:ivar capacity_reservations: CapacityReservationsOperations operations
:vartype capacity_reservations:
azure.mgmt.compute.v2022_08_01.aio.operations.CapacityReservationsOperations
:ivar log_analytics: LogAnalyticsOperations operations
:vartype log_analytics: azure.mgmt.compute.v2022_08_01.aio.operations.LogAnalyticsOperations
:ivar virtual_machine_run_commands: VirtualMachineRunCommandsOperations operations
:vartype virtual_machine_run_commands:
azure.mgmt.compute.v2022_08_01.aio.operations.VirtualMachineRunCommandsOperations
:ivar virtual_machine_scale_set_vm_run_commands: VirtualMachineScaleSetVMRunCommandsOperations
operations
:vartype virtual_machine_scale_set_vm_run_commands:
azure.mgmt.compute.v2022_08_01.aio.operations.VirtualMachineScaleSetVMRunCommandsOperations
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: Subscription credentials which uniquely identify Microsoft Azure
subscription. The subscription ID forms part of the URI for every service call. Required.
:type subscription_id: str
:param base_url: Service URL. Default value is "https://management.azure.com".
:type base_url: str
:keyword api_version: Api Version. Default value is "2022-08-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
base_url: str = "https://management.azure.com",
**kwargs: Any
) -> None:
self._config = ComputeManagementClientConfiguration(
credential=credential, subscription_id=subscription_id, **kwargs
)
self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
self.usage = UsageOperations(self._client, self._config, self._serialize, self._deserialize)
self.virtual_machine_sizes = VirtualMachineSizesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.virtual_machine_scale_sets = VirtualMachineScaleSetsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.virtual_machine_scale_set_extensions = VirtualMachineScaleSetExtensionsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.virtual_machine_scale_set_rolling_upgrades = VirtualMachineScaleSetRollingUpgradesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.virtual_machine_scale_set_vm_extensions = VirtualMachineScaleSetVMExtensionsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.virtual_machine_scale_set_vms = VirtualMachineScaleSetVMsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.virtual_machine_extensions = VirtualMachineExtensionsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.virtual_machines = VirtualMachinesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.virtual_machine_images = VirtualMachineImagesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.virtual_machine_images_edge_zone = VirtualMachineImagesEdgeZoneOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.virtual_machine_extension_images = VirtualMachineExtensionImagesOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.availability_sets = AvailabilitySetsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.proximity_placement_groups = ProximityPlacementGroupsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.dedicated_host_groups = DedicatedHostGroupsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.dedicated_hosts = DedicatedHostsOperations(self._client, self._config, self._serialize, self._deserialize)
self.ssh_public_keys = SshPublicKeysOperations(self._client, self._config, self._serialize, self._deserialize)
self.images = ImagesOperations(self._client, self._config, self._serialize, self._deserialize)
self.restore_point_collections = RestorePointCollectionsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.restore_points = RestorePointsOperations(self._client, self._config, self._serialize, self._deserialize)
self.capacity_reservation_groups = CapacityReservationGroupsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.capacity_reservations = CapacityReservationsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.log_analytics = LogAnalyticsOperations(self._client, self._config, self._serialize, self._deserialize)
self.virtual_machine_run_commands = VirtualMachineRunCommandsOperations(
self._client, self._config, self._serialize, self._deserialize
)
self.virtual_machine_scale_set_vm_run_commands = VirtualMachineScaleSetVMRunCommandsOperations(
self._client, self._config, self._serialize, self._deserialize
)
def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = await client._send_request(request)
<AsyncHttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.AsyncHttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)
async def close(self) -> None:
await self._client.close()
async def __aenter__(self) -> "ComputeManagementClient":
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details) -> None:
await self._client.__aexit__(*exc_details)
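# A minimal usage sketch, not part of the generated client. It assumes the
# azure-identity package is installed; the subscription ID is a placeholder.
if __name__ == "__main__":
    import asyncio
    from azure.identity.aio import DefaultAzureCredential

    async def _example() -> None:
        async with DefaultAzureCredential() as credential:
            async with ComputeManagementClient(credential, "<subscription-id>") as client:
                # List every VM in the subscription and print its name.
                async for vm in client.virtual_machines.list_all():
                    print(vm.name)

    asyncio.run(_example())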
"""Custom application specific yamls tags are supported to provide
enhancements when reading yaml configuration.
These allow inclusion of arbitrary files as a method of having blocks of data
managed separately to the yaml job configurations. A specific usage of this is
inlining scripts contained in separate files, although such tags may also be
used to simplify usage of macros or job templates.
The tag ``!include`` will treat the following string as file which should be
parsed as yaml configuration data.
Example:
.. literalinclude:: /../../tests/localyaml/fixtures/include001.yaml
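A hypothetical sketch of how this can look in a job definition (the file
name here is a placeholder, not the fixture above)::
    - job:
        name: example-include
        builders: !include include-builders.yaml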
The tag ``!include-raw`` will treat the following file as a data blob to be
read into the calling yaml construct without any further parsing. Any data in
a file included through this tag will be treated as string data.
Example:
.. literalinclude:: /../../tests/localyaml/fixtures/include-raw001.yaml
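A hypothetical sketch (the script name is a placeholder)::
    - job:
        name: example-include-raw
        builders:
          - shell: !include-raw build.sh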
The tag ``!include-raw-escape`` treats the given file as a data blob, which
should be escaped before being read in as string data. This allows
job-templates to use this tag to include scripts from files without
needing to escape braces in the original file.
Example:
.. literalinclude::
/../../tests/localyaml/fixtures/include-raw-escaped001.yaml
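A hypothetical sketch for a job-template, where ``{num}`` is a template
parameter and the script name is a placeholder::
    - job-template:
        name: 'example-{num}'
        builders:
          - shell: !include-raw-escape build.sh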
"""
import codecs
import functools
import logging
import re
import os
import yaml
logger = logging.getLogger(__name__)
class LocalLoader(yaml.Loader):
"""Subclass for yaml.Loader which handles the local tags 'include',
'include-raw' and 'include-raw-escape' to specify a file to include data
from and whether to parse it as additional yaml, treat it as a data blob
or additionally escape the data contained. These are specified in yaml
files by "!include path/to/file.yaml".
The constructor accepts a list of search paths to look under for the given
file following each tag, taking the first match found. The search path by
default will include the same directory as the yaml file and the current
working directory.
Loading::
# use the load function provided in this module
import local_yaml
data = local_yaml.load(open(fn))
# Loading by providing the alternate class to the default yaml load
from local_yaml import LocalLoader
data = yaml.load(open(fn), LocalLoader)
# Loading with a search path
from local_yaml import LocalLoader
import functools
data = yaml.load(open(fn), functools.partial(LocalLoader,
search_path=['path']))
"""
def __init__(self, *args, **kwargs):
# make sure to pop off any local settings before passing to
# the parent constructor as any unknown args may cause errors.
self.search_path = set()
if 'search_path' in kwargs:
for p in kwargs.pop('search_path'):
logger.debug("Adding '{0}' to search path for include tags"
.format(p))
self.search_path.add(os.path.normpath(p))
if 'escape_callback' in kwargs:
self._escape = kwargs.pop('escape_callback')
super(LocalLoader, self).__init__(*args, **kwargs)
# Add tag constructors
self.add_constructor('!include', self._include_tag)
self.add_constructor('!include-raw', self._include_raw_tag)
self.add_constructor('!include-raw-escape',
self._include_raw_escape_tag)
        # The Python 2 `file` type is gone in Python 3; duck-type on the
        # stream instead so named file objects still seed the search path.
        if hasattr(self.stream, 'name'):
self.search_path.add(os.path.normpath(
os.path.dirname(self.stream.name)))
self.search_path.add(os.path.normpath(os.path.curdir))
def _find_file(self, filename):
for dirname in self.search_path:
candidate = os.path.expanduser(os.path.join(dirname, filename))
if os.path.isfile(candidate):
logger.info("Including file '{0}' from path '{0}'"
.format(filename, dirname))
return candidate
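        # Fall through: not found on the search path, so return the name
        # unchanged and let the subsequent open() raise if it is missing.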
return filename
def _include_tag(self, loader, node):
filename = self._find_file(loader.construct_yaml_str(node))
with open(filename, 'r') as f:
data = yaml.load(f, functools.partial(LocalLoader,
search_path=self.search_path
))
return data
def _include_raw_tag(self, loader, node):
filename = self._find_file(loader.construct_yaml_str(node))
try:
with codecs.open(filename, 'r', 'utf-8') as f:
data = f.read()
        except Exception:
logger.error("Failed to include file using search path: '{0}'"
.format(':'.join(self.search_path)))
raise
return data
def _include_raw_escape_tag(self, loader, node):
return self._escape(self._include_raw_tag(loader, node))
def _escape(self, data):
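        # Double each brace so str.format() on a job-template treats the
        # included text literally, e.g. '{param}' becomes '{{param}}'.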
return re.sub(r'({|})', r'\1\1', data)
def load(stream, **kwargs):
return yaml.load(stream, functools.partial(LocalLoader, **kwargs))
#!/usr/bin/python
# -*- coding: utf-8; -*-
#
# Copyright (C) 2011 Google Inc. All rights reserved.
# Copyright (C) 2009 Torch Mobile Inc.
# Copyright (C) 2009 Apple Inc. All rights reserved.
# Copyright (C) 2010 Chris Jerdonek ([email protected])
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for cpp_style.py."""
# FIXME: Add a good test that tests UpdateIncludeState.
import codecs
import os
import random
import re
import unittest
import cpp as cpp_style
from cpp import CppChecker
from ..filter import FilterConfiguration
# This class works as an error collector and replaces cpp_style.Error
# function for the unit tests. We also verify each category we see
# is in STYLE_CATEGORIES, to help keep that list up to date.
class ErrorCollector:
_all_style_categories = CppChecker.categories
# This is a list including all categories seen in any unit test.
_seen_style_categories = {}
def __init__(self, assert_fn, filter=None, lines_to_check=None):
"""assert_fn: a function to call when we notice a problem.
filter: filters the errors that we are concerned about."""
self._assert_fn = assert_fn
self._errors = []
self._lines_to_check = lines_to_check
if not filter:
filter = FilterConfiguration()
self._filter = filter
def __call__(self, line_number, category, confidence, message):
self._assert_fn(category in self._all_style_categories,
'Message "%s" has category "%s",'
' which is not in STYLE_CATEGORIES' % (message, category))
        if self._lines_to_check and line_number not in self._lines_to_check:
return False
if self._filter.should_check(category, ""):
self._seen_style_categories[category] = 1
self._errors.append('%s [%s] [%d]' % (message, category, confidence))
return True
def results(self):
if len(self._errors) < 2:
return ''.join(self._errors) # Most tests expect to have a string.
else:
return self._errors # Let's give a list if there is more than one.
def result_list(self):
return self._errors
def verify_all_categories_are_seen(self):
"""Fails if there's a category in _all_style_categories - _seen_style_categories.
This should only be called after all tests are run, so
_seen_style_categories has had a chance to fully populate. Since
this isn't called from within the normal unittest framework, we
can't use the normal unittest assert macros. Instead we just exit
when we see an error. Good thing this test is always run last!
"""
for category in self._all_style_categories:
if category not in self._seen_style_categories:
import sys
sys.exit('FATAL ERROR: There are no tests for category "%s"' % category)
# This class is a lame mock of codecs. We do not verify filename, mode, or
# encoding, but for the current use case it is not needed.
class MockIo:
def __init__(self, mock_file):
self.mock_file = mock_file
def open(self, unused_filename, unused_mode, unused_encoding, _): # NOLINT
# (lint doesn't like open as a method name)
return self.mock_file
class CppFunctionsTest(unittest.TestCase):
"""Supports testing functions that do not need CppStyleTestBase."""
def test_convert_to_lower_with_underscores(self):
self.assertEqual(cpp_style._convert_to_lower_with_underscores('ABC'), 'abc')
self.assertEqual(cpp_style._convert_to_lower_with_underscores('aB'), 'a_b')
self.assertEqual(cpp_style._convert_to_lower_with_underscores('isAName'), 'is_a_name')
self.assertEqual(cpp_style._convert_to_lower_with_underscores('AnotherTest'), 'another_test')
self.assertEqual(cpp_style._convert_to_lower_with_underscores('PassRefPtr<MyClass>'), 'pass_ref_ptr<my_class>')
self.assertEqual(cpp_style._convert_to_lower_with_underscores('_ABC'), '_abc')
def test_create_acronym(self):
self.assertEqual(cpp_style._create_acronym('ABC'), 'ABC')
self.assertEqual(cpp_style._create_acronym('IsAName'), 'IAN')
self.assertEqual(cpp_style._create_acronym('PassRefPtr<MyClass>'), 'PRP<MC>')
def test_is_c_or_objective_c(self):
clean_lines = cpp_style.CleansedLines([''])
clean_objc_lines = cpp_style.CleansedLines(['#import "header.h"'])
self.assertTrue(cpp_style._FileState(clean_lines, 'c').is_c_or_objective_c())
self.assertTrue(cpp_style._FileState(clean_lines, 'm').is_c_or_objective_c())
self.assertFalse(cpp_style._FileState(clean_lines, 'cpp').is_c_or_objective_c())
self.assertFalse(cpp_style._FileState(clean_lines, 'cc').is_c_or_objective_c())
self.assertFalse(cpp_style._FileState(clean_lines, 'h').is_c_or_objective_c())
self.assertTrue(cpp_style._FileState(clean_objc_lines, 'h').is_c_or_objective_c())
def test_parameter(self):
# Test type.
parameter = cpp_style.Parameter('ExceptionCode', 13, 1)
self.assertEqual(parameter.type, 'ExceptionCode')
self.assertEqual(parameter.name, '')
self.assertEqual(parameter.row, 1)
# Test type and name.
parameter = cpp_style.Parameter('PassRefPtr<MyClass> parent', 19, 1)
self.assertEqual(parameter.type, 'PassRefPtr<MyClass>')
self.assertEqual(parameter.name, 'parent')
self.assertEqual(parameter.row, 1)
# Test type, no name, with default value.
parameter = cpp_style.Parameter('MyClass = 0', 7, 0)
self.assertEqual(parameter.type, 'MyClass')
self.assertEqual(parameter.name, '')
self.assertEqual(parameter.row, 0)
# Test type, name, and default value.
parameter = cpp_style.Parameter('MyClass a = 0', 7, 0)
self.assertEqual(parameter.type, 'MyClass')
self.assertEqual(parameter.name, 'a')
self.assertEqual(parameter.row, 0)
def test_single_line_view(self):
start_position = cpp_style.Position(row=1, column=1)
end_position = cpp_style.Position(row=3, column=1)
single_line_view = cpp_style.SingleLineView(['0', 'abcde', 'fgh', 'i'], start_position, end_position)
self.assertEqual(single_line_view.single_line, 'bcde fgh i')
self.assertEqual(single_line_view.convert_column_to_row(0), 1)
self.assertEqual(single_line_view.convert_column_to_row(4), 1)
self.assertEqual(single_line_view.convert_column_to_row(5), 2)
self.assertEqual(single_line_view.convert_column_to_row(8), 2)
self.assertEqual(single_line_view.convert_column_to_row(9), 3)
self.assertEqual(single_line_view.convert_column_to_row(100), 3)
start_position = cpp_style.Position(row=0, column=3)
end_position = cpp_style.Position(row=0, column=4)
single_line_view = cpp_style.SingleLineView(['abcdef'], start_position, end_position)
self.assertEqual(single_line_view.single_line, 'd')
def test_create_skeleton_parameters(self):
self.assertEqual(cpp_style.create_skeleton_parameters(''), '')
self.assertEqual(cpp_style.create_skeleton_parameters(' '), ' ')
self.assertEqual(cpp_style.create_skeleton_parameters('long'), 'long,')
self.assertEqual(cpp_style.create_skeleton_parameters('const unsigned long int'), ' int,')
self.assertEqual(cpp_style.create_skeleton_parameters('long int*'), ' int ,')
self.assertEqual(cpp_style.create_skeleton_parameters('PassRefPtr<Foo> a'), 'PassRefPtr a,')
self.assertEqual(cpp_style.create_skeleton_parameters(
'ComplexTemplate<NestedTemplate1<MyClass1, MyClass2>, NestedTemplate1<MyClass1, MyClass2> > param, int second'),
'ComplexTemplate param, int second,')
self.assertEqual(cpp_style.create_skeleton_parameters('int = 0, Namespace::Type& a'), 'int , Type a,')
        # create_skeleton_parameters is a bit too aggressive with function
        # variables, but it still allows the other parameters to be parsed,
        # and declarations like this are rare.
self.assertEqual(cpp_style.create_skeleton_parameters('void (*fn)(int a, int b), Namespace::Type& a'),
'void , Type a,')
        # This doesn't look like a function declaration, but the simplifications help to eliminate false positives.
self.assertEqual(cpp_style.create_skeleton_parameters('b{d}'), 'b ,')
def test_find_parameter_name_index(self):
self.assertEqual(cpp_style.find_parameter_name_index(' int a '), 5)
self.assertEqual(cpp_style.find_parameter_name_index(' PassRefPtr '), 16)
self.assertEqual(cpp_style.find_parameter_name_index('double'), 6)
def test_parameter_list(self):
elided_lines = ['int blah(PassRefPtr<MyClass> paramName,',
'const Other1Class& foo,',
'const ComplexTemplate<Class1, NestedTemplate<P1, P2> >* const * param = new ComplexTemplate<Class1, NestedTemplate<P1, P2> >(34, 42),',
'int* myCount = 0);']
start_position = cpp_style.Position(row=0, column=8)
end_position = cpp_style.Position(row=3, column=16)
expected_parameters = ({'type': 'PassRefPtr<MyClass>', 'name': 'paramName', 'row': 0},
{'type': 'const Other1Class&', 'name': 'foo', 'row': 1},
{'type': 'const ComplexTemplate<Class1, NestedTemplate<P1, P2> >* const *', 'name': 'param', 'row': 2},
{'type': 'int*', 'name': 'myCount', 'row': 3})
index = 0
for parameter in cpp_style.parameter_list(elided_lines, start_position, end_position):
expected_parameter = expected_parameters[index]
self.assertEqual(parameter.type, expected_parameter['type'])
self.assertEqual(parameter.name, expected_parameter['name'])
self.assertEqual(parameter.row, expected_parameter['row'])
index += 1
self.assertEqual(index, len(expected_parameters))
def test_check_parameter_against_text(self):
error_collector = ErrorCollector(self.assertTrue)
parameter = cpp_style.Parameter('FooF ooF', 4, 1)
self.assertFalse(cpp_style._check_parameter_name_against_text(parameter, 'FooF', error_collector))
self.assertEqual(error_collector.results(),
'The parameter name "ooF" adds no information, so it should be removed. [readability/parameter_name] [5]')
class CppStyleTestBase(unittest.TestCase):
"""Provides some useful helper functions for cpp_style tests.
Attributes:
min_confidence: An integer that is the current minimum confidence
level for the tests.
"""
# FIXME: Refactor the unit tests so the confidence level is passed
# explicitly, just like it is in the real code.
    min_confidence = 1
# Helper function to avoid needing to explicitly pass confidence
# in all the unit test calls to cpp_style.process_file_data().
def process_file_data(self, filename, file_extension, lines, error, unit_test_config={}):
"""Call cpp_style.process_file_data() with the min_confidence."""
return cpp_style.process_file_data(filename, file_extension, lines,
error, self.min_confidence, unit_test_config)
def perform_lint(self, code, filename, basic_error_rules, unit_test_config={}, lines_to_check=None):
error_collector = ErrorCollector(self.assertTrue, FilterConfiguration(basic_error_rules), lines_to_check)
lines = code.split('\n')
extension = filename.split('.')[1]
self.process_file_data(filename, extension, lines, error_collector, unit_test_config)
return error_collector.results()
# Perform lint on single line of input and return the error message.
def perform_single_line_lint(self, code, filename):
basic_error_rules = ('-build/header_guard',
'-legal/copyright',
'-readability/fn_size',
'-readability/parameter_name',
'-readability/pass_ptr',
'-whitespace/ending_newline')
return self.perform_lint(code, filename, basic_error_rules)
# Perform lint over multiple lines and return the error message.
def perform_multi_line_lint(self, code, file_extension):
basic_error_rules = ('-build/header_guard',
'-legal/copyright',
'-readability/parameter_name',
'-whitespace/ending_newline')
return self.perform_lint(code, 'test.' + file_extension, basic_error_rules)
# Only keep some errors related to includes, namespaces and rtti.
def perform_language_rules_check(self, filename, code, lines_to_check=None):
basic_error_rules = ('-',
'+build/include',
'+build/include_order',
'+build/namespaces',
'+runtime/rtti')
return self.perform_lint(code, filename, basic_error_rules, lines_to_check=lines_to_check)
# Only keep function length errors.
def perform_function_lengths_check(self, code):
basic_error_rules = ('-',
'+readability/fn_size')
return self.perform_lint(code, 'test.cpp', basic_error_rules)
# Only keep pass ptr errors.
def perform_pass_ptr_check(self, code):
basic_error_rules = ('-',
'+readability/pass_ptr')
return self.perform_lint(code, 'test.cpp', basic_error_rules)
# Only keep leaky pattern errors.
def perform_leaky_pattern_check(self, code):
basic_error_rules = ('-',
'+runtime/leaky_pattern')
return self.perform_lint(code, 'test.cpp', basic_error_rules)
# Only include what you use errors.
def perform_include_what_you_use(self, code, filename='foo.h', io=codecs):
basic_error_rules = ('-',
'+build/include_what_you_use')
unit_test_config = {cpp_style.INCLUDE_IO_INJECTION_KEY: io}
return self.perform_lint(code, filename, basic_error_rules, unit_test_config)
# Perform lint and compare the error message with "expected_message".
def assert_lint(self, code, expected_message, file_name='foo.cpp'):
self.assertEqual(expected_message, self.perform_single_line_lint(code, file_name))
def assert_lint_one_of_many_errors_re(self, code, expected_message_re, file_name='foo.cpp'):
messages = self.perform_single_line_lint(code, file_name)
for message in messages:
if re.search(expected_message_re, message):
return
self.assertEqual(expected_message_re, messages)
def assert_multi_line_lint(self, code, expected_message, file_name='foo.h'):
file_extension = file_name[file_name.rfind('.') + 1:]
self.assertEqual(expected_message, self.perform_multi_line_lint(code, file_extension))
def assert_multi_line_lint_re(self, code, expected_message_re, file_name='foo.h'):
file_extension = file_name[file_name.rfind('.') + 1:]
message = self.perform_multi_line_lint(code, file_extension)
if not re.search(expected_message_re, message):
self.fail('Message was:\n' + message + 'Expected match to "' + expected_message_re + '"')
def assert_language_rules_check(self, file_name, code, expected_message, lines_to_check=None):
self.assertEqual(expected_message,
self.perform_language_rules_check(file_name, code, lines_to_check))
def assert_include_what_you_use(self, code, expected_message):
self.assertEqual(expected_message,
self.perform_include_what_you_use(code))
def assert_blank_lines_check(self, lines, start_errors, end_errors):
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data('foo.cpp', 'cpp', lines, error_collector)
self.assertEqual(
start_errors,
error_collector.results().count(
'Blank line at the start of a code block. Is this needed?'
' [whitespace/blank_line] [2]'))
self.assertEqual(
end_errors,
error_collector.results().count(
'Blank line at the end of a code block. Is this needed?'
' [whitespace/blank_line] [3]'))
def assert_positions_equal(self, position, tuple_position):
"""Checks if the two positions are equal.
position: a cpp_style.Position object.
tuple_position: a tuple (row, column) to compare against."""
self.assertEqual(position, cpp_style.Position(tuple_position[0], tuple_position[1]),
'position %s, tuple_position %s' % (position, tuple_position))
class FunctionDetectionTest(CppStyleTestBase):
def perform_function_detection(self, lines, function_information, detection_line=0):
clean_lines = cpp_style.CleansedLines(lines)
function_state = cpp_style._FunctionState(5)
error_collector = ErrorCollector(self.assertTrue)
cpp_style.detect_functions(clean_lines, detection_line, function_state, error_collector)
if not function_information:
self.assertEqual(function_state.in_a_function, False)
return
self.assertEqual(function_state.in_a_function, True)
self.assertEqual(function_state.current_function, function_information['name'] + '()')
self.assertEqual(function_state.modifiers_and_return_type(), function_information['modifiers_and_return_type'])
self.assertEqual(function_state.is_pure, function_information['is_pure'])
self.assertEqual(function_state.is_declaration, function_information['is_declaration'])
self.assert_positions_equal(function_state.function_name_start_position, function_information['function_name_start_position'])
self.assert_positions_equal(function_state.parameter_start_position, function_information['parameter_start_position'])
self.assert_positions_equal(function_state.parameter_end_position, function_information['parameter_end_position'])
self.assert_positions_equal(function_state.body_start_position, function_information['body_start_position'])
self.assert_positions_equal(function_state.end_position, function_information['end_position'])
expected_parameters = function_information.get('parameter_list')
if expected_parameters:
actual_parameters = function_state.parameter_list()
self.assertEqual(len(actual_parameters), len(expected_parameters))
for index in range(len(expected_parameters)):
actual_parameter = actual_parameters[index]
expected_parameter = expected_parameters[index]
self.assertEqual(actual_parameter.type, expected_parameter['type'])
self.assertEqual(actual_parameter.name, expected_parameter['name'])
self.assertEqual(actual_parameter.row, expected_parameter['row'])
def test_basic_function_detection(self):
self.perform_function_detection(
['void theTestFunctionName(int) {',
'}'],
{'name': 'theTestFunctionName',
'modifiers_and_return_type': 'void',
'function_name_start_position': (0, 5),
'parameter_start_position': (0, 24),
'parameter_end_position': (0, 29),
'body_start_position': (0, 30),
'end_position': (1, 1),
'is_pure': False,
'is_declaration': False})
def test_function_declaration_detection(self):
self.perform_function_detection(
['void aFunctionName(int);'],
{'name': 'aFunctionName',
'modifiers_and_return_type': 'void',
'function_name_start_position': (0, 5),
'parameter_start_position': (0, 18),
'parameter_end_position': (0, 23),
'body_start_position': (0, 23),
'end_position': (0, 24),
'is_pure': False,
'is_declaration': True})
self.perform_function_detection(
['CheckedInt<T> operator /(const CheckedInt<T> &lhs, const CheckedInt<T> &rhs);'],
{'name': 'operator /',
'modifiers_and_return_type': 'CheckedInt<T>',
'function_name_start_position': (0, 14),
'parameter_start_position': (0, 24),
'parameter_end_position': (0, 76),
'body_start_position': (0, 76),
'end_position': (0, 77),
'is_pure': False,
'is_declaration': True})
self.perform_function_detection(
['CheckedInt<T> operator -(const CheckedInt<T> &lhs, const CheckedInt<T> &rhs);'],
{'name': 'operator -',
'modifiers_and_return_type': 'CheckedInt<T>',
'function_name_start_position': (0, 14),
'parameter_start_position': (0, 24),
'parameter_end_position': (0, 76),
'body_start_position': (0, 76),
'end_position': (0, 77),
'is_pure': False,
'is_declaration': True})
self.perform_function_detection(
['CheckedInt<T> operator !=(const CheckedInt<T> &lhs, const CheckedInt<T> &rhs);'],
{'name': 'operator !=',
'modifiers_and_return_type': 'CheckedInt<T>',
'function_name_start_position': (0, 14),
'parameter_start_position': (0, 25),
'parameter_end_position': (0, 77),
'body_start_position': (0, 77),
'end_position': (0, 78),
'is_pure': False,
'is_declaration': True})
self.perform_function_detection(
['CheckedInt<T> operator +(const CheckedInt<T> &lhs, const CheckedInt<T> &rhs);'],
{'name': 'operator +',
'modifiers_and_return_type': 'CheckedInt<T>',
'function_name_start_position': (0, 14),
'parameter_start_position': (0, 24),
'parameter_end_position': (0, 76),
'body_start_position': (0, 76),
'end_position': (0, 77),
'is_pure': False,
'is_declaration': True})
def test_pure_function_detection(self):
self.perform_function_detection(
['virtual void theTestFunctionName(int = 0);'],
{'name': 'theTestFunctionName',
'modifiers_and_return_type': 'virtual void',
'function_name_start_position': (0, 13),
'parameter_start_position': (0, 32),
'parameter_end_position': (0, 41),
'body_start_position': (0, 41),
'end_position': (0, 42),
'is_pure': False,
'is_declaration': True})
self.perform_function_detection(
['virtual void theTestFunctionName(int) = 0;'],
{'name': 'theTestFunctionName',
'modifiers_and_return_type': 'virtual void',
'function_name_start_position': (0, 13),
'parameter_start_position': (0, 32),
'parameter_end_position': (0, 37),
'body_start_position': (0, 41),
'end_position': (0, 42),
'is_pure': True,
'is_declaration': True})
# Hopefully, no one writes code like this but it is a tricky case.
self.perform_function_detection(
['virtual void theTestFunctionName(int)',
' = ',
' 0 ;'],
{'name': 'theTestFunctionName',
'modifiers_and_return_type': 'virtual void',
'function_name_start_position': (0, 13),
'parameter_start_position': (0, 32),
'parameter_end_position': (0, 37),
'body_start_position': (2, 3),
'end_position': (2, 4),
'is_pure': True,
'is_declaration': True})
def test_ignore_macros(self):
self.perform_function_detection(['void aFunctionName(int); \\'], None)
def test_non_functions(self):
# This case exposed an error because the open brace was in quotes.
self.perform_function_detection(
['asm(',
' "stmdb sp!, {r1-r3}" "\n"',
');'],
# This isn't a function but it looks like one to our simple
# algorithm and that is ok.
{'name': 'asm',
'modifiers_and_return_type': '',
'function_name_start_position': (0, 0),
'parameter_start_position': (0, 3),
'parameter_end_position': (2, 1),
'body_start_position': (2, 1),
'end_position': (2, 2),
'is_pure': False,
'is_declaration': True})
# Simple test case with something that is not a function.
self.perform_function_detection(['class Stuff;'], None)
def test_parameter_list(self):
# A function with no arguments.
function_state = self.perform_function_detection(
['void functionName();'],
{'name': 'functionName',
'modifiers_and_return_type': 'void',
'function_name_start_position': (0, 5),
'parameter_start_position': (0, 17),
'parameter_end_position': (0, 19),
'body_start_position': (0, 19),
'end_position': (0, 20),
'is_pure': False,
'is_declaration': True,
'parameter_list': ()})
# A function with one argument.
function_state = self.perform_function_detection(
['void functionName(int);'],
{'name': 'functionName',
'modifiers_and_return_type': 'void',
'function_name_start_position': (0, 5),
'parameter_start_position': (0, 17),
'parameter_end_position': (0, 22),
'body_start_position': (0, 22),
'end_position': (0, 23),
'is_pure': False,
'is_declaration': True,
'parameter_list':
({'type': 'int', 'name': '', 'row': 0},)})
# A function with unsigned and short arguments
function_state = self.perform_function_detection(
['void functionName(unsigned a, short b, long c, long long short unsigned int);'],
{'name': 'functionName',
'modifiers_and_return_type': 'void',
'function_name_start_position': (0, 5),
'parameter_start_position': (0, 17),
'parameter_end_position': (0, 76),
'body_start_position': (0, 76),
'end_position': (0, 77),
'is_pure': False,
'is_declaration': True,
'parameter_list':
({'type': 'unsigned', 'name': 'a', 'row': 0},
{'type': 'short', 'name': 'b', 'row': 0},
{'type': 'long', 'name': 'c', 'row': 0},
{'type': 'long long short unsigned int', 'name': '', 'row': 0})})
# Some parameter type with modifiers and no parameter names.
function_state = self.perform_function_detection(
['virtual void determineARIADropEffects(Vector<String>*&, const unsigned long int*&, const MediaPlayer::Preload, Other<Other2, Other3<P1, P2> >, int);'],
{'name': 'determineARIADropEffects',
'modifiers_and_return_type': 'virtual void',
'parameter_start_position': (0, 37),
'function_name_start_position': (0, 13),
'parameter_end_position': (0, 147),
'body_start_position': (0, 147),
'end_position': (0, 148),
'is_pure': False,
'is_declaration': True,
'parameter_list':
({'type': 'Vector<String>*&', 'name': '', 'row': 0},
{'type': 'const unsigned long int*&', 'name': '', 'row': 0},
{'type': 'const MediaPlayer::Preload', 'name': '', 'row': 0},
{'type': 'Other<Other2, Other3<P1, P2> >', 'name': '', 'row': 0},
{'type': 'int', 'name': '', 'row': 0})})
# Try parsing a function with a very complex definition.
function_state = self.perform_function_detection(
['#define MyMacro(a) a',
'virtual',
'AnotherTemplate<Class1, Class2> aFunctionName(PassRefPtr<MyClass> paramName,',
'const Other1Class& foo,',
'const ComplexTemplate<Class1, NestedTemplate<P1, P2> >* const * param = new ComplexTemplate<Class1, NestedTemplate<P1, P2> >(34, 42),',
'int* myCount = 0);'],
{'name': 'aFunctionName',
'modifiers_and_return_type': 'virtual AnotherTemplate<Class1, Class2>',
'function_name_start_position': (2, 32),
'parameter_start_position': (2, 45),
'parameter_end_position': (5, 17),
'body_start_position': (5, 17),
'end_position': (5, 18),
'is_pure': False,
'is_declaration': True,
'parameter_list':
({'type': 'PassRefPtr<MyClass>', 'name': 'paramName', 'row': 2},
{'type': 'const Other1Class&', 'name': 'foo', 'row': 3},
{'type': 'const ComplexTemplate<Class1, NestedTemplate<P1, P2> >* const *', 'name': 'param', 'row': 4},
{'type': 'int*', 'name': 'myCount', 'row': 5})},
detection_line=2)
class CppStyleTest(CppStyleTestBase):
def test_asm_lines_ignored(self):
self.assert_lint(
'__asm mov [registration], eax',
'')
# Test get line width.
def test_get_line_width(self):
self.assertEqual(0, cpp_style.get_line_width(''))
self.assertEqual(10, cpp_style.get_line_width(u'x' * 10))
self.assertEqual(16, cpp_style.get_line_width(u'都|道|府|県|支庁'))
def test_find_next_multi_line_comment_start(self):
self.assertEqual(1, cpp_style.find_next_multi_line_comment_start([''], 0))
lines = ['a', 'b', '/* c']
self.assertEqual(2, cpp_style.find_next_multi_line_comment_start(lines, 0))
lines = ['char a[] = "/*";'] # not recognized as comment.
self.assertEqual(1, cpp_style.find_next_multi_line_comment_start(lines, 0))
def test_find_next_multi_line_comment_end(self):
self.assertEqual(1, cpp_style.find_next_multi_line_comment_end([''], 0))
lines = ['a', 'b', ' c */']
self.assertEqual(2, cpp_style.find_next_multi_line_comment_end(lines, 0))
def test_remove_multi_line_comments_from_range(self):
lines = ['a', ' /* comment ', ' * still comment', ' comment */ ', 'b']
cpp_style.remove_multi_line_comments_from_range(lines, 1, 4)
self.assertEqual(['a', '// dummy', '// dummy', '// dummy', 'b'], lines)
def test_position(self):
position = cpp_style.Position(3, 4)
self.assert_positions_equal(position, (3, 4))
self.assertEqual(position.row, 3)
self.assertTrue(position > cpp_style.Position(position.row - 1, position.column + 1))
self.assertTrue(position > cpp_style.Position(position.row, position.column - 1))
self.assertTrue(position < cpp_style.Position(position.row, position.column + 1))
self.assertTrue(position < cpp_style.Position(position.row + 1, position.column - 1))
self.assertEqual(position.__str__(), '(3, 4)')
def test_rfind_in_lines(self):
not_found_position = cpp_style.Position(10, 11)
start_position = cpp_style.Position(2, 2)
lines = ['ab', 'ace', 'test']
self.assertEqual(not_found_position, cpp_style._rfind_in_lines('st', lines, start_position, not_found_position))
self.assertTrue(cpp_style.Position(1, 1) == cpp_style._rfind_in_lines('a', lines, start_position, not_found_position))
self.assertEqual(cpp_style.Position(2, 2), cpp_style._rfind_in_lines('(te|a)', lines, start_position, not_found_position))
def test_close_expression(self):
self.assertEqual(cpp_style.Position(1, -1), cpp_style.close_expression([')('], cpp_style.Position(0, 1)))
self.assertEqual(cpp_style.Position(1, -1), cpp_style.close_expression([') ()'], cpp_style.Position(0, 1)))
self.assertEqual(cpp_style.Position(0, 4), cpp_style.close_expression([')[)]'], cpp_style.Position(0, 1)))
self.assertEqual(cpp_style.Position(0, 5), cpp_style.close_expression(['}{}{}'], cpp_style.Position(0, 3)))
self.assertEqual(cpp_style.Position(1, 1), cpp_style.close_expression(['}{}{', '}'], cpp_style.Position(0, 3)))
self.assertEqual(cpp_style.Position(2, -1), cpp_style.close_expression(['][][', ' '], cpp_style.Position(0, 3)))
def test_spaces_at_end_of_line(self):
self.assert_lint(
'// Hello there ',
'Line ends in whitespace. Consider deleting these extra spaces.'
' [whitespace/end_of_line] [4]')
# Test C-style cast cases.
def test_cstyle_cast(self):
self.assert_lint(
'int a = (int)1.0;',
'Using C-style cast. Use static_cast<int>(...) instead'
' [readability/casting] [4]')
self.assert_lint(
'int *a = (int *)DEFINED_VALUE;',
'Using C-style cast. Use reinterpret_cast<int *>(...) instead'
' [readability/casting] [4]', 'foo.c')
self.assert_lint(
'uint16 a = (uint16)1.0;',
'Using C-style cast. Use static_cast<uint16>(...) instead'
' [readability/casting] [4]')
self.assert_lint(
'int32 a = (int32)1.0;',
'Using C-style cast. Use static_cast<int32>(...) instead'
' [readability/casting] [4]')
self.assert_lint(
'uint64 a = (uint64)1.0;',
'Using C-style cast. Use static_cast<uint64>(...) instead'
' [readability/casting] [4]')
# Test taking address of casts (runtime/casting)
def test_runtime_casting(self):
self.assert_lint(
'int* x = &static_cast<int*>(foo);',
'Are you taking an address of a cast? '
'This is dangerous: could be a temp var. '
'Take the address before doing the cast, rather than after'
' [runtime/casting] [4]')
self.assert_lint(
'int* x = &dynamic_cast<int *>(foo);',
['Are you taking an address of a cast? '
'This is dangerous: could be a temp var. '
'Take the address before doing the cast, rather than after'
' [runtime/casting] [4]',
'Do not use dynamic_cast<>. If you need to cast within a class '
'hierarchy, use static_cast<> to upcast. Google doesn\'t support '
'RTTI. [runtime/rtti] [5]'])
self.assert_lint(
'int* x = &reinterpret_cast<int *>(foo);',
'Are you taking an address of a cast? '
'This is dangerous: could be a temp var. '
'Take the address before doing the cast, rather than after'
' [runtime/casting] [4]')
# It's OK to cast an address.
self.assert_lint(
'int* x = reinterpret_cast<int *>(&foo);',
'')
def test_runtime_selfinit(self):
self.assert_lint(
'Foo::Foo(Bar r, Bel l) : r_(r_), l_(l_) { }',
'You seem to be initializing a member variable with itself.'
' [runtime/init] [4]')
self.assert_lint(
'Foo::Foo(Bar r, Bel l) : r_(r), l_(l) { }',
'')
self.assert_lint(
'Foo::Foo(Bar r) : r_(r), l_(r_), ll_(l_) { }',
'')
def test_runtime_rtti(self):
statement = 'int* x = dynamic_cast<int*>(&foo);'
error_message = (
'Do not use dynamic_cast<>. If you need to cast within a class '
'hierarchy, use static_cast<> to upcast. Google doesn\'t support '
'RTTI. [runtime/rtti] [5]')
# dynamic_cast is disallowed in most files.
self.assert_language_rules_check('foo.cpp', statement, error_message)
self.assert_language_rules_check('foo.h', statement, error_message)
# Test for static_cast readability.
def test_static_cast_readability(self):
self.assert_lint(
'Text* x = static_cast<Text*>(foo);',
'Consider using toText helper function in WebCore/dom/Text.h '
'instead of static_cast<Text*>'
' [readability/check] [4]')
    # We cannot test this functionality because of differences between
    # function definitions. Anyway, we may never enable this.
#
# # Test for unnamed arguments in a method.
# def test_check_for_unnamed_params(self):
# message = ('All parameters should be named in a function'
# ' [readability/function] [3]')
# self.assert_lint('virtual void A(int*) const;', message)
# self.assert_lint('virtual void B(void (*fn)(int*));', message)
# self.assert_lint('virtual void C(int*);', message)
# self.assert_lint('void *(*f)(void *) = x;', message)
# self.assert_lint('void Method(char*) {', message)
# self.assert_lint('void Method(char*);', message)
# self.assert_lint('void Method(char* /*x*/);', message)
# self.assert_lint('typedef void (*Method)(int32);', message)
# self.assert_lint('static void operator delete[](void*) throw();', message)
#
# self.assert_lint('virtual void D(int* p);', '')
# self.assert_lint('void operator delete(void* x) throw();', '')
# self.assert_lint('void Method(char* x)\n{', '')
# self.assert_lint('void Method(char* /*x*/)\n{', '')
# self.assert_lint('void Method(char* x);', '')
# self.assert_lint('typedef void (*Method)(int32 x);', '')
# self.assert_lint('static void operator delete[](void* x) throw();', '')
# self.assert_lint('static void operator delete[](void* /*x*/) throw();', '')
#
# # This one should technically warn, but doesn't because the function
# # pointer is confusing.
# self.assert_lint('virtual void E(void (*fn)(int* p));', '')
# Test deprecated casts such as int(d)
def test_deprecated_cast(self):
self.assert_lint(
'int a = int(2.2);',
'Using deprecated casting style. '
'Use static_cast<int>(...) instead'
' [readability/casting] [4]')
# Checks for false positives...
self.assert_lint(
'int a = int(); // Constructor, o.k.',
'')
self.assert_lint(
'X::X() : a(int()) { } // default Constructor, o.k.',
'')
self.assert_lint(
'operator bool(); // Conversion operator, o.k.',
'')
    # The second parameter to a gMock method definition is a function signature
    # that often looks like a bad cast but should not be picked up by lint.
def test_mock_method(self):
self.assert_lint(
'MOCK_METHOD0(method, int());',
'')
self.assert_lint(
'MOCK_CONST_METHOD1(method, float(string));',
'')
self.assert_lint(
'MOCK_CONST_METHOD2_T(method, double(float, float));',
'')
# Test sizeof(type) cases.
def test_sizeof_type(self):
self.assert_lint(
'sizeof(int);',
'Using sizeof(type). Use sizeof(varname) instead if possible'
' [runtime/sizeof] [1]')
self.assert_lint(
'sizeof(int *);',
'Using sizeof(type). Use sizeof(varname) instead if possible'
' [runtime/sizeof] [1]')
# Test typedef cases. There was a bug that cpp_style misidentified
# typedef for pointer to function as C-style cast and produced
# false-positive error messages.
def test_typedef_for_pointer_to_function(self):
self.assert_lint(
'typedef void (*Func)(int x);',
'')
self.assert_lint(
'typedef void (*Func)(int *x);',
'')
self.assert_lint(
'typedef void Func(int x);',
'')
self.assert_lint(
'typedef void Func(int *x);',
'')
def test_include_what_you_use_no_implementation_files(self):
code = 'std::vector<int> foo;'
self.assertEqual('Add #include <vector> for vector<>'
' [build/include_what_you_use] [4]',
self.perform_include_what_you_use(code, 'foo.h'))
self.assertEqual('',
self.perform_include_what_you_use(code, 'foo.cpp'))
def test_include_what_you_use(self):
self.assert_include_what_you_use(
'''#include <vector>
std::vector<int> foo;
''',
'')
self.assert_include_what_you_use(
'''#include <map>
std::pair<int,int> foo;
''',
'')
self.assert_include_what_you_use(
'''#include <multimap>
std::pair<int,int> foo;
''',
'')
self.assert_include_what_you_use(
'''#include <hash_map>
std::pair<int,int> foo;
''',
'')
self.assert_include_what_you_use(
'''#include <utility>
std::pair<int,int> foo;
''',
'')
self.assert_include_what_you_use(
'''#include <vector>
DECLARE_string(foobar);
''',
'')
self.assert_include_what_you_use(
'''#include <vector>
DEFINE_string(foobar, "", "");
''',
'')
self.assert_include_what_you_use(
'''#include <vector>
std::pair<int,int> foo;
''',
'Add #include <utility> for pair<>'
' [build/include_what_you_use] [4]')
self.assert_include_what_you_use(
'''#include "base/foobar.h"
std::vector<int> foo;
''',
'Add #include <vector> for vector<>'
' [build/include_what_you_use] [4]')
self.assert_include_what_you_use(
'''#include <vector>
std::set<int> foo;
''',
'Add #include <set> for set<>'
' [build/include_what_you_use] [4]')
self.assert_include_what_you_use(
'''#include "base/foobar.h"
hash_map<int, int> foobar;
''',
'Add #include <hash_map> for hash_map<>'
' [build/include_what_you_use] [4]')
self.assert_include_what_you_use(
'''#include "base/foobar.h"
bool foobar = std::less<int>(0,1);
''',
'Add #include <functional> for less<>'
' [build/include_what_you_use] [4]')
self.assert_include_what_you_use(
'''#include "base/foobar.h"
bool foobar = min<int>(0,1);
''',
'Add #include <algorithm> for min [build/include_what_you_use] [4]')
self.assert_include_what_you_use(
'void a(const string &foobar);',
'Add #include <string> for string [build/include_what_you_use] [4]')
self.assert_include_what_you_use(
'''#include "base/foobar.h"
bool foobar = swap(0,1);
''',
'Add #include <algorithm> for swap [build/include_what_you_use] [4]')
self.assert_include_what_you_use(
'''#include "base/foobar.h"
bool foobar = transform(a.begin(), a.end(), b.start(), Foo);
''',
'Add #include <algorithm> for transform '
'[build/include_what_you_use] [4]')
self.assert_include_what_you_use(
'''#include "base/foobar.h"
bool foobar = min_element(a.begin(), a.end());
''',
'Add #include <algorithm> for min_element '
'[build/include_what_you_use] [4]')
self.assert_include_what_you_use(
'''foo->swap(0,1);
foo.swap(0,1);
''',
'')
self.assert_include_what_you_use(
'''#include <string>
void a(const std::multimap<int,string> &foobar);
''',
'Add #include <map> for multimap<>'
' [build/include_what_you_use] [4]')
self.assert_include_what_you_use(
'''#include <queue>
void a(const std::priority_queue<int> &foobar);
''',
'')
self.assert_include_what_you_use(
'''#include "base/basictypes.h"
#include "base/port.h"
#include <assert.h>
#include <string>
#include <vector>
vector<string> hajoa;''', '')
self.assert_include_what_you_use(
'''#include <string>
int i = numeric_limits<int>::max()
''',
'Add #include <limits> for numeric_limits<>'
' [build/include_what_you_use] [4]')
self.assert_include_what_you_use(
'''#include <limits>
int i = numeric_limits<int>::max()
''',
'')
# Test the UpdateIncludeState code path.
mock_header_contents = ['#include "blah/foo.h"', '#include "blah/bar.h"']
message = self.perform_include_what_you_use(
'#include "config.h"\n'
'#include "blah/a.h"\n',
filename='blah/a.cpp',
io=MockIo(mock_header_contents))
self.assertEqual(message, '')
mock_header_contents = ['#include <set>']
message = self.perform_include_what_you_use(
'''#include "config.h"
#include "blah/a.h"
std::set<int> foo;''',
filename='blah/a.cpp',
io=MockIo(mock_header_contents))
self.assertEqual(message, '')
# If there's just a .cpp and the header can't be found then it's ok.
message = self.perform_include_what_you_use(
'''#include "config.h"
#include "blah/a.h"
std::set<int> foo;''',
filename='blah/a.cpp')
self.assertEqual(message, '')
# Make sure we find the headers with relative paths.
mock_header_contents = ['']
message = self.perform_include_what_you_use(
'''#include "config.h"
#include "%s%sa.h"
std::set<int> foo;''' % (os.path.basename(os.getcwd()), os.path.sep),
filename='a.cpp',
io=MockIo(mock_header_contents))
self.assertEqual(message, 'Add #include <set> for set<> '
'[build/include_what_you_use] [4]')
def test_files_belong_to_same_module(self):
f = cpp_style.files_belong_to_same_module
self.assertEqual((True, ''), f('a.cpp', 'a.h'))
self.assertEqual((True, ''), f('base/google.cpp', 'base/google.h'))
self.assertEqual((True, ''), f('base/google_test.cpp', 'base/google.h'))
self.assertEqual((True, ''),
f('base/google_unittest.cpp', 'base/google.h'))
self.assertEqual((True, ''),
f('base/internal/google_unittest.cpp',
'base/public/google.h'))
self.assertEqual((True, 'xxx/yyy/'),
f('xxx/yyy/base/internal/google_unittest.cpp',
'base/public/google.h'))
self.assertEqual((True, 'xxx/yyy/'),
f('xxx/yyy/base/google_unittest.cpp',
'base/public/google.h'))
self.assertEqual((True, ''),
f('base/google_unittest.cpp', 'base/google-inl.h'))
self.assertEqual((True, '/home/build/google3/'),
f('/home/build/google3/base/google.cpp', 'base/google.h'))
self.assertEqual((False, ''),
f('/home/build/google3/base/google.cpp', 'basu/google.h'))
self.assertEqual((False, ''), f('a.cpp', 'b.h'))
def test_cleanse_line(self):
self.assertEqual('int foo = 0; ',
cpp_style.cleanse_comments('int foo = 0; // danger!'))
self.assertEqual('int o = 0;',
cpp_style.cleanse_comments('int /* foo */ o = 0;'))
self.assertEqual('foo(int a, int b);',
cpp_style.cleanse_comments('foo(int a /* abc */, int b);'))
self.assertEqual('f(a, b);',
cpp_style.cleanse_comments('f(a, /* name */ b);'))
self.assertEqual('f(a, b);',
cpp_style.cleanse_comments('f(a /* name */, b);'))
self.assertEqual('f(a, b);',
cpp_style.cleanse_comments('f(a, /* name */b);'))
def test_multi_line_comments(self):
# missing explicit is bad
self.assert_multi_line_lint(
r'''int a = 0;
/* multi-liner
class Foo {
Foo(int f); // should cause a lint warning in code
}
*/ ''',
'')
self.assert_multi_line_lint(
'''\
/* int a = 0; multi-liner
static const int b = 0;''',
['Could not find end of multi-line comment'
' [readability/multiline_comment] [5]',
'Complex multi-line /*...*/-style comment found. '
'Lint may give bogus warnings. Consider replacing these with '
'//-style comments, with #if 0...#endif, or with more clearly '
'structured multi-line comments. [readability/multiline_comment] [5]'])
self.assert_multi_line_lint(r''' /* multi-line comment''',
['Could not find end of multi-line comment'
' [readability/multiline_comment] [5]',
'Complex multi-line /*...*/-style comment found. '
'Lint may give bogus warnings. Consider replacing these with '
'//-style comments, with #if 0...#endif, or with more clearly '
'structured multi-line comments. [readability/multiline_comment] [5]'])
self.assert_multi_line_lint(r''' // /* comment, but not multi-line''', '')
def test_multiline_strings(self):
multiline_string_error_message = (
'Multi-line string ("...") found. This lint script doesn\'t '
'do well with such strings, and may give bogus warnings. They\'re '
'ugly and unnecessary, and you should use concatenation instead".'
' [readability/multiline_string] [5]')
file_path = 'mydir/foo.cpp'
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data(file_path, 'cpp',
['const char* str = "This is a\\',
' multiline string.";'],
error_collector)
self.assertEqual(
2, # One per line.
error_collector.result_list().count(multiline_string_error_message))
# Test non-explicit single-argument constructors
def test_explicit_single_argument_constructors(self):
# missing explicit is bad
self.assert_multi_line_lint(
'''\
class Foo {
Foo(int f);
};''',
'Single-argument constructors should be marked explicit.'
' [runtime/explicit] [5]')
# missing explicit is bad, even with whitespace
self.assert_multi_line_lint(
'''\
class Foo {
Foo (int f);
};''',
['Extra space before ( in function call [whitespace/parens] [4]',
'Single-argument constructors should be marked explicit.'
' [runtime/explicit] [5]'])
# missing explicit, with distracting comment, is still bad
self.assert_multi_line_lint(
'''\
class Foo {
Foo(int f); // simpler than Foo(blargh, blarg)
};''',
'Single-argument constructors should be marked explicit.'
' [runtime/explicit] [5]')
# missing explicit, with qualified classname
self.assert_multi_line_lint(
'''\
class Qualifier::AnotherOne::Foo {
Foo(int f);
};''',
'Single-argument constructors should be marked explicit.'
' [runtime/explicit] [5]')
# structs are caught as well.
self.assert_multi_line_lint(
'''\
struct Foo {
Foo(int f);
};''',
'Single-argument constructors should be marked explicit.'
' [runtime/explicit] [5]')
# Templatized classes are caught as well.
self.assert_multi_line_lint(
'''\
template<typename T> class Foo {
Foo(int f);
};''',
'Single-argument constructors should be marked explicit.'
' [runtime/explicit] [5]')
# proper style is okay
self.assert_multi_line_lint(
'''\
class Foo {
explicit Foo(int f);
};''',
'')
# two argument constructor is okay
self.assert_multi_line_lint(
'''\
class Foo {
Foo(int f, int b);
};''',
'')
# two argument constructor, across two lines, is okay
self.assert_multi_line_lint(
'''\
class Foo {
Foo(int f,
int b);
};''',
'')
# non-constructor (but similar name), is okay
self.assert_multi_line_lint(
'''\
class Foo {
aFoo(int f);
};''',
'')
# constructor with void argument is okay
self.assert_multi_line_lint(
'''\
class Foo {
Foo(void);
};''',
'')
# single argument method is okay
self.assert_multi_line_lint(
'''\
class Foo {
Bar(int b);
};''',
'')
# comments should be ignored
self.assert_multi_line_lint(
'''\
class Foo {
// Foo(int f);
};''',
'')
# single argument function following class definition is okay
# (okay, it's not actually valid, but we don't want a false positive)
self.assert_multi_line_lint(
'''\
class Foo {
Foo(int f, int b);
};
Foo(int f);''',
'')
# single argument function is okay
self.assert_multi_line_lint(
'''static Foo(int f);''',
'')
# single argument copy constructor is okay.
self.assert_multi_line_lint(
'''\
class Foo {
Foo(const Foo&);
};''',
'')
self.assert_multi_line_lint(
'''\
class Foo {
Foo(Foo&);
};''',
'')
def test_slash_star_comment_on_single_line(self):
self.assert_multi_line_lint(
'''/* static */ Foo(int f);''',
'')
self.assert_multi_line_lint(
'''/*/ static */ Foo(int f);''',
'')
self.assert_multi_line_lint(
'''/*/ static Foo(int f);''',
'Could not find end of multi-line comment'
' [readability/multiline_comment] [5]')
self.assert_multi_line_lint(
''' /*/ static Foo(int f);''',
'Could not find end of multi-line comment'
' [readability/multiline_comment] [5]')
# Test suspicious usage of "if" like this:
# if (a == b) {
# DoSomething();
# } if (a == c) { // Should be "else if".
# DoSomething(); // This gets called twice if a == b && a == c.
# }
def test_suspicious_usage_of_if(self):
self.assert_lint(
' if (a == b) {',
'')
self.assert_lint(
' } if (a == b) {',
'Did you mean "else if"? If not, start a new line for "if".'
' [readability/braces] [4]')
# Test suspicious usage of memset. Specifically, a 0
# as the final argument is almost certainly an error.
def test_suspicious_usage_of_memset(self):
# Normal use is okay.
self.assert_lint(
' memset(buf, 0, sizeof(buf))',
'')
# A 0 as the final argument is almost certainly an error.
self.assert_lint(
' memset(buf, sizeof(buf), 0)',
'Did you mean "memset(buf, 0, sizeof(buf))"?'
' [runtime/memset] [4]')
self.assert_lint(
' memset(buf, xsize * ysize, 0)',
'Did you mean "memset(buf, 0, xsize * ysize)"?'
' [runtime/memset] [4]')
# There is legitimate test code that uses this form.
# This is okay since the second argument is a literal.
self.assert_lint(
" memset(buf, 'y', 0)",
'')
self.assert_lint(
' memset(buf, 4, 0)',
'')
self.assert_lint(
' memset(buf, -1, 0)',
'')
self.assert_lint(
' memset(buf, 0xF1, 0)',
'')
self.assert_lint(
' memset(buf, 0xcd, 0)',
'')
def test_check_posix_threading(self):
self.assert_lint('sctime_r()', '')
self.assert_lint('strtok_r()', '')
self.assert_lint(' strtok_r(foo, ba, r)', '')
self.assert_lint('brand()', '')
self.assert_lint('_rand()', '')
self.assert_lint('.rand()', '')
self.assert_lint('>rand()', '')
self.assert_lint('rand()',
'Consider using rand_r(...) instead of rand(...)'
' for improved thread safety.'
' [runtime/threadsafe_fn] [2]')
self.assert_lint('strtok()',
'Consider using strtok_r(...) '
'instead of strtok(...)'
' for improved thread safety.'
' [runtime/threadsafe_fn] [2]')
# Test potential format string bugs like printf(foo).
def test_format_strings(self):
self.assert_lint('printf("foo")', '')
self.assert_lint('printf("foo: %s", foo)', '')
self.assert_lint('DocidForPrintf(docid)', '') # Should not trigger.
self.assert_lint(
'printf(foo)',
'Potential format string bug. Do printf("%s", foo) instead.'
' [runtime/printf] [4]')
self.assert_lint(
'printf(foo.c_str())',
'Potential format string bug. '
'Do printf("%s", foo.c_str()) instead.'
' [runtime/printf] [4]')
self.assert_lint(
'printf(foo->c_str())',
'Potential format string bug. '
'Do printf("%s", foo->c_str()) instead.'
' [runtime/printf] [4]')
self.assert_lint(
'StringPrintf(foo)',
            'Potential format string bug. Do StringPrintf("%s", foo) instead.'
            ' [runtime/printf] [4]')
# Variable-length arrays are not permitted.
def test_variable_length_array_detection(self):
errmsg = ('Do not use variable-length arrays. Use an appropriately named '
"('k' followed by CamelCase) compile-time constant for the size."
' [runtime/arrays] [1]')
self.assert_lint('int a[any_old_variable];', errmsg)
self.assert_lint('int doublesize[some_var * 2];', errmsg)
self.assert_lint('int a[afunction()];', errmsg)
self.assert_lint('int a[function(kMaxFooBars)];', errmsg)
self.assert_lint('bool aList[items_->size()];', errmsg)
self.assert_lint('namespace::Type buffer[len+1];', errmsg)
self.assert_lint('int a[64];', '')
self.assert_lint('int a[0xFF];', '')
self.assert_lint('int first[256], second[256];', '')
self.assert_lint('int arrayName[kCompileTimeConstant];', '')
self.assert_lint('char buf[somenamespace::kBufSize];', '')
self.assert_lint('int arrayName[ALL_CAPS];', '')
self.assert_lint('AClass array1[foo::bar::ALL_CAPS];', '')
self.assert_lint('int a[kMaxStrLen + 1];', '')
self.assert_lint('int a[sizeof(foo)];', '')
self.assert_lint('int a[sizeof(*foo)];', '')
self.assert_lint('int a[sizeof foo];', '')
self.assert_lint('int a[sizeof(struct Foo)];', '')
self.assert_lint('int a[128 - sizeof(const bar)];', '')
self.assert_lint('int a[(sizeof(foo) * 4)];', '')
self.assert_lint('int a[(arraysize(fixed_size_array)/2) << 1];', 'Missing spaces around / [whitespace/operators] [3]')
self.assert_lint('delete a[some_var];', '')
self.assert_lint('return a[some_var];', '')
# Brace usage
def test_braces(self):
# Braces shouldn't be followed by a ; unless they're defining a struct
# or initializing an array
self.assert_lint('int a[3] = { 1, 2, 3 };', '')
self.assert_lint(
'''\
const int foo[] =
{1, 2, 3 };''',
'')
# For single line, unmatched '}' with a ';' is ignored (not enough context)
self.assert_multi_line_lint(
'''\
int a[3] = { 1,
2,
3 };''',
'')
self.assert_multi_line_lint(
'''\
int a[2][3] = { { 1, 2 },
{ 3, 4 } };''',
'')
self.assert_multi_line_lint(
'''\
int a[2][3] =
{ { 1, 2 },
{ 3, 4 } };''',
'')
# CHECK/EXPECT_TRUE/EXPECT_FALSE replacements
def test_check_check(self):
self.assert_lint('CHECK(x == 42)',
'Consider using CHECK_EQ instead of CHECK(a == b)'
' [readability/check] [2]')
self.assert_lint('CHECK(x != 42)',
'Consider using CHECK_NE instead of CHECK(a != b)'
' [readability/check] [2]')
self.assert_lint('CHECK(x >= 42)',
'Consider using CHECK_GE instead of CHECK(a >= b)'
' [readability/check] [2]')
self.assert_lint('CHECK(x > 42)',
'Consider using CHECK_GT instead of CHECK(a > b)'
' [readability/check] [2]')
self.assert_lint('CHECK(x <= 42)',
'Consider using CHECK_LE instead of CHECK(a <= b)'
' [readability/check] [2]')
self.assert_lint('CHECK(x < 42)',
'Consider using CHECK_LT instead of CHECK(a < b)'
' [readability/check] [2]')
self.assert_lint('DCHECK(x == 42)',
'Consider using DCHECK_EQ instead of DCHECK(a == b)'
' [readability/check] [2]')
self.assert_lint('DCHECK(x != 42)',
'Consider using DCHECK_NE instead of DCHECK(a != b)'
' [readability/check] [2]')
self.assert_lint('DCHECK(x >= 42)',
'Consider using DCHECK_GE instead of DCHECK(a >= b)'
' [readability/check] [2]')
self.assert_lint('DCHECK(x > 42)',
'Consider using DCHECK_GT instead of DCHECK(a > b)'
' [readability/check] [2]')
self.assert_lint('DCHECK(x <= 42)',
'Consider using DCHECK_LE instead of DCHECK(a <= b)'
' [readability/check] [2]')
self.assert_lint('DCHECK(x < 42)',
'Consider using DCHECK_LT instead of DCHECK(a < b)'
' [readability/check] [2]')
self.assert_lint(
'EXPECT_TRUE("42" == x)',
'Consider using EXPECT_EQ instead of EXPECT_TRUE(a == b)'
' [readability/check] [2]')
self.assert_lint(
'EXPECT_TRUE("42" != x)',
'Consider using EXPECT_NE instead of EXPECT_TRUE(a != b)'
' [readability/check] [2]')
self.assert_lint(
'EXPECT_TRUE(+42 >= x)',
'Consider using EXPECT_GE instead of EXPECT_TRUE(a >= b)'
' [readability/check] [2]')
self.assert_lint(
'EXPECT_TRUE_M(-42 > x)',
'Consider using EXPECT_GT_M instead of EXPECT_TRUE_M(a > b)'
' [readability/check] [2]')
self.assert_lint(
'EXPECT_TRUE_M(42U <= x)',
'Consider using EXPECT_LE_M instead of EXPECT_TRUE_M(a <= b)'
' [readability/check] [2]')
self.assert_lint(
'EXPECT_TRUE_M(42L < x)',
'Consider using EXPECT_LT_M instead of EXPECT_TRUE_M(a < b)'
' [readability/check] [2]')
self.assert_lint(
'EXPECT_FALSE(x == 42)',
'Consider using EXPECT_NE instead of EXPECT_FALSE(a == b)'
' [readability/check] [2]')
self.assert_lint(
'EXPECT_FALSE(x != 42)',
'Consider using EXPECT_EQ instead of EXPECT_FALSE(a != b)'
' [readability/check] [2]')
self.assert_lint(
'EXPECT_FALSE(x >= 42)',
'Consider using EXPECT_LT instead of EXPECT_FALSE(a >= b)'
' [readability/check] [2]')
self.assert_lint(
'ASSERT_FALSE(x > 42)',
'Consider using ASSERT_LE instead of ASSERT_FALSE(a > b)'
' [readability/check] [2]')
self.assert_lint(
'ASSERT_FALSE(x <= 42)',
'Consider using ASSERT_GT instead of ASSERT_FALSE(a <= b)'
' [readability/check] [2]')
self.assert_lint(
'ASSERT_FALSE_M(x < 42)',
'Consider using ASSERT_GE_M instead of ASSERT_FALSE_M(a < b)'
' [readability/check] [2]')
self.assert_lint('CHECK(some_iterator == obj.end())', '')
self.assert_lint('EXPECT_TRUE(some_iterator == obj.end())', '')
self.assert_lint('EXPECT_FALSE(some_iterator == obj.end())', '')
self.assert_lint('CHECK(CreateTestFile(dir, (1 << 20)));', '')
self.assert_lint('CHECK(CreateTestFile(dir, (1 >> 20)));', '')
self.assert_lint('CHECK(x<42)',
['Missing spaces around <'
' [whitespace/operators] [3]',
'Consider using CHECK_LT instead of CHECK(a < b)'
' [readability/check] [2]'])
self.assert_lint('CHECK(x>42)',
'Consider using CHECK_GT instead of CHECK(a > b)'
' [readability/check] [2]')
self.assert_lint(
' EXPECT_TRUE(42 < x) // Random comment.',
'Consider using EXPECT_LT instead of EXPECT_TRUE(a < b)'
' [readability/check] [2]')
self.assert_lint(
'EXPECT_TRUE( 42 < x )',
['Extra space after ( in function call'
' [whitespace/parens] [4]',
'Consider using EXPECT_LT instead of EXPECT_TRUE(a < b)'
' [readability/check] [2]'])
self.assert_lint(
'CHECK("foo" == "foo")',
'Consider using CHECK_EQ instead of CHECK(a == b)'
' [readability/check] [2]')
self.assert_lint('CHECK_EQ("foo", "foo")', '')
def test_brace_at_begin_of_line(self):
self.assert_lint('{',
'This { should be at the end of the previous line'
' [whitespace/braces] [4]')
self.assert_multi_line_lint(
'#endif\n'
'{\n'
'}\n',
'')
self.assert_multi_line_lint(
'if (condition) {',
'')
self.assert_multi_line_lint(
' MACRO1(macroArg) {',
'')
self.assert_multi_line_lint(
'ACCESSOR_GETTER(MessageEventPorts) {',
'Place brace on its own line for function definitions. [whitespace/braces] [4]')
self.assert_multi_line_lint(
'int foo() {',
'Place brace on its own line for function definitions. [whitespace/braces] [4]')
self.assert_multi_line_lint(
'int foo() const {',
'Place brace on its own line for function definitions. [whitespace/braces] [4]')
self.assert_multi_line_lint(
'int foo() const OVERRIDE {',
'Place brace on its own line for function definitions. [whitespace/braces] [4]')
self.assert_multi_line_lint(
'int foo() OVERRIDE {',
'Place brace on its own line for function definitions. [whitespace/braces] [4]')
self.assert_multi_line_lint(
'int foo() const\n'
'{\n'
'}\n',
'')
self.assert_multi_line_lint(
'int foo() OVERRIDE\n'
'{\n'
'}\n',
'')
self.assert_multi_line_lint(
'if (condition\n'
' && condition2\n'
' && condition3) {\n'
'}\n',
'')
def test_mismatching_spaces_in_parens(self):
self.assert_lint('if (foo ) {', 'Extra space before ) in if'
' [whitespace/parens] [5]')
self.assert_lint('switch ( foo) {', 'Extra space after ( in switch'
' [whitespace/parens] [5]')
self.assert_lint('for (foo; ba; bar ) {', 'Extra space before ) in for'
' [whitespace/parens] [5]')
self.assert_lint('for ((foo); (ba); (bar) ) {', 'Extra space before ) in for'
' [whitespace/parens] [5]')
self.assert_lint('for (; foo; bar) {', '')
self.assert_lint('for (; (foo); (bar)) {', '')
self.assert_lint('for ( ; foo; bar) {', '')
self.assert_lint('for ( ; (foo); (bar)) {', '')
self.assert_lint('for ( ; foo; bar ) {', 'Extra space before ) in for'
' [whitespace/parens] [5]')
self.assert_lint('for ( ; (foo); (bar) ) {', 'Extra space before ) in for'
' [whitespace/parens] [5]')
self.assert_lint('for (foo; bar; ) {', '')
self.assert_lint('for ((foo); (bar); ) {', '')
self.assert_lint('foreach (foo, foos ) {', 'Extra space before ) in foreach'
' [whitespace/parens] [5]')
self.assert_lint('foreach ( foo, foos) {', 'Extra space after ( in foreach'
' [whitespace/parens] [5]')
self.assert_lint('while ( foo) {', 'Extra space after ( in while'
' [whitespace/parens] [5]')
def test_spacing_for_fncall(self):
self.assert_lint('if (foo) {', '')
self.assert_lint('for (foo;bar;baz) {', '')
self.assert_lint('foreach (foo, foos) {', '')
self.assert_lint('while (foo) {', '')
self.assert_lint('switch (foo) {', '')
self.assert_lint('new (RenderArena()) RenderInline(document())', '')
self.assert_lint('foo( bar)', 'Extra space after ( in function call'
' [whitespace/parens] [4]')
self.assert_lint('foobar( \\', '')
        self.assert_lint('foobar(     \\', '')
self.assert_lint('( a + b)', 'Extra space after ('
' [whitespace/parens] [2]')
self.assert_lint('((a+b))', '')
self.assert_lint('foo (foo)', 'Extra space before ( in function call'
' [whitespace/parens] [4]')
self.assert_lint('#elif (foo(bar))', '')
self.assert_lint('#elif (foo(bar) && foo(baz))', '')
self.assert_lint('typedef foo (*foo)(foo)', '')
self.assert_lint('typedef foo (*foo12bar_)(foo)', '')
self.assert_lint('typedef foo (Foo::*bar)(foo)', '')
self.assert_lint('foo (Foo::*bar)(',
'Extra space before ( in function call'
' [whitespace/parens] [4]')
self.assert_lint('typedef foo (Foo::*bar)(', '')
self.assert_lint('(foo)(bar)', '')
self.assert_lint('Foo (*foo)(bar)', '')
self.assert_lint('Foo (*foo)(Bar bar,', '')
self.assert_lint('char (*p)[sizeof(foo)] = &foo', '')
self.assert_lint('char (&ref)[sizeof(foo)] = &foo', '')
self.assert_lint('const char32 (*table[])[6];', '')
def test_spacing_before_braces(self):
self.assert_lint('if (foo){', 'Missing space before {'
' [whitespace/braces] [5]')
self.assert_lint('for{', 'Missing space before {'
' [whitespace/braces] [5]')
self.assert_lint('for {', '')
self.assert_lint('EXPECT_DEBUG_DEATH({', '')
def test_spacing_between_braces(self):
        self.assert_lint('    { }', '')
        self.assert_lint('    {}', 'Missing space inside { }. [whitespace/braces] [5]')
        self.assert_lint('    {    }', 'Too many spaces inside { }. [whitespace/braces] [5]')
def test_spacing_around_else(self):
self.assert_lint('}else {', 'Missing space before else'
' [whitespace/braces] [5]')
self.assert_lint('} else{', 'Missing space before {'
' [whitespace/braces] [5]')
self.assert_lint('} else {', '')
self.assert_lint('} else if', '')
def test_spacing_for_binary_ops(self):
self.assert_lint('if (foo<=bar) {', 'Missing spaces around <='
' [whitespace/operators] [3]')
self.assert_lint('if (foo<bar) {', 'Missing spaces around <'
' [whitespace/operators] [3]')
self.assert_lint('if (foo<bar->baz) {', 'Missing spaces around <'
' [whitespace/operators] [3]')
self.assert_lint('if (foo<bar->bar) {', 'Missing spaces around <'
' [whitespace/operators] [3]')
self.assert_lint('typedef hash_map<Foo, Bar', 'Missing spaces around <'
' [whitespace/operators] [3]')
self.assert_lint('typedef hash_map<FoooooType, BaaaaarType,', '')
self.assert_lint('a<Foo> t+=b;', 'Missing spaces around +='
' [whitespace/operators] [3]')
self.assert_lint('a<Foo> t-=b;', 'Missing spaces around -='
' [whitespace/operators] [3]')
self.assert_lint('a<Foo*> t*=b;', 'Missing spaces around *='
' [whitespace/operators] [3]')
self.assert_lint('a<Foo*> t/=b;', 'Missing spaces around /='
' [whitespace/operators] [3]')
self.assert_lint('a<Foo*> t|=b;', 'Missing spaces around |='
' [whitespace/operators] [3]')
self.assert_lint('a<Foo*> t&=b;', 'Missing spaces around &='
' [whitespace/operators] [3]')
self.assert_lint('a<Foo*> t<<=b;', 'Missing spaces around <<='
' [whitespace/operators] [3]')
self.assert_lint('a<Foo*> t>>=b;', 'Missing spaces around >>='
' [whitespace/operators] [3]')
self.assert_lint('a<Foo*> t>>=&b|c;', 'Missing spaces around >>='
' [whitespace/operators] [3]')
self.assert_lint('a<Foo*> t<<=*b/c;', 'Missing spaces around <<='
' [whitespace/operators] [3]')
self.assert_lint('a<Foo> t -= b;', '')
self.assert_lint('a<Foo> t += b;', '')
self.assert_lint('a<Foo*> t *= b;', '')
self.assert_lint('a<Foo*> t /= b;', '')
self.assert_lint('a<Foo*> t |= b;', '')
self.assert_lint('a<Foo*> t &= b;', '')
self.assert_lint('a<Foo*> t <<= b;', '')
self.assert_lint('a<Foo*> t >>= b;', '')
self.assert_lint('a<Foo*> t >>= &b|c;', 'Missing spaces around |'
' [whitespace/operators] [3]')
self.assert_lint('a<Foo*> t <<= *b/c;', 'Missing spaces around /'
' [whitespace/operators] [3]')
self.assert_lint('a<Foo*> t <<= b/c; //Test', [
'Should have a space between // and comment '
'[whitespace/comments] [4]', 'Missing'
' spaces around / [whitespace/operators] [3]'])
self.assert_lint('a<Foo*> t <<= b||c; //Test', ['One space before end'
' of line comments [whitespace/comments] [5]',
'Should have a space between // and comment '
'[whitespace/comments] [4]',
'Missing spaces around || [whitespace/operators] [3]'])
self.assert_lint('a<Foo*> t <<= b&&c; // Test', 'Missing spaces around'
' && [whitespace/operators] [3]')
self.assert_lint('a<Foo*> t <<= b&&&c; // Test', 'Missing spaces around'
' && [whitespace/operators] [3]')
self.assert_lint('a<Foo*> t <<= b&&*c; // Test', 'Missing spaces around'
' && [whitespace/operators] [3]')
self.assert_lint('a<Foo*> t <<= b && *c; // Test', '')
self.assert_lint('a<Foo*> t <<= b && &c; // Test', '')
self.assert_lint('a<Foo*> t <<= b || &c; /*Test', 'Complex multi-line '
'/*...*/-style comment found. Lint may give bogus '
'warnings. Consider replacing these with //-style'
' comments, with #if 0...#endif, or with more clearly'
' structured multi-line comments. [readability/multiline_comment] [5]')
self.assert_lint('a<Foo&> t <<= &b | &c;', '')
self.assert_lint('a<Foo*> t <<= &b & &c; // Test', '')
self.assert_lint('a<Foo*> t <<= *b / &c; // Test', '')
self.assert_lint('if (a=b == 1)', 'Missing spaces around = [whitespace/operators] [4]')
self.assert_lint('a = 1<<20', 'Missing spaces around << [whitespace/operators] [3]')
self.assert_lint('if (a = b == 1)', '')
self.assert_lint('a = 1 << 20', '')
self.assert_multi_line_lint('#include <sys/io.h>\n', '')
self.assert_multi_line_lint('#import <foo/bar.h>\n', '')
def test_operator_methods(self):
self.assert_lint('String operator+(const String&, const String&);', '')
self.assert_lint('String operator/(const String&, const String&);', '')
self.assert_lint('bool operator==(const String&, const String&);', '')
self.assert_lint('String& operator-=(const String&, const String&);', '')
self.assert_lint('String& operator+=(const String&, const String&);', '')
self.assert_lint('String& operator*=(const String&, const String&);', '')
self.assert_lint('String& operator%=(const String&, const String&);', '')
self.assert_lint('String& operator&=(const String&, const String&);', '')
self.assert_lint('String& operator<<=(const String&, const String&);', '')
self.assert_lint('String& operator>>=(const String&, const String&);', '')
self.assert_lint('String& operator|=(const String&, const String&);', '')
self.assert_lint('String& operator^=(const String&, const String&);', '')
def test_spacing_before_last_semicolon(self):
self.assert_lint('call_function() ;',
'Extra space before last semicolon. If this should be an '
'empty statement, use { } instead.'
' [whitespace/semicolon] [5]')
self.assert_lint('while (true) ;',
'Extra space before last semicolon. If this should be an '
'empty statement, use { } instead.'
' [whitespace/semicolon] [5]')
self.assert_lint('default:;',
'Semicolon defining empty statement. Use { } instead.'
' [whitespace/semicolon] [5]')
self.assert_lint(' ;',
'Line contains only semicolon. If this should be an empty '
'statement, use { } instead.'
' [whitespace/semicolon] [5]')
self.assert_lint('for (int i = 0; ;', '')
# Static or global STL strings.
def test_static_or_global_stlstrings(self):
self.assert_lint('string foo;',
'For a static/global string constant, use a C style '
'string instead: "char foo[]".'
' [runtime/string] [4]')
self.assert_lint('string kFoo = "hello"; // English',
'For a static/global string constant, use a C style '
'string instead: "char kFoo[]".'
' [runtime/string] [4]')
self.assert_lint('static string foo;',
'For a static/global string constant, use a C style '
'string instead: "static char foo[]".'
' [runtime/string] [4]')
self.assert_lint('static const string foo;',
'For a static/global string constant, use a C style '
'string instead: "static const char foo[]".'
' [runtime/string] [4]')
self.assert_lint('string Foo::bar;',
'For a static/global string constant, use a C style '
'string instead: "char Foo::bar[]".'
' [runtime/string] [4]')
# Rare case.
self.assert_lint('string foo("foobar");',
'For a static/global string constant, use a C style '
'string instead: "char foo[]".'
' [runtime/string] [4]')
# Should not catch local or member variables.
self.assert_lint(' string foo', '')
# Should not catch functions.
self.assert_lint('string EmptyString() { return ""; }', '')
self.assert_lint('string EmptyString () { return ""; }', '')
self.assert_lint('string VeryLongNameFunctionSometimesEndsWith(\n'
' VeryLongNameType veryLongNameVariable) { }', '')
self.assert_lint('template<>\n'
'string FunctionTemplateSpecialization<SomeType>(\n'
' int x) { return ""; }', '')
self.assert_lint('template<>\n'
'string FunctionTemplateSpecialization<vector<A::B>* >(\n'
' int x) { return ""; }', '')
# should not catch methods of template classes.
self.assert_lint('string Class<Type>::Method() const\n'
'{\n'
' return "";\n'
'}\n', '')
self.assert_lint('string Class<Type>::Method(\n'
' int arg) const\n'
'{\n'
' return "";\n'
'}\n', '')
def test_no_spaces_in_function_calls(self):
self.assert_lint('TellStory(1, 3);',
'')
self.assert_lint('TellStory(1, 3 );',
'Extra space before )'
' [whitespace/parens] [2]')
self.assert_lint('TellStory(1 /* wolf */, 3 /* pigs */);',
'')
self.assert_multi_line_lint('#endif\n );',
'')
def test_one_spaces_between_code_and_comments(self):
self.assert_lint('} // namespace foo',
'')
self.assert_lint('}// namespace foo',
'One space before end of line comments'
' [whitespace/comments] [5]')
self.assert_lint('printf("foo"); // Outside quotes.',
'')
        self.assert_lint('int i = 0; // Having one space is fine.', '')
        self.assert_lint('int i = 0;  // Having two spaces is bad.',
                         'One space before end of line comments'
                         ' [whitespace/comments] [5]')
        self.assert_lint('int i = 0;   // Having three spaces is bad.',
                         'One space before end of line comments'
                         ' [whitespace/comments] [5]')
self.assert_lint('// Top level comment', '')
        self.assert_lint('    // Line starts with four spaces.', '')
self.assert_lint('foo();\n'
'{ // A scope is opening.', '')
self.assert_lint(' foo();\n'
' { // An indented scope is opening.', '')
self.assert_lint('if (foo) { // not a pure scope',
'')
self.assert_lint('printf("// In quotes.")', '')
self.assert_lint('printf("\\"%s // In quotes.")', '')
self.assert_lint('printf("%s", "// In quotes.")', '')
def test_one_spaces_after_punctuation_in_comments(self):
self.assert_lint('int a; // This is a sentence.',
'')
        self.assert_lint('int a; // This is a sentence.  ',
                         'Line ends in whitespace. Consider deleting these extra spaces. [whitespace/end_of_line] [4]')
        self.assert_lint('int a; // This is a sentence. This is another sentence.',
                         '')
        self.assert_lint('int a; // This is a sentence.  This is another sentence.',
                         'Should have only a single space after a punctuation in a comment. [whitespace/comments] [5]')
        self.assert_lint('int a; // This is a sentence!  This is another sentence.',
                         'Should have only a single space after a punctuation in a comment. [whitespace/comments] [5]')
        self.assert_lint('int a; // Why did I write this?  This is another sentence.',
                         'Should have only a single space after a punctuation in a comment. [whitespace/comments] [5]')
        self.assert_lint('int a; // Elementary,  my dear.',
                         'Should have only a single space after a punctuation in a comment. [whitespace/comments] [5]')
        self.assert_lint('int a; // The following should be clear:  Is it?',
                         'Should have only a single space after a punctuation in a comment. [whitespace/comments] [5]')
        self.assert_lint('int a; // Look at the following semicolon;  I hope this gives an error.',
                         'Should have only a single space after a punctuation in a comment. [whitespace/comments] [5]')
def test_space_after_comment_marker(self):
self.assert_lint('//', '')
self.assert_lint('//x', 'Should have a space between // and comment'
' [whitespace/comments] [4]')
self.assert_lint('// x', '')
self.assert_lint('//----', '')
self.assert_lint('//====', '')
self.assert_lint('//////', '')
self.assert_lint('////// x', '')
self.assert_lint('/// x', '')
self.assert_lint('////x', 'Should have a space between // and comment'
' [whitespace/comments] [4]')
def test_newline_at_eof(self):
def do_test(self, data, is_missing_eof):
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data('foo.cpp', 'cpp', data.split('\n'),
error_collector)
# The warning appears only once.
self.assertEqual(
int(is_missing_eof),
error_collector.results().count(
'Could not find a newline character at the end of the file.'
' [whitespace/ending_newline] [5]'))
do_test(self, '// Newline\n// at EOF\n', False)
do_test(self, '// No newline\n// at EOF', True)
def test_invalid_utf8(self):
def do_test(self, raw_bytes, has_invalid_utf8):
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data('foo.cpp', 'cpp',
unicode(raw_bytes, 'utf8', 'replace').split('\n'),
error_collector)
# The warning appears only once.
self.assertEqual(
int(has_invalid_utf8),
error_collector.results().count(
'Line contains invalid UTF-8'
' (or Unicode replacement character).'
' [readability/utf8] [5]'))
do_test(self, 'Hello world\n', False)
do_test(self, '\xe9\x8e\xbd\n', False)
do_test(self, '\xe9x\x8e\xbd\n', True)
# This is the encoding of the replacement character itself (which
# you can see by evaluating codecs.getencoder('utf8')(u'\ufffd')).
do_test(self, '\xef\xbf\xbd\n', True)
def test_is_blank_line(self):
self.assertTrue(cpp_style.is_blank_line(''))
self.assertTrue(cpp_style.is_blank_line(' '))
self.assertTrue(cpp_style.is_blank_line(' \t\r\n'))
self.assertTrue(not cpp_style.is_blank_line('int a;'))
self.assertTrue(not cpp_style.is_blank_line('{'))
def test_blank_lines_check(self):
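        # assert_blank_lines_check(lines, X, Y) is expected to verify X
        # "blank line at the start of a code block" warnings and Y
        # "blank line at the end of a code block" warnings for the given lines.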
self.assert_blank_lines_check(['{\n', '\n', '\n', '}\n'], 1, 1)
self.assert_blank_lines_check([' if (foo) {\n', '\n', ' }\n'], 1, 1)
self.assert_blank_lines_check(
['\n', '// {\n', '\n', '\n', '// Comment\n', '{\n', '}\n'], 0, 0)
self.assert_blank_lines_check(['\n', 'run("{");\n', '\n'], 0, 0)
self.assert_blank_lines_check(['\n', ' if (foo) { return 0; }\n', '\n'], 0, 0)
def test_allow_blank_line_before_closing_namespace(self):
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data('foo.cpp', 'cpp',
['namespace {', '', '} // namespace'],
error_collector)
self.assertEqual(0, error_collector.results().count(
'Blank line at the end of a code block. Is this needed?'
' [whitespace/blank_line] [3]'))
def test_allow_blank_line_before_if_else_chain(self):
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data('foo.cpp', 'cpp',
['if (hoge) {',
'', # No warning
'} else if (piyo) {',
'', # No warning
'} else if (piyopiyo) {',
' hoge = true;', # No warning
'} else {',
'', # Warning on this line
'}'],
error_collector)
self.assertEqual(1, error_collector.results().count(
'Blank line at the end of a code block. Is this needed?'
' [whitespace/blank_line] [3]'))
def test_else_on_same_line_as_closing_braces(self):
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data('foo.cpp', 'cpp',
['if (hoge) {',
'',
'}',
                               ' else {',  # Warning on this line
'',
'}'],
error_collector)
self.assertEqual(1, error_collector.results().count(
'An else should appear on the same line as the preceding }'
' [whitespace/newline] [4]'))
def test_else_clause_not_on_same_line_as_else(self):
self.assert_lint(' else DoSomethingElse();',
'Else clause should never be on same line as else '
'(use 2 lines) [whitespace/newline] [4]')
self.assert_lint(' else ifDoSomethingElse();',
'Else clause should never be on same line as else '
'(use 2 lines) [whitespace/newline] [4]')
self.assert_lint(' else if (blah) {', '')
self.assert_lint(' variable_ends_in_else = true;', '')
def test_comma(self):
self.assert_lint('a = f(1,2);',
'Missing space after , [whitespace/comma] [3]')
self.assert_lint('int tmp=a,a=b,b=tmp;',
['Missing spaces around = [whitespace/operators] [4]',
'Missing space after , [whitespace/comma] [3]'])
self.assert_lint('f(a, /* name */ b);', '')
self.assert_lint('f(a, /* name */b);', '')
def test_declaration(self):
self.assert_lint('int a;', '')
        self.assert_lint('int  a;', 'Extra space between int and a [whitespace/declaration] [3]')
        self.assert_lint('int*  a;', 'Extra space between int* and a [whitespace/declaration] [3]')
self.assert_lint('else if { }', '')
        self.assert_lint('else  if { }', 'Extra space between else and if [whitespace/declaration] [3]')
def test_pointer_reference_marker_location(self):
self.assert_lint('int* b;', '', 'foo.cpp')
self.assert_lint('int *b;',
'Declaration has space between type name and * in int *b [whitespace/declaration] [3]',
'foo.cpp')
self.assert_lint('return *b;', '', 'foo.cpp')
self.assert_lint('delete *b;', '', 'foo.cpp')
self.assert_lint('int *b;', '', 'foo.c')
self.assert_lint('int* b;',
'Declaration has space between * and variable name in int* b [whitespace/declaration] [3]',
'foo.c')
self.assert_lint('int& b;', '', 'foo.cpp')
self.assert_lint('int &b;',
'Declaration has space between type name and & in int &b [whitespace/declaration] [3]',
'foo.cpp')
self.assert_lint('return &b;', '', 'foo.cpp')
def test_indent(self):
self.assert_lint('static int noindent;', '')
        self.assert_lint('    int fourSpaceIndent;', '')
        self.assert_lint(' int oneSpaceIndent;',
                         'Weird number of spaces at line-start. '
                         'Are you using a 4-space indent? [whitespace/indent] [3]')
        self.assert_lint('   int threeSpaceIndent;',
                         'Weird number of spaces at line-start. '
                         'Are you using a 4-space indent? [whitespace/indent] [3]')
        self.assert_lint(' char* oneSpaceIndent = "public:";',
                         'Weird number of spaces at line-start. '
                         'Are you using a 4-space indent? [whitespace/indent] [3]')
        self.assert_lint(' public:',
                         'Weird number of spaces at line-start. '
                         'Are you using a 4-space indent? [whitespace/indent] [3]')
        self.assert_lint('  public:',
                         'Weird number of spaces at line-start. '
                         'Are you using a 4-space indent? [whitespace/indent] [3]')
        self.assert_lint('   public:',
                         'Weird number of spaces at line-start. '
                         'Are you using a 4-space indent? [whitespace/indent] [3]')
self.assert_multi_line_lint(
'class Foo {\n'
'public:\n'
' enum Bar {\n'
' Alpha,\n'
' Beta,\n'
'#if ENABLED_BETZ\n'
' Charlie,\n'
'#endif\n'
' };\n'
'};',
'')
def test_not_alabel(self):
self.assert_lint('MyVeryLongNamespace::MyVeryLongClassName::', '')
def test_tab(self):
self.assert_lint('\tint a;',
'Tab found; better to use spaces [whitespace/tab] [1]')
self.assert_lint('int a = 5;\t// set a to 5',
'Tab found; better to use spaces [whitespace/tab] [1]')
def test_unnamed_namespaces_in_headers(self):
self.assert_language_rules_check(
'foo.h', 'namespace {',
'Do not use unnamed namespaces in header files. See'
' http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Namespaces'
' for more information. [build/namespaces] [4]')
# namespace registration macros are OK.
self.assert_language_rules_check('foo.h', 'namespace { \\', '')
# named namespaces are OK.
self.assert_language_rules_check('foo.h', 'namespace foo {', '')
self.assert_language_rules_check('foo.h', 'namespace foonamespace {', '')
self.assert_language_rules_check('foo.cpp', 'namespace {', '')
self.assert_language_rules_check('foo.cpp', 'namespace foo {', '')
def test_build_class(self):
# Test that the linter can parse to the end of class definitions,
# and that it will report when it can't.
# Use multi-line linter because it performs the ClassState check.
self.assert_multi_line_lint(
'class Foo {',
'Failed to find complete declaration of class Foo'
' [build/class] [5]')
# Don't warn on forward declarations of various types.
self.assert_multi_line_lint(
'class Foo;',
'')
self.assert_multi_line_lint(
'''\
struct Foo*
foo = NewFoo();''',
'')
# Here is an example where the linter gets confused, even though
# the code doesn't violate the style guide.
self.assert_multi_line_lint(
'class Foo\n'
'#ifdef DERIVE_FROM_GOO\n'
' : public Goo {\n'
'#else\n'
' : public Hoo {\n'
'#endif\n'
'};',
'Failed to find complete declaration of class Foo'
' [build/class] [5]')
def test_build_end_comment(self):
# The crosstool compiler we currently use will fail to compile the
# code in this test, so we might consider removing the lint check.
self.assert_lint('#endif Not a comment',
'Uncommented text after #endif is non-standard.'
' Use a comment.'
' [build/endif_comment] [5]')
def test_build_forward_decl(self):
# The crosstool compiler we currently use will fail to compile the
# code in this test, so we might consider removing the lint check.
self.assert_lint('class Foo::Goo;',
'Inner-style forward declarations are invalid.'
' Remove this line.'
' [build/forward_decl] [5]')
def test_build_header_guard(self):
file_path = 'mydir/Foo.h'
# We can't rely on our internal stuff to get a sane path on the open source
# side of things, so just parse out the suggested header guard. This
# doesn't allow us to test the suggested header guard, but it does let us
# test all the other header tests.
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data(file_path, 'h', [], error_collector)
expected_guard = ''
matcher = re.compile(
            'No #ifndef header guard found, suggested CPP variable is: ([A-Za-z_0-9]+) ')
for error in error_collector.result_list():
matches = matcher.match(error)
if matches:
expected_guard = matches.group(1)
break
# Make sure we extracted something for our header guard.
self.assertNotEqual(expected_guard, '')
# Wrong guard
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data(file_path, 'h',
['#ifndef FOO_H', '#define FOO_H'], error_collector)
self.assertEqual(
1,
error_collector.result_list().count(
'#ifndef header guard has wrong style, please use: %s'
' [build/header_guard] [5]' % expected_guard),
error_collector.result_list())
# No define
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data(file_path, 'h',
['#ifndef %s' % expected_guard], error_collector)
self.assertEqual(
1,
error_collector.result_list().count(
'No #ifndef header guard found, suggested CPP variable is: %s'
' [build/header_guard] [5]' % expected_guard),
error_collector.result_list())
# Mismatched define
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data(file_path, 'h',
['#ifndef %s' % expected_guard,
'#define FOO_H'],
error_collector)
self.assertEqual(
1,
error_collector.result_list().count(
'No #ifndef header guard found, suggested CPP variable is: %s'
' [build/header_guard] [5]' % expected_guard),
error_collector.result_list())
# No header guard errors
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data(file_path, 'h',
['#ifndef %s' % expected_guard,
'#define %s' % expected_guard,
'#endif // %s' % expected_guard],
error_collector)
for line in error_collector.result_list():
if line.find('build/header_guard') != -1:
self.fail('Unexpected error: %s' % line)
# Completely incorrect header guard
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data(file_path, 'h',
['#ifndef FOO',
'#define FOO',
'#endif // FOO'],
error_collector)
self.assertEqual(
1,
error_collector.result_list().count(
'#ifndef header guard has wrong style, please use: %s'
' [build/header_guard] [5]' % expected_guard),
error_collector.result_list())
# Special case for flymake
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data('mydir/Foo_flymake.h', 'h',
['#ifndef %s' % expected_guard,
'#define %s' % expected_guard,
'#endif // %s' % expected_guard],
error_collector)
for line in error_collector.result_list():
if line.find('build/header_guard') != -1:
self.fail('Unexpected error: %s' % line)
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data('mydir/Foo_flymake.h', 'h', [], error_collector)
self.assertEqual(
1,
error_collector.result_list().count(
'No #ifndef header guard found, suggested CPP variable is: %s'
' [build/header_guard] [5]' % expected_guard),
error_collector.result_list())
# Verify that we don't blindly suggest the WTF prefix for all headers.
self.assertFalse(expected_guard.startswith('WTF_'))
# Allow the WTF_ prefix for files in that directory.
header_guard_filter = FilterConfiguration(('-', '+build/header_guard'))
error_collector = ErrorCollector(self.assertTrue, header_guard_filter)
self.process_file_data('Source/JavaScriptCore/wtf/TestName.h', 'h',
['#ifndef WTF_TestName_h', '#define WTF_TestName_h'],
error_collector)
self.assertEqual(0, len(error_collector.result_list()),
error_collector.result_list())
# Also allow the non WTF_ prefix for files in that directory.
error_collector = ErrorCollector(self.assertTrue, header_guard_filter)
self.process_file_data('Source/JavaScriptCore/wtf/TestName.h', 'h',
['#ifndef TestName_h', '#define TestName_h'],
error_collector)
self.assertEqual(0, len(error_collector.result_list()),
error_collector.result_list())
# Verify that we suggest the WTF prefix version.
error_collector = ErrorCollector(self.assertTrue, header_guard_filter)
self.process_file_data('Source/JavaScriptCore/wtf/TestName.h', 'h',
['#ifndef BAD_TestName_h', '#define BAD_TestName_h'],
error_collector)
self.assertEqual(
1,
error_collector.result_list().count(
'#ifndef header guard has wrong style, please use: WTF_TestName_h'
' [build/header_guard] [5]'),
error_collector.result_list())
def test_build_printf_format(self):
self.assert_lint(
r'printf("\%%d", value);',
'%, [, (, and { are undefined character escapes. Unescape them.'
' [build/printf_format] [3]')
self.assert_lint(
r'snprintf(buffer, sizeof(buffer), "\[%d", value);',
'%, [, (, and { are undefined character escapes. Unescape them.'
' [build/printf_format] [3]')
self.assert_lint(
r'fprintf(file, "\(%d", value);',
'%, [, (, and { are undefined character escapes. Unescape them.'
' [build/printf_format] [3]')
self.assert_lint(
r'vsnprintf(buffer, sizeof(buffer), "\\\{%d", ap);',
'%, [, (, and { are undefined character escapes. Unescape them.'
' [build/printf_format] [3]')
# Don't warn if double-slash precedes the symbol
self.assert_lint(r'printf("\\%%%d", value);',
'')
def test_runtime_printf_format(self):
self.assert_lint(
r'fprintf(file, "%q", value);',
'%q in format strings is deprecated. Use %ll instead.'
' [runtime/printf_format] [3]')
self.assert_lint(
r'aprintf(file, "The number is %12q", value);',
'%q in format strings is deprecated. Use %ll instead.'
' [runtime/printf_format] [3]')
self.assert_lint(
r'printf(file, "The number is" "%-12q", value);',
'%q in format strings is deprecated. Use %ll instead.'
' [runtime/printf_format] [3]')
self.assert_lint(
r'printf(file, "The number is" "%+12q", value);',
'%q in format strings is deprecated. Use %ll instead.'
' [runtime/printf_format] [3]')
self.assert_lint(
r'printf(file, "The number is" "% 12q", value);',
'%q in format strings is deprecated. Use %ll instead.'
' [runtime/printf_format] [3]')
self.assert_lint(
            r'snprintf(file, "Never mix %d and %1$d parameters!", value);',
'%N$ formats are unconventional. Try rewriting to avoid them.'
' [runtime/printf_format] [2]')
def assert_lintLogCodeOnError(self, code, expected_message):
# Special assert_lint which logs the input code on error.
result = self.perform_single_line_lint(code, 'foo.cpp')
if result != expected_message:
self.fail('For code: "%s"\nGot: "%s"\nExpected: "%s"'
% (code, result, expected_message))
def test_build_storage_class(self):
qualifiers = [None, 'const', 'volatile']
signs = [None, 'signed', 'unsigned']
types = ['void', 'char', 'int', 'float', 'double',
'schar', 'int8', 'uint8', 'int16', 'uint16',
'int32', 'uint32', 'int64', 'uint64']
storage_classes = ['auto', 'extern', 'register', 'static', 'typedef']
build_storage_class_error_message = (
'Storage class (static, extern, typedef, etc) should be first.'
' [build/storage_class] [5]')
# Some explicit cases. Legal in C++, deprecated in C99.
self.assert_lint('const int static foo = 5;',
build_storage_class_error_message)
self.assert_lint('char static foo;',
build_storage_class_error_message)
self.assert_lint('double const static foo = 2.0;',
build_storage_class_error_message)
self.assert_lint('uint64 typedef unsignedLongLong;',
build_storage_class_error_message)
self.assert_lint('int register foo = 0;',
build_storage_class_error_message)
# Since there are a very large number of possibilities, randomly
# construct declarations.
# Make sure that the declaration is logged if there's an error.
# Seed generator with an integer for absolute reproducibility.
random.seed(25)
for unused_i in range(10):
# Build up random list of non-storage-class declaration specs.
other_decl_specs = [random.choice(qualifiers), random.choice(signs),
random.choice(types)]
# remove None
other_decl_specs = filter(lambda x: x is not None, other_decl_specs)
# shuffle
random.shuffle(other_decl_specs)
# insert storage class after the first
storage_class = random.choice(storage_classes)
insertion_point = random.randint(1, len(other_decl_specs))
decl_specs = (other_decl_specs[0:insertion_point]
+ [storage_class]
+ other_decl_specs[insertion_point:])
self.assert_lintLogCodeOnError(
' '.join(decl_specs) + ';',
build_storage_class_error_message)
# but no error if storage class is first
self.assert_lintLogCodeOnError(
storage_class + ' ' + ' '.join(other_decl_specs),
'')
def test_legal_copyright(self):
legal_copyright_message = (
'No copyright message found. '
'You should have a line: "Copyright [year] <Copyright Owner>"'
' [legal/copyright] [5]')
copyright_line = '// Copyright 2008 Google Inc. All Rights Reserved.'
file_path = 'mydir/googleclient/foo.cpp'
# There should be a copyright message in the first 10 lines
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data(file_path, 'cpp', [], error_collector)
self.assertEqual(
1,
error_collector.result_list().count(legal_copyright_message))
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data(
file_path, 'cpp',
['' for unused_i in range(10)] + [copyright_line],
error_collector)
self.assertEqual(
1,
error_collector.result_list().count(legal_copyright_message))
# Test that warning isn't issued if Copyright line appears early enough.
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data(file_path, 'cpp', [copyright_line], error_collector)
for message in error_collector.result_list():
if message.find('legal/copyright') != -1:
self.fail('Unexpected error: %s' % message)
error_collector = ErrorCollector(self.assertTrue)
self.process_file_data(
file_path, 'cpp',
['' for unused_i in range(9)] + [copyright_line],
error_collector)
for message in error_collector.result_list():
if message.find('legal/copyright') != -1:
self.fail('Unexpected error: %s' % message)
def test_invalid_increment(self):
self.assert_lint('*count++;',
'Changing pointer instead of value (or unused value of '
'operator*). [runtime/invalid_increment] [5]')
    # Integral bitfields must be declared with either the signed or the unsigned keyword.
def test_plain_integral_bitfields(self):
errmsg = ('Please declare integral type bitfields with either signed or unsigned. [runtime/bitfields] [5]')
self.assert_lint('int a : 30;', errmsg)
self.assert_lint('mutable short a : 14;', errmsg)
self.assert_lint('const char a : 6;', errmsg)
self.assert_lint('long int a : 30;', errmsg)
self.assert_lint('int a = 1 ? 0 : 30;', '')
class CleansedLinesTest(unittest.TestCase):
def test_init(self):
lines = ['Line 1',
'Line 2',
'Line 3 // Comment test',
'Line 4 "foo"']
clean_lines = cpp_style.CleansedLines(lines)
self.assertEqual(lines, clean_lines.raw_lines)
self.assertEqual(4, clean_lines.num_lines())
self.assertEqual(['Line 1',
'Line 2',
'Line 3 ',
'Line 4 "foo"'],
clean_lines.lines)
self.assertEqual(['Line 1',
'Line 2',
'Line 3 ',
'Line 4 ""'],
clean_lines.elided)
def test_init_empty(self):
clean_lines = cpp_style.CleansedLines([])
self.assertEqual([], clean_lines.raw_lines)
self.assertEqual(0, clean_lines.num_lines())
def test_collapse_strings(self):
collapse = cpp_style.CleansedLines.collapse_strings
self.assertEqual('""', collapse('""')) # "" (empty)
self.assertEqual('"""', collapse('"""')) # """ (bad)
self.assertEqual('""', collapse('"xyz"')) # "xyz" (string)
self.assertEqual('""', collapse('"\\\""')) # "\"" (string)
self.assertEqual('""', collapse('"\'"')) # "'" (string)
self.assertEqual('"\"', collapse('"\"')) # "\" (bad)
self.assertEqual('""', collapse('"\\\\"')) # "\\" (string)
self.assertEqual('"', collapse('"\\\\\\"')) # "\\\" (bad)
self.assertEqual('""', collapse('"\\\\\\\\"')) # "\\\\" (string)
self.assertEqual('\'\'', collapse('\'\'')) # '' (empty)
self.assertEqual('\'\'', collapse('\'a\'')) # 'a' (char)
self.assertEqual('\'\'', collapse('\'\\\'\'')) # '\'' (char)
self.assertEqual('\'', collapse('\'\\\'')) # '\' (bad)
self.assertEqual('', collapse('\\012')) # '\012' (char)
self.assertEqual('', collapse('\\xfF0')) # '\xfF0' (char)
self.assertEqual('', collapse('\\n')) # '\n' (char)
self.assertEqual('\#', collapse('\\#')) # '\#' (bad)
self.assertEqual('StringReplace(body, "", "");',
collapse('StringReplace(body, "\\\\", "\\\\\\\\");'))
self.assertEqual('\'\' ""',
collapse('\'"\' "foo"'))
class OrderOfIncludesTest(CppStyleTestBase):
def setUp(self):
self.include_state = cpp_style._IncludeState()
# Cheat os.path.abspath called in FileInfo class.
self.os_path_abspath_orig = os.path.abspath
self.os_path_isfile_orig = os.path.isfile
os.path.abspath = lambda value: value
def tearDown(self):
os.path.abspath = self.os_path_abspath_orig
os.path.isfile = self.os_path_isfile_orig
def test_check_next_include_order__no_config(self):
self.assertEqual('Header file should not contain WebCore config.h.',
self.include_state.check_next_include_order(cpp_style._CONFIG_HEADER, True, True))
def test_check_next_include_order__no_self(self):
self.assertEqual('Header file should not contain itself.',
self.include_state.check_next_include_order(cpp_style._PRIMARY_HEADER, True, True))
# Test actual code to make sure that header types are correctly assigned.
self.assert_language_rules_check('Foo.h',
'#include "Foo.h"\n',
'Header file should not contain itself. Should be: alphabetically sorted.'
' [build/include_order] [4]')
self.assert_language_rules_check('FooBar.h',
'#include "Foo.h"\n',
'')
def test_check_next_include_order__likely_then_config(self):
self.assertEqual('Found header this file implements before WebCore config.h.',
self.include_state.check_next_include_order(cpp_style._PRIMARY_HEADER, False, True))
self.assertEqual('Found WebCore config.h after a header this file implements.',
self.include_state.check_next_include_order(cpp_style._CONFIG_HEADER, False, True))
def test_check_next_include_order__other_then_config(self):
self.assertEqual('Found other header before WebCore config.h.',
self.include_state.check_next_include_order(cpp_style._OTHER_HEADER, False, True))
self.assertEqual('Found WebCore config.h after other header.',
self.include_state.check_next_include_order(cpp_style._CONFIG_HEADER, False, True))
def test_check_next_include_order__config_then_other_then_likely(self):
self.assertEqual('', self.include_state.check_next_include_order(cpp_style._CONFIG_HEADER, False, True))
self.assertEqual('Found other header before a header this file implements.',
self.include_state.check_next_include_order(cpp_style._OTHER_HEADER, False, True))
self.assertEqual('Found header this file implements after other header.',
self.include_state.check_next_include_order(cpp_style._PRIMARY_HEADER, False, True))
def test_check_alphabetical_include_order(self):
self.assert_language_rules_check('foo.h',
'#include "a.h"\n'
'#include "c.h"\n'
'#include "b.h"\n',
'Alphabetical sorting problem. [build/include_order] [4]')
self.assert_language_rules_check('foo.h',
'#include "a.h"\n'
'#include "b.h"\n'
'#include "c.h"\n',
'')
self.assert_language_rules_check('foo.h',
'#include <assert.h>\n'
'#include "bar.h"\n',
'Alphabetical sorting problem. [build/include_order] [4]')
self.assert_language_rules_check('foo.h',
'#include "bar.h"\n'
'#include <assert.h>\n',
'')
def test_check_alphabetical_include_order_errors_reported_for_both_lines(self):
        # If one of the two out-of-order header lines is filtered, the error
        # should be reported on the other line.
self.assert_language_rules_check('foo.h',
'#include "a.h"\n'
'#include "c.h"\n'
'#include "b.h"\n',
'Alphabetical sorting problem. [build/include_order] [4]',
lines_to_check=[2])
self.assert_language_rules_check('foo.h',
'#include "a.h"\n'
'#include "c.h"\n'
'#include "b.h"\n',
'Alphabetical sorting problem. [build/include_order] [4]',
lines_to_check=[3])
# If no lines are filtered, the error should be reported only once.
self.assert_language_rules_check('foo.h',
'#include "a.h"\n'
'#include "c.h"\n'
'#include "b.h"\n',
'Alphabetical sorting problem. [build/include_order] [4]')
def test_check_line_break_after_own_header(self):
self.assert_language_rules_check('foo.cpp',
'#include "config.h"\n'
'#include "foo.h"\n'
'#include "bar.h"\n',
'You should add a blank line after implementation file\'s own header. [build/include_order] [4]')
self.assert_language_rules_check('foo.cpp',
'#include "config.h"\n'
'#include "foo.h"\n'
'\n'
'#include "bar.h"\n',
'')
def test_check_preprocessor_in_include_section(self):
self.assert_language_rules_check('foo.cpp',
'#include "config.h"\n'
'#include "foo.h"\n'
'\n'
'#ifdef BAZ\n'
'#include "baz.h"\n'
'#else\n'
'#include "foobar.h"\n'
'#endif"\n'
'#include "bar.h"\n', # No flag because previous is in preprocessor section
'')
self.assert_language_rules_check('foo.cpp',
'#include "config.h"\n'
'#include "foo.h"\n'
'\n'
'#ifdef BAZ\n'
'#include "baz.h"\n'
'#endif"\n'
'#include "bar.h"\n'
'#include "a.h"\n', # Should still flag this.
'Alphabetical sorting problem. [build/include_order] [4]')
self.assert_language_rules_check('foo.cpp',
'#include "config.h"\n'
'#include "foo.h"\n'
'\n'
'#ifdef BAZ\n'
'#include "baz.h"\n'
'#include "bar.h"\n' #Should still flag this
'#endif"\n',
'Alphabetical sorting problem. [build/include_order] [4]')
self.assert_language_rules_check('foo.cpp',
'#include "config.h"\n'
'#include "foo.h"\n'
'\n'
'#ifdef BAZ\n'
'#include "baz.h"\n'
'#endif"\n'
'#ifdef FOOBAR\n'
'#include "foobar.h"\n'
'#endif"\n'
'#include "bar.h"\n'
'#include "a.h"\n', # Should still flag this.
'Alphabetical sorting problem. [build/include_order] [4]')
        # Check that the sorting rules still work after an "already included" error.
self.assert_language_rules_check('foo.cpp',
'#include "config.h"\n'
'#include "foo.h"\n'
'\n'
'#include "foo.h"\n'
'#include "g.h"\n',
'"foo.h" already included at foo.cpp:2 [build/include] [4]')
def test_primary_header(self):
# File with non-existing primary header should not produce errors.
self.assert_language_rules_check('foo.cpp',
'#include "config.h"\n'
'\n'
'#include "bar.h"\n',
'')
# Pretend that header files exist.
os.path.isfile = lambda filename: True
# Missing include for existing primary header -> error.
self.assert_language_rules_check('foo.cpp',
'#include "config.h"\n'
'\n'
'#include "bar.h"\n',
'Found other header before a header this file implements. '
'Should be: config.h, primary header, blank line, and then '
'alphabetically sorted. [build/include_order] [4]')
# Having include for existing primary header -> no error.
self.assert_language_rules_check('foo.cpp',
'#include "config.h"\n'
'#include "foo.h"\n'
'\n'
'#include "bar.h"\n',
'')
os.path.isfile = self.os_path_isfile_orig
def test_public_primary_header(self):
# System header is not considered a primary header.
self.assert_language_rules_check('foo.cpp',
'#include "config.h"\n'
'#include <other/foo.h>\n'
'\n'
'#include "a.h"\n',
'Alphabetical sorting problem. [build/include_order] [4]')
# ...except that it starts with public/.
self.assert_language_rules_check('foo.cpp',
'#include "config.h"\n'
'#include <public/foo.h>\n'
'\n'
'#include "a.h"\n',
'')
        # Even if it starts with public/, its base name must match the source file name.
self.assert_language_rules_check('foo.cpp',
'#include "config.h"\n'
'#include <public/foop.h>\n'
'\n'
'#include "a.h"\n',
'Alphabetical sorting problem. [build/include_order] [4]')
def test_check_wtf_includes(self):
self.assert_language_rules_check('foo.cpp',
'#include "config.h"\n'
'#include "foo.h"\n'
'\n'
'#include <wtf/Assertions.h>\n',
'')
self.assert_language_rules_check('foo.cpp',
'#include "config.h"\n'
'#include "foo.h"\n'
'\n'
'#include "wtf/Assertions.h"\n',
'wtf includes should be <wtf/file.h> instead of "wtf/file.h".'
' [build/include] [4]')
def test_check_cc_includes(self):
self.assert_language_rules_check('bar/chromium/foo.cpp',
'#include "config.h"\n'
'#include "foo.h"\n'
'\n'
'#include "cc/CCProxy.h"\n',
'cc includes should be "CCFoo.h" instead of "cc/CCFoo.h".'
' [build/include] [4]')
def test_classify_include(self):
classify_include = cpp_style._classify_include
include_state = cpp_style._IncludeState()
self.assertEqual(cpp_style._CONFIG_HEADER,
classify_include('foo/foo.cpp',
'config.h',
False, include_state))
self.assertEqual(cpp_style._PRIMARY_HEADER,
classify_include('foo/internal/foo.cpp',
'foo/public/foo.h',
False, include_state))
self.assertEqual(cpp_style._PRIMARY_HEADER,
classify_include('foo/internal/foo.cpp',
'foo/other/public/foo.h',
False, include_state))
self.assertEqual(cpp_style._OTHER_HEADER,
classify_include('foo/internal/foo.cpp',
'foo/other/public/foop.h',
False, include_state))
self.assertEqual(cpp_style._OTHER_HEADER,
classify_include('foo/foo.cpp',
'string',
True, include_state))
self.assertEqual(cpp_style._PRIMARY_HEADER,
classify_include('fooCustom.cpp',
'foo.h',
False, include_state))
self.assertEqual(cpp_style._PRIMARY_HEADER,
classify_include('PrefixFooCustom.cpp',
'Foo.h',
False, include_state))
self.assertEqual(cpp_style._MOC_HEADER,
classify_include('foo.cpp',
'foo.moc',
False, include_state))
self.assertEqual(cpp_style._MOC_HEADER,
classify_include('foo.cpp',
'moc_foo.cpp',
False, include_state))
# <public/foo.h> must be considered as primary even if is_system is True.
self.assertEqual(cpp_style._PRIMARY_HEADER,
classify_include('foo/foo.cpp',
'public/foo.h',
True, include_state))
self.assertEqual(cpp_style._OTHER_HEADER,
classify_include('foo.cpp',
'foo.h',
True, include_state))
self.assertEqual(cpp_style._OTHER_HEADER,
classify_include('foo.cpp',
'public/foop.h',
True, include_state))
# Qt private APIs use _p.h suffix.
self.assertEqual(cpp_style._PRIMARY_HEADER,
classify_include('foo.cpp',
'foo_p.h',
False, include_state))
# Tricky example where both includes might be classified as primary.
self.assert_language_rules_check('ScrollbarThemeWince.cpp',
'#include "config.h"\n'
'#include "ScrollbarThemeWince.h"\n'
'\n'
'#include "Scrollbar.h"\n',
'')
self.assert_language_rules_check('ScrollbarThemeWince.cpp',
'#include "config.h"\n'
'#include "Scrollbar.h"\n'
'\n'
'#include "ScrollbarThemeWince.h"\n',
'Found header this file implements after a header this file implements.'
' Should be: config.h, primary header, blank line, and then alphabetically sorted.'
' [build/include_order] [4]')
self.assert_language_rules_check('ResourceHandleWin.cpp',
'#include "config.h"\n'
'#include "ResourceHandle.h"\n'
'\n'
'#include "ResourceHandleWin.h"\n',
'')
def test_try_drop_common_suffixes(self):
self.assertEqual('foo/foo', cpp_style._drop_common_suffixes('foo/foo-inl.h'))
self.assertEqual('foo/bar/foo',
cpp_style._drop_common_suffixes('foo/bar/foo_inl.h'))
self.assertEqual('foo/foo', cpp_style._drop_common_suffixes('foo/foo.cpp'))
self.assertEqual('foo/foo_unusualinternal',
cpp_style._drop_common_suffixes('foo/foo_unusualinternal.h'))
self.assertEqual('',
cpp_style._drop_common_suffixes('_test.cpp'))
self.assertEqual('test',
cpp_style._drop_common_suffixes('test.cpp'))
class CheckForFunctionLengthsTest(CppStyleTestBase):
def setUp(self):
# Reducing these thresholds for the tests speeds up tests significantly.
self.old_normal_trigger = cpp_style._FunctionState._NORMAL_TRIGGER
self.old_test_trigger = cpp_style._FunctionState._TEST_TRIGGER
cpp_style._FunctionState._NORMAL_TRIGGER = 10
cpp_style._FunctionState._TEST_TRIGGER = 25
def tearDown(self):
cpp_style._FunctionState._NORMAL_TRIGGER = self.old_normal_trigger
cpp_style._FunctionState._TEST_TRIGGER = self.old_test_trigger
# FIXME: Eliminate the need for this function.
def set_min_confidence(self, min_confidence):
"""Set new test confidence and return old test confidence."""
old_min_confidence = self.min_confidence
self.min_confidence = min_confidence
return old_min_confidence
def assert_function_lengths_check(self, code, expected_message):
"""Check warnings for long function bodies are as expected.
Args:
code: C++ source code expected to generate a warning message.
expected_message: Message expected to be generated by the C++ code.
"""
self.assertEqual(expected_message,
self.perform_function_lengths_check(code))
def trigger_lines(self, error_level):
"""Return number of lines needed to trigger a function length warning.
Args:
error_level: --v setting for cpp_style.
Returns:
Number of lines needed to trigger a function length warning.
"""
return cpp_style._FunctionState._NORMAL_TRIGGER * 2 ** error_level
def trigger_test_lines(self, error_level):
"""Return number of lines needed to trigger a test function length warning.
Args:
error_level: --v setting for cpp_style.
Returns:
Number of lines needed to trigger a test function length warning.
"""
return cpp_style._FunctionState._TEST_TRIGGER * 2 ** error_level
def assert_function_length_check_definition(self, lines, error_level):
"""Generate long function definition and check warnings are as expected.
Args:
lines: Number of lines to generate.
error_level: --v setting for cpp_style.
"""
trigger_level = self.trigger_lines(self.min_confidence)
self.assert_function_lengths_check(
'void test(int x)' + self.function_body(lines),
('Small and focused functions are preferred: '
'test() has %d non-comment lines '
'(error triggered by exceeding %d lines).'
' [readability/fn_size] [%d]'
% (lines, trigger_level, error_level)))
def assert_function_length_check_definition_ok(self, lines):
"""Generate shorter function definition and check no warning is produced.
Args:
lines: Number of lines to generate.
"""
self.assert_function_lengths_check(
'void test(int x)' + self.function_body(lines),
'')
def assert_function_length_check_at_error_level(self, error_level):
"""Generate and check function at the trigger level for --v setting.
Args:
error_level: --v setting for cpp_style.
"""
self.assert_function_length_check_definition(self.trigger_lines(error_level),
error_level)
def assert_function_length_check_below_error_level(self, error_level):
"""Generate and check function just below the trigger level for --v setting.
Args:
error_level: --v setting for cpp_style.
"""
self.assert_function_length_check_definition(self.trigger_lines(error_level) - 1,
error_level - 1)
def assert_function_length_check_above_error_level(self, error_level):
"""Generate and check function just above the trigger level for --v setting.
Args:
error_level: --v setting for cpp_style.
"""
self.assert_function_length_check_definition(self.trigger_lines(error_level) + 1,
error_level)
def function_body(self, number_of_lines):
return ' {\n' + ' this_is_just_a_test();\n' * number_of_lines + '}'
def function_body_with_blank_lines(self, number_of_lines):
return ' {\n' + ' this_is_just_a_test();\n\n' * number_of_lines + '}'
def function_body_with_no_lints(self, number_of_lines):
return ' {\n' + ' this_is_just_a_test(); // NOLINT\n' * number_of_lines + '}'
# Test line length checks.
def test_function_length_check_declaration(self):
self.assert_function_lengths_check(
'void test();', # Not a function definition
'')
def test_function_length_check_declaration_with_block_following(self):
self.assert_function_lengths_check(
('void test();\n'
+ self.function_body(66)), # Not a function definition
'')
def test_function_length_check_class_definition(self):
self.assert_function_lengths_check( # Not a function definition
'class Test' + self.function_body(66) + ';',
'')
def test_function_length_check_trivial(self):
self.assert_function_lengths_check(
'void test() {}', # Not counted
'')
def test_function_length_check_empty(self):
self.assert_function_lengths_check(
'void test() {\n}',
'')
def test_function_length_check_definition_below_severity0(self):
old_min_confidence = self.set_min_confidence(0)
self.assert_function_length_check_definition_ok(self.trigger_lines(0) - 1)
self.set_min_confidence(old_min_confidence)
def test_function_length_check_definition_at_severity0(self):
old_min_confidence = self.set_min_confidence(0)
self.assert_function_length_check_definition_ok(self.trigger_lines(0))
self.set_min_confidence(old_min_confidence)
def test_function_length_check_definition_above_severity0(self):
old_min_confidence = self.set_min_confidence(0)
self.assert_function_length_check_above_error_level(0)
self.set_min_confidence(old_min_confidence)
def test_function_length_check_definition_below_severity1v0(self):
old_min_confidence = self.set_min_confidence(0)
self.assert_function_length_check_below_error_level(1)
self.set_min_confidence(old_min_confidence)
def test_function_length_check_definition_at_severity1v0(self):
old_min_confidence = self.set_min_confidence(0)
self.assert_function_length_check_at_error_level(1)
self.set_min_confidence(old_min_confidence)
def test_function_length_check_definition_below_severity1(self):
self.assert_function_length_check_definition_ok(self.trigger_lines(1) - 1)
def test_function_length_check_definition_at_severity1(self):
self.assert_function_length_check_definition_ok(self.trigger_lines(1))
def test_function_length_check_definition_above_severity1(self):
self.assert_function_length_check_above_error_level(1)
def test_function_length_check_definition_severity1_plus_indented(self):
error_level = 1
error_lines = self.trigger_lines(error_level) + 1
trigger_level = self.trigger_lines(self.min_confidence)
indent_spaces = ' '
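# The (?m) flag makes ^ match at every line start, so re.sub below prepends
# the indent to each line, simulating a nested (e.g. namespaced) definition.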
self.assert_function_lengths_check(
re.sub(r'(?m)^(.)', indent_spaces + r'\1',
'void test_indent(int x)\n' + self.function_body(error_lines)),
('Small and focused functions are preferred: '
'test_indent() has %d non-comment lines '
'(error triggered by exceeding %d lines).'
' [readability/fn_size] [%d]')
% (error_lines, trigger_level, error_level))
def test_function_length_check_definition_severity1_plus_blanks(self):
error_level = 1
error_lines = self.trigger_lines(error_level) + 1
trigger_level = self.trigger_lines(self.min_confidence)
self.assert_function_lengths_check(
'void test_blanks(int x)' + self.function_body(error_lines),
('Small and focused functions are preferred: '
'test_blanks() has %d non-comment lines '
'(error triggered by exceeding %d lines).'
' [readability/fn_size] [%d]')
% (error_lines, trigger_level, error_level))
def test_function_length_check_complex_definition_severity1(self):
error_level = 1
error_lines = self.trigger_lines(error_level) + 1
trigger_level = self.trigger_lines(self.min_confidence)
self.assert_function_lengths_check(
('my_namespace::my_other_namespace::MyVeryLongTypeName<Type1, bool func(const Element*)>*\n'
'my_namespace::my_other_namespace<Type3, Type4>::~MyFunction<Type5<Type6, Type7> >(int arg1, char* arg2)'
+ self.function_body(error_lines)),
('Small and focused functions are preferred: '
'my_namespace::my_other_namespace<Type3, Type4>::~MyFunction<Type5<Type6, Type7> >()'
' has %d non-comment lines '
'(error triggered by exceeding %d lines).'
' [readability/fn_size] [%d]')
% (error_lines, trigger_level, error_level))
def test_function_length_check_definition_severity1_for_test(self):
error_level = 1
error_lines = self.trigger_test_lines(error_level) + 1
trigger_level = self.trigger_test_lines(self.min_confidence)
self.assert_function_lengths_check(
'TEST_F(Test, Mutator)' + self.function_body(error_lines),
('Small and focused functions are preferred: '
'TEST_F(Test, Mutator) has %d non-comment lines '
'(error triggered by exceeding %d lines).'
' [readability/fn_size] [%d]')
% (error_lines, trigger_level, error_level))
def test_function_length_check_definition_severity1_for_split_line_test(self):
error_level = 1
error_lines = self.trigger_test_lines(error_level) + 1
trigger_level = self.trigger_test_lines(self.min_confidence)
self.assert_function_lengths_check(
('TEST_F(GoogleUpdateRecoveryRegistryProtectedTest,\n'
' FixGoogleUpdate_AllValues_MachineApp)' # note: 4 spaces
+ self.function_body(error_lines)),
('Small and focused functions are preferred: '
'TEST_F(GoogleUpdateRecoveryRegistryProtectedTest, ' # 1 space
'FixGoogleUpdate_AllValues_MachineApp) has %d non-comment lines '
'(error triggered by exceeding %d lines).'
' [readability/fn_size] [%d]')
% (error_lines, trigger_level, error_level))
def test_function_length_check_definition_severity1_for_bad_test_doesnt_break(self):
error_level = 1
error_lines = self.trigger_test_lines(error_level) + 1
trigger_level = self.trigger_test_lines(self.min_confidence)
# Since the function name isn't valid, the function detection algorithm
# will skip it, so no error is produced.
self.assert_function_lengths_check(
('TEST_F('
+ self.function_body(error_lines)),
'')
def test_function_length_check_definition_severity1_with_embedded_no_lints(self):
error_level = 1
error_lines = self.trigger_lines(error_level) + 1
trigger_level = self.trigger_lines(self.min_confidence)
self.assert_function_lengths_check(
'void test(int x)' + self.function_body_with_no_lints(error_lines),
('Small and focused functions are preferred: '
'test() has %d non-comment lines '
'(error triggered by exceeding %d lines).'
' [readability/fn_size] [%d]')
% (error_lines, trigger_level, error_level))
def test_function_length_check_definition_severity1_with_no_lint(self):
self.assert_function_lengths_check(
('void test(int x)' + self.function_body(self.trigger_lines(1))
+ ' // NOLINT -- long function'),
'')
def test_function_length_check_definition_below_severity2(self):
self.assert_function_length_check_below_error_level(2)
def test_function_length_check_definition_severity2(self):
self.assert_function_length_check_at_error_level(2)
def test_function_length_check_definition_above_severity2(self):
self.assert_function_length_check_above_error_level(2)
def test_function_length_check_definition_below_severity3(self):
self.assert_function_length_check_below_error_level(3)
def test_function_length_check_definition_severity3(self):
self.assert_function_length_check_at_error_level(3)
def test_function_length_check_definition_above_severity3(self):
self.assert_function_length_check_above_error_level(3)
def test_function_length_check_definition_below_severity4(self):
self.assert_function_length_check_below_error_level(4)
def test_function_length_check_definition_severity4(self):
self.assert_function_length_check_at_error_level(4)
def test_function_length_check_definition_above_severity4(self):
self.assert_function_length_check_above_error_level(4)
def test_function_length_check_definition_below_severity5(self):
self.assert_function_length_check_below_error_level(5)
def test_function_length_check_definition_at_severity5(self):
self.assert_function_length_check_at_error_level(5)
def test_function_length_check_definition_above_severity5(self):
self.assert_function_length_check_above_error_level(5)
def test_function_length_check_definition_huge_lines(self):
# 5 is the maximum error level; anything larger is reported at level 5.
self.assert_function_length_check_definition(self.trigger_lines(6), 5)
def test_function_length_not_determinable(self):
# Macro invocation without terminating semicolon.
self.assert_function_lengths_check(
'MACRO(arg)',
'')
# Macro with underscores
self.assert_function_lengths_check(
'MACRO_WITH_UNDERSCORES(arg1, arg2, arg3)',
'')
self.assert_function_lengths_check(
'NonMacro(arg)',
'Lint failed to find start of function body.'
' [readability/fn_size] [5]')
class NoNonVirtualDestructorsTest(CppStyleTestBase):
def test_no_error(self):
self.assert_multi_line_lint(
'''\
class Foo {
virtual ~Foo();
virtual void foo();
};''',
'')
self.assert_multi_line_lint(
'''\
class Foo {
virtual inline ~Foo();
virtual void foo();
};''',
'')
self.assert_multi_line_lint(
'''\
class Foo {
inline virtual ~Foo();
virtual void foo();
};''',
'')
self.assert_multi_line_lint(
'''\
class Foo::Goo {
virtual ~Goo();
virtual void goo();
};''',
'')
self.assert_multi_line_lint(
'class Foo { void foo(); };',
'More than one command on the same line [whitespace/newline] [4]')
self.assert_multi_line_lint(
'class MyClass {\n'
' int getIntValue() { ASSERT(m_ptr); return *m_ptr; }\n'
'};\n',
'')
self.assert_multi_line_lint(
'class MyClass {\n'
' int getIntValue()\n'
' {\n'
' ASSERT(m_ptr); return *m_ptr;\n'
' }\n'
'};\n',
'More than one command on the same line [whitespace/newline] [4]')
self.assert_multi_line_lint(
'''\
class Qualified::Goo : public Foo {
virtual void goo();
};''',
'')
def test_no_destructor_when_virtual_needed(self):
self.assert_multi_line_lint_re(
'''\
class Foo {
virtual void foo();
};''',
'The class Foo probably needs a virtual destructor')
def test_destructor_non_virtual_when_virtual_needed(self):
self.assert_multi_line_lint_re(
'''\
class Foo {
~Foo();
virtual void foo();
};''',
'The class Foo probably needs a virtual destructor')
def test_no_warn_when_derived(self):
self.assert_multi_line_lint(
'''\
class Foo : public Goo {
virtual void foo();
};''',
'')
def test_internal_braces(self):
self.assert_multi_line_lint_re(
'''\
class Foo {
enum Goo {
GOO
};
virtual void foo();
};''',
'The class Foo probably needs a virtual destructor')
def test_inner_class_needs_virtual_destructor(self):
self.assert_multi_line_lint_re(
'''\
class Foo {
class Goo {
virtual void goo();
};
};''',
'The class Goo probably needs a virtual destructor')
def test_outer_class_needs_virtual_destructor(self):
self.assert_multi_line_lint_re(
'''\
class Foo {
class Goo {
};
virtual void foo();
};''',
'The class Foo probably needs a virtual destructor')
def test_qualified_class_needs_virtual_destructor(self):
self.assert_multi_line_lint_re(
'''\
class Qualified::Foo {
virtual void foo();
};''',
'The class Qualified::Foo probably needs a virtual destructor')
def test_multi_line_declaration_no_error(self):
self.assert_multi_line_lint_re(
'''\
class Foo
: public Goo {
virtual void foo();
};''',
'')
def test_multi_line_declaration_with_error(self):
self.assert_multi_line_lint(
'''\
class Foo
{
virtual void foo();
};''',
['This { should be at the end of the previous line '
'[whitespace/braces] [4]',
'The class Foo probably needs a virtual destructor due to having '
'virtual method(s), one declared at line 3. [runtime/virtual] [4]'])
class PassPtrTest(CppStyleTestBase):
# For http://webkit.org/coding/RefPtr.html
def assert_pass_ptr_check(self, code, expected_message):
"""Check warnings for Pass*Ptr are as expected.
Args:
code: C++ source code expected to generate a warning message.
expected_message: Message expected to be generated by the C++ code.
"""
self.assertEqual(expected_message,
self.perform_pass_ptr_check(code))
def test_pass_ref_ptr_in_function(self):
self.assert_pass_ptr_check(
'int myFunction()\n'
'{\n'
' PassRefPtr<Type1> variable = variable2;\n'
'}',
'Local variables should never be PassRefPtr (see '
'http://webkit.org/coding/RefPtr.html). [readability/pass_ptr] [5]')
def test_pass_own_ptr_in_function(self):
self.assert_pass_ptr_check(
'int myFunction()\n'
'{\n'
' PassOwnPtr<Type1> variable = variable2;\n'
'}',
'Local variables should never be PassOwnPtr (see '
'http://webkit.org/coding/RefPtr.html). [readability/pass_ptr] [5]')
def test_pass_other_type_ptr_in_function(self):
self.assert_pass_ptr_check(
'int myFunction()\n'
'{\n'
' PassOtherTypePtr<Type1> variable;\n'
'}',
'Local variables should never be PassOtherTypePtr (see '
'http://webkit.org/coding/RefPtr.html). [readability/pass_ptr] [5]')
def test_pass_ref_ptr_return_value(self):
self.assert_pass_ptr_check(
'PassRefPtr<Type1>\n'
'myFunction(int)\n'
'{\n'
'}',
'')
self.assert_pass_ptr_check(
'PassRefPtr<Type1> myFunction(int)\n'
'{\n'
'}',
'')
self.assert_pass_ptr_check(
'PassRefPtr<Type1> myFunction();\n',
'')
self.assert_pass_ptr_check(
'OwnRefPtr<Type1> myFunction();\n',
'')
self.assert_pass_ptr_check(
'RefPtr<Type1> myFunction(int)\n'
'{\n'
'}',
'The return type should use PassRefPtr instead of RefPtr. [readability/pass_ptr] [5]')
self.assert_pass_ptr_check(
'OwnPtr<Type1> myFunction(int)\n'
'{\n'
'}',
'The return type should use PassOwnPtr instead of OwnPtr. [readability/pass_ptr] [5]')
def test_ref_ptr_parameter_value(self):
self.assert_pass_ptr_check(
'int myFunction(PassRefPtr<Type1>)\n'
'{\n'
'}',
'')
self.assert_pass_ptr_check(
'int myFunction(RefPtr<Type1>)\n'
'{\n'
'}',
'The parameter type should use PassRefPtr instead of RefPtr. [readability/pass_ptr] [5]')
self.assert_pass_ptr_check(
'int myFunction(RefPtr<Type1>&)\n'
'{\n'
'}',
'')
self.assert_pass_ptr_check(
'int myFunction(RefPtr<Type1>*)\n'
'{\n'
'}',
'')
def test_own_ptr_parameter_value(self):
self.assert_pass_ptr_check(
'int myFunction(PassOwnPtr<Type1>)\n'
'{\n'
'}',
'')
self.assert_pass_ptr_check(
'int myFunction(OwnPtr<Type1>)\n'
'{\n'
'}',
'The parameter type should use PassOwnPtr instead of OwnPtr. [readability/pass_ptr] [5]')
self.assert_pass_ptr_check(
'int myFunction(OwnPtr<Type1>& simple)\n'
'{\n'
'}',
'')
def test_ref_ptr_member_variable(self):
self.assert_pass_ptr_check(
'class Foo {'
' RefPtr<Type1> m_other;\n'
'};\n',
'')
class LeakyPatternTest(CppStyleTestBase):
def assert_leaky_pattern_check(self, code, expected_message):
"""Check warnings for leaky patterns are as expected.
Args:
code: C++ source code expected to generate a warning message.
expected_message: Message expected to be generated by the C++ code.
"""
self.assertEqual(expected_message,
self.perform_leaky_pattern_check(code))
def test_get_dc(self):
self.assert_leaky_pattern_check(
'HDC hdc = GetDC(hwnd);',
'Use the class HWndDC instead of calling GetDC to avoid potential '
'memory leaks. [runtime/leaky_pattern] [5]')
def test_get_dc_ex(self):
self.assert_leaky_pattern_check(
'HDC hdc = GetDCEx(hwnd, 0, 0);',
'Use the class HWndDC instead of calling GetDCEx to avoid potential '
'memory leaks. [runtime/leaky_pattern] [5]')
def test_own_get_dc(self):
self.assert_leaky_pattern_check(
'HWndDC hdc(hwnd);',
'')
def test_create_dc(self):
self.assert_leaky_pattern_check(
'HDC dc2 = ::CreateDC();',
'Use adoptPtr and OwnPtr<HDC> when calling CreateDC to avoid potential '
'memory leaks. [runtime/leaky_pattern] [5]')
self.assert_leaky_pattern_check(
'adoptPtr(CreateDC());',
'')
def test_create_compatible_dc(self):
self.assert_leaky_pattern_check(
'HDC dc2 = CreateCompatibleDC(dc);',
'Use adoptPtr and OwnPtr<HDC> when calling CreateCompatibleDC to avoid potential '
'memory leaks. [runtime/leaky_pattern] [5]')
self.assert_leaky_pattern_check(
'adoptPtr(CreateCompatibleDC(dc));',
'')
class WebKitStyleTest(CppStyleTestBase):
# for http://webkit.org/coding/coding-style.html
def test_indentation(self):
# 1. Use spaces, not tabs. Tabs should only appear in files that
# require them for semantic meaning, like Makefiles.
self.assert_multi_line_lint(
'class Foo {\n'
' int goo;\n'
'};',
'')
self.assert_multi_line_lint(
'class Foo {\n'
'\tint goo;\n'
'};',
'Tab found; better to use spaces [whitespace/tab] [1]')
# 2. The indent size is 4 spaces.
self.assert_multi_line_lint(
'class Foo {\n'
' int goo;\n'
'};',
'')
self.assert_multi_line_lint(
'class Foo {\n'
' int goo;\n'
'};',
'Weird number of spaces at line-start. Are you using a 4-space indent? [whitespace/indent] [3]')
# 3. In a header, code inside a namespace should not be indented.
self.assert_multi_line_lint(
'namespace WebCore {\n\n'
'class Document {\n'
' int myVariable;\n'
'};\n'
'}',
'',
'foo.h')
self.assert_multi_line_lint(
'namespace OuterNamespace {\n'
' namespace InnerNamespace {\n'
' class Document {\n'
'};\n'
'};\n'
'}',
'Code inside a namespace should not be indented. [whitespace/indent] [4]',
'foo.h')
self.assert_multi_line_lint(
'namespace OuterNamespace {\n'
' class Document {\n'
' namespace InnerNamespace {\n'
'};\n'
'};\n'
'}',
'Code inside a namespace should not be indented. [whitespace/indent] [4]',
'foo.h')
self.assert_multi_line_lint(
'namespace WebCore {\n'
'#if 0\n'
' class Document {\n'
'};\n'
'#endif\n'
'}',
'Code inside a namespace should not be indented. [whitespace/indent] [4]',
'foo.h')
self.assert_multi_line_lint(
'namespace WebCore {\n'
'class Document {\n'
'};\n'
'}',
'',
'foo.h')
# 4. In an implementation file (files with the extension .cpp, .c
# or .mm), code inside a namespace should not be indented.
self.assert_multi_line_lint(
'namespace WebCore {\n\n'
'Document::Foo()\n'
' : foo(bar)\n'
' , boo(far)\n'
'{\n'
' stuff();\n'
'}',
'',
'foo.cpp')
self.assert_multi_line_lint(
'namespace OuterNamespace {\n'
'namespace InnerNamespace {\n'
'Document::Foo() { }\n'
' void* p;\n'
'}\n'
'}\n',
'Code inside a namespace should not be indented. [whitespace/indent] [4]',
'foo.cpp')
self.assert_multi_line_lint(
'namespace OuterNamespace {\n'
'namespace InnerNamespace {\n'
'Document::Foo() { }\n'
'}\n'
' void* p;\n'
'}\n',
'Code inside a namespace should not be indented. [whitespace/indent] [4]',
'foo.cpp')
self.assert_multi_line_lint(
'namespace WebCore {\n\n'
' const char* foo = "start:;"\n'
' "dfsfsfs";\n'
'}\n',
'Code inside a namespace should not be indented. [whitespace/indent] [4]',
'foo.cpp')
self.assert_multi_line_lint(
'namespace WebCore {\n\n'
'const char* foo(void* a = ";", // ;\n'
' void* b);\n'
' void* p;\n'
'}\n',
'Code inside a namespace should not be indented. [whitespace/indent] [4]',
'foo.cpp')
self.assert_multi_line_lint(
'namespace WebCore {\n\n'
'const char* foo[] = {\n'
' "void* b);", // ;\n'
' "asfdf",\n'
' }\n'
' void* p;\n'
'}\n',
'Code inside a namespace should not be indented. [whitespace/indent] [4]',
'foo.cpp')
self.assert_multi_line_lint(
'namespace WebCore {\n\n'
'const char* foo[] = {\n'
' "void* b);", // }\n'
' "asfdf",\n'
' }\n'
'}\n',
'',
'foo.cpp')
self.assert_multi_line_lint(
' namespace WebCore {\n\n'
' void Document::Foo()\n'
' {\n'
'start: // infinite loops are fun!\n'
' goto start;\n'
' }',
'namespace should never be indented. [whitespace/indent] [4]',
'foo.cpp')
self.assert_multi_line_lint(
'namespace WebCore {\n'
' Document::Foo() { }\n'
'}',
'Code inside a namespace should not be indented.'
' [whitespace/indent] [4]',
'foo.cpp')
self.assert_multi_line_lint(
'namespace WebCore {\n'
'#define abc(x) x; \\\n'
' x\n'
'}',
'',
'foo.cpp')
self.assert_multi_line_lint(
'namespace WebCore {\n'
'#define abc(x) x; \\\n'
' x\n'
' void* x;'
'}',
'Code inside a namespace should not be indented. [whitespace/indent] [4]',
'foo.cpp')
# 5. A case label should line up with its switch statement. The
# case statement is indented.
self.assert_multi_line_lint(
' switch (condition) {\n'
' case fooCondition:\n'
' case barCondition:\n'
' i++;\n'
' break;\n'
' default:\n'
' i--;\n'
' }\n',
'')
self.assert_multi_line_lint(
' switch (condition) {\n'
' case fooCondition:\n'
' switch (otherCondition) {\n'
' default:\n'
' return;\n'
' }\n'
' default:\n'
' i--;\n'
' }\n',
'')
self.assert_multi_line_lint(
' switch (condition) {\n'
' case fooCondition: break;\n'
' default: return;\n'
' }\n',
'')
self.assert_multi_line_lint(
' switch (condition) {\n'
' case fooCondition:\n'
' case barCondition:\n'
' i++;\n'
' break;\n'
' default:\n'
' i--;\n'
' }\n',
'A case label should not be indented, but line up with its switch statement.'
' [whitespace/indent] [4]')
self.assert_multi_line_lint(
' switch (condition) {\n'
' case fooCondition:\n'
' break;\n'
' default:\n'
' i--;\n'
' }\n',
'A case label should not be indented, but line up with its switch statement.'
' [whitespace/indent] [4]')
self.assert_multi_line_lint(
' switch (condition) {\n'
' case fooCondition:\n'
' case barCondition:\n'
' switch (otherCondition) {\n'
' default:\n'
' return;\n'
' }\n'
' default:\n'
' i--;\n'
' }\n',
'A case label should not be indented, but line up with its switch statement.'
' [whitespace/indent] [4]')
self.assert_multi_line_lint(
' switch (condition) {\n'
' case fooCondition:\n'
' case barCondition:\n'
' i++;\n'
' break;\n\n'
' default:\n'
' i--;\n'
' }\n',
'Non-label code inside switch statements should be indented.'
' [whitespace/indent] [4]')
self.assert_multi_line_lint(
' switch (condition) {\n'
' case fooCondition:\n'
' case barCondition:\n'
' switch (otherCondition) {\n'
' default:\n'
' return;\n'
' }\n'
' default:\n'
' i--;\n'
' }\n',
'Non-label code inside switch statements should be indented.'
' [whitespace/indent] [4]')
# 6. Boolean expressions at the same nesting level that span
# multiple lines should have their operators on the left side of
# the line instead of the right side.
self.assert_multi_line_lint(
' return attr->name() == srcAttr\n'
' || attr->name() == lowsrcAttr;\n',
'')
self.assert_multi_line_lint(
' return attr->name() == srcAttr ||\n'
' attr->name() == lowsrcAttr;\n',
'Boolean expressions that span multiple lines should have their '
'operators on the left side of the line instead of the right side.'
' [whitespace/operators] [4]')
def test_spacing(self):
# 1. Do not place spaces around unary operators.
self.assert_multi_line_lint(
'i++;',
'')
self.assert_multi_line_lint(
'i ++;',
'Extra space for operator ++; [whitespace/operators] [4]')
# 2. Do place spaces around binary and ternary operators.
self.assert_multi_line_lint(
'y = m * x + b;',
'')
self.assert_multi_line_lint(
'f(a, b);',
'')
self.assert_multi_line_lint(
'c = a | b;',
'')
self.assert_multi_line_lint(
'return condition ? 1 : 0;',
'')
self.assert_multi_line_lint(
'y=m*x+b;',
'Missing spaces around = [whitespace/operators] [4]')
self.assert_multi_line_lint(
'f(a,b);',
'Missing space after , [whitespace/comma] [3]')
self.assert_multi_line_lint(
'c = a|b;',
'Missing spaces around | [whitespace/operators] [3]')
# FIXME: We cannot catch this lint error.
# self.assert_multi_line_lint(
# 'return condition ? 1:0;',
# '')
# 3. Place spaces between control statements and their parentheses.
self.assert_multi_line_lint(
' if (condition)\n'
' doIt();\n',
'')
self.assert_multi_line_lint(
' if(condition)\n'
' doIt();\n',
'Missing space before ( in if( [whitespace/parens] [5]')
# 4. Do not place spaces between a function and its parentheses,
# or between a parenthesis and its content.
self.assert_multi_line_lint(
'f(a, b);',
'')
self.assert_multi_line_lint(
'f (a, b);',
'Extra space before ( in function call [whitespace/parens] [4]')
self.assert_multi_line_lint(
'f( a, b );',
['Extra space after ( in function call [whitespace/parens] [4]',
'Extra space before ) [whitespace/parens] [2]'])
def test_line_breaking(self):
# 1. Each statement should get its own line.
self.assert_multi_line_lint(
' x++;\n'
' y++;\n'
' if (condition);\n'
' doIt();\n',
'')
self.assert_multi_line_lint(
' if (condition) \\\n'
' doIt();\n',
'')
self.assert_multi_line_lint(
' x++; y++;',
'More than one command on the same line [whitespace/newline] [4]')
self.assert_multi_line_lint(
' if (condition) doIt();\n',
'More than one command on the same line in if [whitespace/parens] [4]')
# Ensure that having a # in the line doesn't hide the error.
self.assert_multi_line_lint(
' x++; char a[] = "#";',
'More than one command on the same line [whitespace/newline] [4]')
# Ignore preprocessor if's.
self.assert_multi_line_lint(
'#if (condition) || (condition2)\n',
'')
# 2. An else statement should go on the same line as a preceding
# close brace if one is present, else it should line up with the
# if statement.
self.assert_multi_line_lint(
'if (condition) {\n'
' doSomething();\n'
' doSomethingAgain();\n'
'} else {\n'
' doSomethingElse();\n'
' doSomethingElseAgain();\n'
'}\n',
'')
self.assert_multi_line_lint(
'if (condition)\n'
' doSomething();\n'
'else\n'
' doSomethingElse();\n',
'')
self.assert_multi_line_lint(
'if (condition)\n'
' doSomething();\n'
'else {\n'
' doSomethingElse();\n'
' doSomethingElseAgain();\n'
'}\n',
'')
self.assert_multi_line_lint(
'#define TEST_ASSERT(expression) do { if (!(expression)) { TestsController::shared().testFailed(__FILE__, __LINE__, #expression); return; } } while (0)\n',
'')
self.assert_multi_line_lint(
'#define TEST_ASSERT(expression) do { if ( !(expression)) { TestsController::shared().testFailed(__FILE__, __LINE__, #expression); return; } } while (0)\n',
'Extra space after ( in if [whitespace/parens] [5]')
# FIXME: Currently we only check the first conditional, so we cannot detect errors in subsequent ones.
# self.assert_multi_line_lint(
# '#define TEST_ASSERT(expression) do { if (!(expression)) { TestsController::shared().testFailed(__FILE__, __LINE__, #expression); return; } } while (0 )\n',
# 'Mismatching spaces inside () in if [whitespace/parens] [5]')
self.assert_multi_line_lint(
'WTF_MAKE_NONCOPYABLE(ClassName); WTF_MAKE_FAST_ALLOCATED;\n',
'')
self.assert_multi_line_lint(
'if (condition) {\n'
' doSomething();\n'
' doSomethingAgain();\n'
'}\n'
'else {\n'
' doSomethingElse();\n'
' doSomethingElseAgain();\n'
'}\n',
'An else should appear on the same line as the preceding } [whitespace/newline] [4]')
self.assert_multi_line_lint(
'if (condition) doSomething(); else doSomethingElse();\n',
['More than one command on the same line [whitespace/newline] [4]',
'Else clause should never be on same line as else (use 2 lines) [whitespace/newline] [4]',
'More than one command on the same line in if [whitespace/parens] [4]'])
self.assert_multi_line_lint(
'if (condition) doSomething(); else {\n'
' doSomethingElse();\n'
'}\n',
['More than one command on the same line in if [whitespace/parens] [4]',
'One line control clauses should not use braces. [whitespace/braces] [4]'])
self.assert_multi_line_lint(
'if (condition)\n'
' doSomething();\n'
'else {\n'
' doSomethingElse();\n'
'}\n',
'One line control clauses should not use braces. [whitespace/braces] [4]')
self.assert_multi_line_lint(
'if (condition) {\n'
' doSomething1();\n'
' doSomething2();\n'
'} else {\n'
' doSomethingElse();\n'
'}\n',
'One line control clauses should not use braces. [whitespace/braces] [4]')
self.assert_multi_line_lint(
'void func()\n'
'{\n'
' while (condition) { }\n'
' return 0;\n'
'}\n',
'')
self.assert_multi_line_lint(
'void func()\n'
'{\n'
' for (i = 0; i < 42; i++) { foobar(); }\n'
' return 0;\n'
'}\n',
'More than one command on the same line in for [whitespace/parens] [4]')
# 3. An else if statement should be written as an if statement
# when the prior if concludes with a return statement.
self.assert_multi_line_lint(
'if (motivated) {\n'
' if (liquid)\n'
' return money;\n'
'} else if (tired)\n'
' break;\n',
'')
self.assert_multi_line_lint(
'if (condition)\n'
' doSomething();\n'
'else if (otherCondition)\n'
' doSomethingElse();\n',
'')
self.assert_multi_line_lint(
'if (condition)\n'
' doSomething();\n'
'else\n'
' doSomethingElse();\n',
'')
self.assert_multi_line_lint(
'if (condition)\n'
' returnValue = foo;\n'
'else if (otherCondition)\n'
' returnValue = bar;\n',
'')
self.assert_multi_line_lint(
'if (condition)\n'
' returnValue = foo;\n'
'else\n'
' returnValue = bar;\n',
'')
self.assert_multi_line_lint(
'if (condition)\n'
' doSomething();\n'
'else if (liquid)\n'
' return money;\n'
'else if (broke)\n'
' return favor;\n'
'else\n'
' sleep(28800);\n',
'')
self.assert_multi_line_lint(
'if (liquid) {\n'
' prepare();\n'
' return money;\n'
'} else if (greedy) {\n'
' keep();\n'
' return nothing;\n'
'}\n',
'An else if statement should be written as an if statement when the '
'prior "if" concludes with a return, break, continue or goto statement.'
' [readability/control_flow] [4]')
self.assert_multi_line_lint(
' if (stupid) {\n'
'infiniteLoop:\n'
' goto infiniteLoop;\n'
' } else if (evil)\n'
' goto hell;\n',
'An else if statement should be written as an if statement when the '
'prior "if" concludes with a return, break, continue or goto statement.'
' [readability/control_flow] [4]')
self.assert_multi_line_lint(
'if (liquid)\n'
'{\n'
' prepare();\n'
' return money;\n'
'}\n'
'else if (greedy)\n'
' keep();\n',
['This { should be at the end of the previous line [whitespace/braces] [4]',
'An else should appear on the same line as the preceding } [whitespace/newline] [4]',
'An else if statement should be written as an if statement when the '
'prior "if" concludes with a return, break, continue or goto statement.'
' [readability/control_flow] [4]'])
self.assert_multi_line_lint(
'if (gone)\n'
' return;\n'
'else if (here)\n'
' go();\n',
'An else if statement should be written as an if statement when the '
'prior "if" concludes with a return, break, continue or goto statement.'
' [readability/control_flow] [4]')
self.assert_multi_line_lint(
'if (gone)\n'
' return;\n'
'else\n'
' go();\n',
'An else statement can be removed when the prior "if" concludes '
'with a return, break, continue or goto statement.'
' [readability/control_flow] [4]')
self.assert_multi_line_lint(
'if (motivated) {\n'
' prepare();\n'
' continue;\n'
'} else {\n'
' cleanUp();\n'
' break;\n'
'}\n',
'An else statement can be removed when the prior "if" concludes '
'with a return, break, continue or goto statement.'
' [readability/control_flow] [4]')
self.assert_multi_line_lint(
'if (tired)\n'
' break;\n'
'else {\n'
' prepare();\n'
' continue;\n'
'}\n',
'An else statement can be removed when the prior "if" concludes '
'with a return, break, continue or goto statement.'
' [readability/control_flow] [4]')
def test_braces(self):
# 1. Function definitions: place each brace on its own line.
self.assert_multi_line_lint(
'int main()\n'
'{\n'
' doSomething();\n'
'}\n',
'')
self.assert_multi_line_lint(
'int main() {\n'
' doSomething();\n'
'}\n',
'Place brace on its own line for function definitions. [whitespace/braces] [4]')
# 2. Other braces: place the open brace on the line preceding the
# code block; place the close brace on its own line.
self.assert_multi_line_lint(
'class MyClass {\n'
' int foo;\n'
'};\n',
'')
self.assert_multi_line_lint(
'namespace WebCore {\n'
'int foo;\n'
'};\n',
'')
self.assert_multi_line_lint(
'for (int i = 0; i < 10; i++) {\n'
' DoSomething();\n'
'};\n',
'')
self.assert_multi_line_lint(
'class MyClass\n'
'{\n'
' int foo;\n'
'};\n',
'This { should be at the end of the previous line [whitespace/braces] [4]')
self.assert_multi_line_lint(
'if (condition)\n'
'{\n'
' int foo;\n'
'}\n',
'This { should be at the end of the previous line [whitespace/braces] [4]')
self.assert_multi_line_lint(
'for (int i = 0; i < 10; i++)\n'
'{\n'
' int foo;\n'
'}\n',
'This { should be at the end of the previous line [whitespace/braces] [4]')
self.assert_multi_line_lint(
'while (true)\n'
'{\n'
' int foo;\n'
'}\n',
'This { should be at the end of the previous line [whitespace/braces] [4]')
self.assert_multi_line_lint(
'foreach (Foo* foo, foos)\n'
'{\n'
' int bar;\n'
'}\n',
'This { should be at the end of the previous line [whitespace/braces] [4]')
self.assert_multi_line_lint(
'switch (type)\n'
'{\n'
'case foo: return;\n'
'}\n',
'This { should be at the end of the previous line [whitespace/braces] [4]')
self.assert_multi_line_lint(
'if (condition)\n'
'{\n'
' int foo;\n'
'}\n',
'This { should be at the end of the previous line [whitespace/braces] [4]')
self.assert_multi_line_lint(
'for (int i = 0; i < 10; i++)\n'
'{\n'
' int foo;\n'
'}\n',
'This { should be at the end of the previous line [whitespace/braces] [4]')
self.assert_multi_line_lint(
'while (true)\n'
'{\n'
' int foo;\n'
'}\n',
'This { should be at the end of the previous line [whitespace/braces] [4]')
self.assert_multi_line_lint(
'switch (type)\n'
'{\n'
'case foo: return;\n'
'}\n',
'This { should be at the end of the previous line [whitespace/braces] [4]')
self.assert_multi_line_lint(
'else if (type)\n'
'{\n'
'case foo: return;\n'
'}\n',
'This { should be at the end of the previous line [whitespace/braces] [4]')
# 3. One-line control clauses should not use braces unless
# comments are included or a single statement spans multiple
# lines.
self.assert_multi_line_lint(
'if (true) {\n'
' int foo;\n'
'}\n',
'One line control clauses should not use braces. [whitespace/braces] [4]')
self.assert_multi_line_lint(
'for (; foo; bar) {\n'
' int foo;\n'
'}\n',
'One line control clauses should not use braces. [whitespace/braces] [4]')
self.assert_multi_line_lint(
'foreach (foo, foos) {\n'
' int bar;\n'
'}\n',
'One line control clauses should not use braces. [whitespace/braces] [4]')
self.assert_multi_line_lint(
'while (true) {\n'
' int foo;\n'
'}\n',
'One line control clauses should not use braces. [whitespace/braces] [4]')
self.assert_multi_line_lint(
'if (true)\n'
' int foo;\n'
'else {\n'
' int foo;\n'
'}\n',
'One line control clauses should not use braces. [whitespace/braces] [4]')
self.assert_multi_line_lint(
'if (true) {\n'
' int foo;\n'
'} else\n'
' int foo;\n',
'One line control clauses should not use braces. [whitespace/braces] [4]')
self.assert_multi_line_lint(
'if (true) {\n'
' // Some comment\n'
' int foo;\n'
'}\n',
'')
self.assert_multi_line_lint(
'if (true) {\n'
' myFunction(reallyLongParam1, reallyLongParam2,\n'
' reallyLongParam3);\n'
'}\n',
'Weird number of spaces at line-start. Are you using a 4-space indent? [whitespace/indent] [3]')
self.assert_multi_line_lint(
'if (true) {\n'
' myFunction(reallyLongParam1, reallyLongParam2,\n'
' reallyLongParam3);\n'
'}\n',
'When wrapping a line, only indent 4 spaces. [whitespace/indent] [3]')
# 4. Control clauses without a body should use empty braces.
self.assert_multi_line_lint(
'for ( ; current; current = current->next) { }\n',
'')
self.assert_multi_line_lint(
'for ( ; current;\n'
' current = current->next) { }\n',
'Weird number of spaces at line-start. Are you using a 4-space indent? [whitespace/indent] [3]')
self.assert_multi_line_lint(
'for ( ; current; current = current->next);\n',
'Semicolon defining empty statement for this loop. Use { } instead. [whitespace/semicolon] [5]')
self.assert_multi_line_lint(
'while (true);\n',
'Semicolon defining empty statement for this loop. Use { } instead. [whitespace/semicolon] [5]')
self.assert_multi_line_lint(
'} while (true);\n',
'')
def test_null_false_zero(self):
# 1. In C++, the null pointer value should be written as 0. In C,
# it should be written as NULL. In Objective-C and Objective-C++,
# follow the guideline for C or C++, respectively, but use nil to
# represent a null Objective-C object.
self.assert_lint(
'functionCall(NULL)',
'Use 0 instead of NULL.'
' [readability/null] [5]',
'foo.cpp')
self.assert_lint(
"// Don't use NULL in comments since it isn't in code.",
'Use 0 or null instead of NULL (even in *comments*).'
' [readability/null] [4]',
'foo.cpp')
self.assert_lint(
'"A string with NULL" // and a comment with NULL is tricky to flag correctly in cpp_style.',
'Use 0 or null instead of NULL (even in *comments*).'
' [readability/null] [4]',
'foo.cpp')
self.assert_lint(
'"A string containing NULL is ok"',
'',
'foo.cpp')
self.assert_lint(
'if (aboutNULL)',
'',
'foo.cpp')
self.assert_lint(
'myVariable = NULLify',
'',
'foo.cpp')
# Make sure that the NULL check does not apply to C and Objective-C files.
self.assert_lint(
'functionCall(NULL)',
'',
'foo.c')
self.assert_lint(
'functionCall(NULL)',
'',
'foo.m')
# Make sure that the NULL check does not apply to g_object_{set,get} and
# g_str{join,concat}
self.assert_lint(
'g_object_get(foo, "prop", &bar, NULL);',
'')
self.assert_lint(
'g_object_set(foo, "prop", bar, NULL);',
'')
self.assert_lint(
'g_build_filename(foo, bar, NULL);',
'')
self.assert_lint(
'gst_bin_add_many(foo, bar, boo, NULL);',
'')
self.assert_lint(
'gst_bin_remove_many(foo, bar, boo, NULL);',
'')
self.assert_lint(
'gst_element_link_many(foo, bar, boo, NULL);',
'')
self.assert_lint(
'gst_element_unlink_many(foo, bar, boo, NULL);',
'')
self.assert_lint(
'gst_structure_get(foo, "value", G_TYPE_INT, &value, NULL);',
'')
self.assert_lint(
'gst_structure_set(foo, "value", G_TYPE_INT, value, NULL);',
'')
self.assert_lint(
'gst_structure_remove_fields(foo, "value", "bar", NULL);',
'')
self.assert_lint(
'gst_structure_new("foo", "value", G_TYPE_INT, value, NULL);',
'')
self.assert_lint(
'gst_structure_id_new(FOO, VALUE, G_TYPE_INT, value, NULL);',
'')
self.assert_lint(
'gst_structure_id_set(FOO, VALUE, G_TYPE_INT, value, NULL);',
'')
self.assert_lint(
'gst_structure_id_get(FOO, VALUE, G_TYPE_INT, &value, NULL);',
'')
self.assert_lint(
'gst_caps_new_simple(mime, "value", G_TYPE_INT, &value, NULL);',
'')
self.assert_lint(
'gst_caps_new_full(structure1, structure2, NULL);',
'')
self.assert_lint(
'gchar* result = g_strconcat("part1", "part2", "part3", NULL);',
'')
self.assert_lint(
'gchar* result = g_strconcat("part1", NULL);',
'')
self.assert_lint(
'gchar* result = g_strjoin(",", "part1", "part2", "part3", NULL);',
'')
self.assert_lint(
'gchar* result = g_strjoin(",", "part1", NULL);',
'')
self.assert_lint(
'gchar* result = gdk_pixbuf_save_to_callback(pixbuf, function, data, type, error, NULL);',
'')
self.assert_lint(
'gchar* result = gdk_pixbuf_save_to_buffer(pixbuf, function, data, type, error, NULL);',
'')
self.assert_lint(
'gchar* result = gdk_pixbuf_save_to_stream(pixbuf, function, data, type, error, NULL);',
'')
self.assert_lint(
'gtk_widget_style_get(style, "propertyName", &value, "otherName", &otherValue, NULL);',
'')
self.assert_lint(
'gtk_style_context_get_style(context, "propertyName", &value, "otherName", &otherValue, NULL);',
'')
self.assert_lint(
'gtk_widget_style_get_property(style, NULL, NULL);',
'Use 0 instead of NULL. [readability/null] [5]',
'foo.cpp')
self.assert_lint(
'gtk_widget_style_get_valist(style, NULL, NULL);',
'Use 0 instead of NULL. [readability/null] [5]',
'foo.cpp')
# 2. C++ and C bool values should be written as true and
# false. Objective-C BOOL values should be written as YES and NO.
# FIXME: Implement this.
# 3. Tests for true/false, null/non-null, and zero/non-zero should
# all be done without equality comparisons.
self.assert_lint(
'if (count == 0)',
'Tests for true/false, null/non-null, and zero/non-zero should all be done without equality comparisons.'
' [readability/comparison_to_zero] [5]')
self.assert_lint_one_of_many_errors_re(
'if (string != NULL)',
r'Tests for true/false, null/non-null, and zero/non-zero should all be done without equality comparisons\.')
self.assert_lint(
'if (condition == true)',
'Tests for true/false, null/non-null, and zero/non-zero should all be done without equality comparisons.'
' [readability/comparison_to_zero] [5]')
self.assert_lint(
'if (myVariable != /* Why would anyone put a comment here? */ false)',
'Tests for true/false, null/non-null, and zero/non-zero should all be done without equality comparisons.'
' [readability/comparison_to_zero] [5]')
self.assert_lint(
'if (0 /* This comment also looks odd to me. */ != aLongerVariableName)',
'Tests for true/false, null/non-null, and zero/non-zero should all be done without equality comparisons.'
' [readability/comparison_to_zero] [5]')
self.assert_lint_one_of_many_errors_re(
'if (NULL == thisMayBeNull)',
r'Tests for true/false, null/non-null, and zero/non-zero should all be done without equality comparisons\.')
self.assert_lint(
'if (true != anotherCondition)',
'Tests for true/false, null/non-null, and zero/non-zero should all be done without equality comparisons.'
' [readability/comparison_to_zero] [5]')
self.assert_lint(
'if (false == myBoolValue)',
'Tests for true/false, null/non-null, and zero/non-zero should all be done without equality comparisons.'
' [readability/comparison_to_zero] [5]')
self.assert_lint(
'if (fontType == trueType)',
'')
self.assert_lint(
'if (othertrue == fontType)',
'')
self.assert_lint(
'if (LIKELY(foo == 0))',
'')
self.assert_lint(
'if (UNLIKELY(foo == 0))',
'')
self.assert_lint(
'if ((a - b) == 0.5)',
'')
self.assert_lint(
'if (0.5 == (a - b))',
'')
self.assert_lint(
'if (LIKELY(foo == NULL))',
'Use 0 instead of NULL. [readability/null] [5]')
self.assert_lint(
'if (UNLIKELY(foo == NULL))',
'Use 0 instead of NULL. [readability/null] [5]')
def test_directive_indentation(self):
self.assert_lint(
" #if FOO",
"preprocessor directives (e.g., #ifdef, #define, #import) should never be indented."
" [whitespace/indent] [4]",
"foo.cpp")
def test_using_std(self):
self.assert_lint(
'using std::min;',
"Use 'using namespace std;' instead of 'using std::min;'."
" [build/using_std] [4]",
'foo.cpp')
def test_max_macro(self):
self.assert_lint(
'int i = MAX(0, 1);',
'',
'foo.c')
self.assert_lint(
'int i = MAX(0, 1);',
'Use std::max() or std::max<type>() instead of the MAX() macro.'
' [runtime/max_min_macros] [4]',
'foo.cpp')
self.assert_lint(
'inline int foo() { return MAX(0, 1); }',
'Use std::max() or std::max<type>() instead of the MAX() macro.'
' [runtime/max_min_macros] [4]',
'foo.h')
def test_min_macro(self):
self.assert_lint(
'int i = MIN(0, 1);',
'',
'foo.c')
self.assert_lint(
'int i = MIN(0, 1);',
'Use std::min() or std::min<type>() instead of the MIN() macro.'
' [runtime/max_min_macros] [4]',
'foo.cpp')
self.assert_lint(
'inline int foo() { return MIN(0, 1); }',
'Use std::min() or std::min<type>() instead of the MIN() macro.'
' [runtime/max_min_macros] [4]',
'foo.h')
def test_ctype_function(self):
# Note: "equivelent" in the expected message below matches the checker's
# output verbatim, so the spelling is left untouched here.
self.assert_lint(
'int i = isascii(8);',
'Use equivelent function in <wtf/ASCIICType.h> instead of the '
'isascii() function. [runtime/ctype_function] [4]',
'foo.cpp')
def test_names(self):
name_underscore_error_message = " is incorrectly named. Don't use underscores in your identifier names. [readability/naming/underscores] [4]"
name_tooshort_error_message = " is incorrectly named. Don't use the single letter 'l' as an identifier name. [readability/naming] [4]"
# Basic cases from WebKit style guide.
self.assert_lint('struct Data;', '')
self.assert_lint('size_t bufferSize;', '')
self.assert_lint('class HTMLDocument;', '')
self.assert_lint('String mimeType();', '')
self.assert_lint('size_t buffer_size;',
'buffer_size' + name_underscore_error_message)
self.assert_lint('short m_length;', '')
self.assert_lint('short _length;',
'_length' + name_underscore_error_message)
self.assert_lint('short length_;',
'length_' + name_underscore_error_message)
self.assert_lint('unsigned _length;',
'_length' + name_underscore_error_message)
self.assert_lint('unsigned long _length;',
'_length' + name_underscore_error_message)
self.assert_lint('unsigned long long _length;',
'_length' + name_underscore_error_message)
# Allow underscores in Objective C files.
self.assert_lint('unsigned long long _length;',
'',
'foo.m')
self.assert_lint('unsigned long long _length;',
'',
'foo.mm')
self.assert_lint('#import "header_file.h"\n'
'unsigned long long _length;',
'',
'foo.h')
self.assert_lint('unsigned long long _length;\n'
'@interface WebFullscreenWindow;',
'',
'foo.h')
self.assert_lint('unsigned long long _length;\n'
'@implementation WebFullscreenWindow;',
'',
'foo.h')
self.assert_lint('unsigned long long _length;\n'
'@class WebWindowFadeAnimation;',
'',
'foo.h')
# Variable name 'l' is easy to confuse with '1'
self.assert_lint('int l;', 'l' + name_tooshort_error_message)
self.assert_lint('size_t l;', 'l' + name_tooshort_error_message)
self.assert_lint('long long l;', 'l' + name_tooshort_error_message)
# Pointers, references, functions, templates, and adjectives.
self.assert_lint('char* under_score;',
'under_score' + name_underscore_error_message)
self.assert_lint('const int UNDER_SCORE;',
'UNDER_SCORE' + name_underscore_error_message)
self.assert_lint('static inline const char const& const under_score;',
'under_score' + name_underscore_error_message)
self.assert_lint('WebCore::RenderObject* under_score;',
'under_score' + name_underscore_error_message)
self.assert_lint('int func_name();',
'func_name' + name_underscore_error_message)
self.assert_lint('RefPtr<RenderObject*> under_score;',
'under_score' + name_underscore_error_message)
self.assert_lint('WTF::Vector<WTF::RefPtr<const RenderObject* const> > under_score;',
'under_score' + name_underscore_error_message)
self.assert_lint('int under_score[];',
'under_score' + name_underscore_error_message)
self.assert_lint('struct dirent* under_score;',
'under_score' + name_underscore_error_message)
self.assert_lint('long under_score;',
'under_score' + name_underscore_error_message)
self.assert_lint('long long under_score;',
'under_score' + name_underscore_error_message)
self.assert_lint('long double under_score;',
'under_score' + name_underscore_error_message)
self.assert_lint('long long int under_score;',
'under_score' + name_underscore_error_message)
# Declarations in control statement.
self.assert_lint('if (int under_score = 42) {',
'under_score' + name_underscore_error_message)
self.assert_lint('else if (int under_score = 42) {',
'under_score' + name_underscore_error_message)
self.assert_lint('for (int under_score = 42; cond; i++) {',
'under_score' + name_underscore_error_message)
self.assert_lint('while (foo & under_score = bar) {',
'under_score' + name_underscore_error_message)
self.assert_lint('for (foo * under_score = p; cond; i++) {',
'under_score' + name_underscore_error_message)
self.assert_lint('for (foo * under_score; cond; i++) {',
'under_score' + name_underscore_error_message)
self.assert_lint('while (foo & value_in_thirdparty_library) {', '')
self.assert_lint('while (foo * value_in_thirdparty_library) {', '')
self.assert_lint('if (mli && S_OK == mli->foo()) {', '')
# More member variables and functions.
self.assert_lint('int SomeClass::s_validName', '')
self.assert_lint('int m_under_score;',
'm_under_score' + name_underscore_error_message)
self.assert_lint('int SomeClass::s_under_score = 0;',
'SomeClass::s_under_score' + name_underscore_error_message)
self.assert_lint('int SomeClass::under_score = 0;',
'SomeClass::under_score' + name_underscore_error_message)
# Other statements.
self.assert_lint('return INT_MAX;', '')
self.assert_lint('return_t under_score;',
'under_score' + name_underscore_error_message)
self.assert_lint('goto under_score;',
'under_score' + name_underscore_error_message)
self.assert_lint('delete static_cast<Foo*>(p);', '')
# Multiple variables in one line.
self.assert_lint('void myFunction(int variable1, int another_variable);',
'another_variable' + name_underscore_error_message)
self.assert_lint('int variable1, another_variable;',
'another_variable' + name_underscore_error_message)
self.assert_lint('int first_variable, secondVariable;',
'first_variable' + name_underscore_error_message)
self.assert_lint('void my_function(int variable_1, int variable_2);',
['my_function' + name_underscore_error_message,
'variable_1' + name_underscore_error_message,
'variable_2' + name_underscore_error_message])
self.assert_lint('for (int variable_1, variable_2;;) {',
['variable_1' + name_underscore_error_message,
'variable_2' + name_underscore_error_message])
# There is an exception for op code functions but only in the JavaScriptCore directory.
self.assert_lint('void this_op_code(int var1, int var2)', '', 'Source/JavaScriptCore/foo.cpp')
self.assert_lint('void op_code(int var1, int var2)', '', 'Source/JavaScriptCore/foo.cpp')
self.assert_lint('void this_op_code(int var1, int var2)', 'this_op_code' + name_underscore_error_message)
# GObject requires certain magical names in class declarations.
self.assert_lint('void webkit_dom_object_init();', '')
self.assert_lint('void webkit_dom_object_class_init();', '')
# There is an exception for GTK+ API.
self.assert_lint('void webkit_web_view_load(int var1, int var2)', '', 'Source/Webkit/gtk/webkit/foo.cpp')
self.assert_lint('void webkit_web_view_load(int var1, int var2)', '', 'Source/Webkit2/UIProcess/gtk/foo.cpp')
# Test that this doesn't also apply to files not in a 'gtk' directory.
self.assert_lint('void webkit_web_view_load(int var1, int var2)',
'webkit_web_view_load is incorrectly named. Don\'t use underscores in your identifier names.'
' [readability/naming/underscores] [4]', 'Source/Webkit/webkit/foo.cpp')
# Test that this doesn't also apply to names that don't start with 'webkit_'.
self.assert_lint_one_of_many_errors_re('void otherkit_web_view_load(int var1, int var2)',
'otherkit_web_view_load is incorrectly named. Don\'t use underscores in your identifier names.'
' [readability/naming/underscores] [4]', 'Source/Webkit/webkit/foo.cpp')
# There is an exception for some unit tests that begin with "tst_".
self.assert_lint('void tst_QWebFrame::arrayObjectEnumerable(int var1, int var2)', '')
# The Qt API uses names that begin with "qt_" or "_q_".
self.assert_lint('void QTFrame::qt_drt_is_awesome(int var1, int var2)', '')
self.assert_lint('void QTFrame::_q_drt_is_awesome(int var1, int var2)', '')
self.assert_lint('void qt_drt_is_awesome(int var1, int var2);', '')
self.assert_lint('void _q_drt_is_awesome(int var1, int var2);', '')
# Cairo forward-declarations should not be a failure.
self.assert_lint('typedef struct _cairo cairo_t;', '')
self.assert_lint('typedef struct _cairo_surface cairo_surface_t;', '')
self.assert_lint('typedef struct _cairo_scaled_font cairo_scaled_font_t;', '')
# EFL forward-declarations should not be a failure.
self.assert_lint('typedef struct _Ecore_Evas Ecore_Evas;', '')
self.assert_lint('typedef struct _Ecore_Pipe Ecore_Pipe;', '')
self.assert_lint('typedef struct _Eina_Rectangle Eina_Rectangle;', '')
self.assert_lint('typedef struct _Evas_Object Evas_Object;', '')
self.assert_lint('typedef struct _Ewk_History_Item Ewk_History_Item;', '')
# NPAPI functions that start with NPN_, NPP_ or NP_ are allowed.
self.assert_lint('void NPN_Status(NPP, const char*)', '')
self.assert_lint('NPError NPP_SetWindow(NPP instance, NPWindow *window)', '')
self.assert_lint('NPObject* NP_Allocate(NPP, NPClass*)', '')
# const_iterator is allowed as well.
self.assert_lint('typedef VectorType::const_iterator const_iterator;', '')
# vm_throw is allowed as well.
self.assert_lint('int vm_throw;', '')
# Bitfields.
self.assert_lint('unsigned _fillRule : 1;',
'_fillRule' + name_underscore_error_message)
# new operators in initialization.
self.assert_lint('OwnPtr<uint32_t> variable(new uint32_t);', '')
self.assert_lint('OwnPtr<uint32_t> variable(new (expr) uint32_t);', '')
self.assert_lint('OwnPtr<uint32_t> under_score(new uint32_t);',
'under_score' + name_underscore_error_message)
def test_parameter_names(self):
# Leave meaningless variable names out of function declarations.
meaningless_variable_name_error_message = 'The parameter name "%s" adds no information, so it should be removed. [readability/parameter_name] [5]'
parameter_error_rules = ('-',
'+readability/parameter_name')
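# The tuple above uses the checker's filter syntax (spelled out here as an
# aid, not new behavior): '-' first disables every category, then
# '+readability/parameter_name' re-enables only the category under test.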
# No variable name, so no error.
self.assertEqual('',
self.perform_lint('void func(int);', 'test.cpp', parameter_error_rules))
# Verify that copying the name of the set function causes the error (with some odd casing).
self.assertEqual(meaningless_variable_name_error_message % 'itemCount',
self.perform_lint('void setItemCount(size_t itemCount);', 'test.cpp', parameter_error_rules))
self.assertEqual(meaningless_variable_name_error_message % 'abcCount',
self.perform_lint('void setABCCount(size_t abcCount);', 'test.cpp', parameter_error_rules))
# Verify that copying a type name will trigger the warning (even if the type is a template parameter).
self.assertEqual(meaningless_variable_name_error_message % 'context',
self.perform_lint('void funct(PassRefPtr<ScriptExecutionContext> context);', 'test.cpp', parameter_error_rules))
# Verify that acronyms as variable names trigger the error (for both set functions and type names).
self.assertEqual(meaningless_variable_name_error_message % 'ec',
self.perform_lint('void setExceptionCode(int ec);', 'test.cpp', parameter_error_rules))
self.assertEqual(meaningless_variable_name_error_message % 'ec',
self.perform_lint('void funct(ExceptionCode ec);', 'test.cpp', parameter_error_rules))
# 'object' alone, appended, or as part of an acronym is meaningless.
self.assertEqual(meaningless_variable_name_error_message % 'object',
self.perform_lint('void funct(RenderView object);', 'test.cpp', parameter_error_rules))
self.assertEqual(meaningless_variable_name_error_message % 'viewObject',
self.perform_lint('void funct(RenderView viewObject);', 'test.cpp', parameter_error_rules))
self.assertEqual(meaningless_variable_name_error_message % 'rvo',
self.perform_lint('void funct(RenderView rvo);', 'test.cpp', parameter_error_rules))
# Check that r, g, b, and a are allowed.
self.assertEqual('',
self.perform_lint('void setRGBAValues(int r, int g, int b, int a);', 'test.cpp', parameter_error_rules))
# Verify that a simple substring match isn't done, which would cause false positives.
self.assertEqual('',
self.perform_lint('void setNateLateCount(size_t elate);', 'test.cpp', parameter_error_rules))
self.assertEqual('',
self.perform_lint('void funct(NateLate elate);', 'test.cpp', parameter_error_rules))
# Don't generate warnings for function definitions (only declarations).
self.assertEqual('',
self.perform_lint('void funct(PassRefPtr<ScriptExecutionContext> context)\n'
'{\n'
'}\n', 'test.cpp', parameter_error_rules))
def test_comments(self):
# A comment at the beginning of a line is ok.
self.assert_lint('// comment', '')
self.assert_lint(' // comment', '')
self.assert_lint('} // namespace WebCore',
'One space before end of line comments'
' [whitespace/comments] [5]')
def test_webkit_export_check(self):
webkit_export_error_rules = ('-',
'+readability/webkit_export')
self.assertEqual('',
self.perform_lint('WEBKIT_EXPORT int foo();\n',
'WebKit/chromium/public/test.h',
webkit_export_error_rules))
self.assertEqual('',
self.perform_lint('WEBKIT_EXPORT int foo();\n',
'WebKit/chromium/tests/test.h',
webkit_export_error_rules))
self.assertEqual('WEBKIT_EXPORT should only be used in header files. [readability/webkit_export] [5]',
self.perform_lint('WEBKIT_EXPORT int foo();\n',
'WebKit/chromium/public/test.cpp',
webkit_export_error_rules))
self.assertEqual('WEBKIT_EXPORT should only appear in the chromium public (or tests) directory. [readability/webkit_export] [5]',
self.perform_lint('WEBKIT_EXPORT int foo();\n',
'WebKit/chromium/src/test.h',
webkit_export_error_rules))
self.assertEqual('WEBKIT_EXPORT should not be used on a function with a body. [readability/webkit_export] [5]',
self.perform_lint('WEBKIT_EXPORT int foo() { }\n',
'WebKit/chromium/public/test.h',
webkit_export_error_rules))
self.assertEqual('WEBKIT_EXPORT should not be used on a function with a body. [readability/webkit_export] [5]',
self.perform_lint('WEBKIT_EXPORT inline int foo()\n'
'{\n'
'}\n',
'WebKit/chromium/public/test.h',
webkit_export_error_rules))
self.assertEqual('WEBKIT_EXPORT should not be used with a pure virtual function. [readability/webkit_export] [5]',
self.perform_lint('{}\n'
'WEBKIT_EXPORT\n'
'virtual\n'
'int\n'
'foo() = 0;\n',
'WebKit/chromium/public/test.h',
webkit_export_error_rules))
self.assertEqual('',
self.perform_lint('{}\n'
'WEBKIT_EXPORT\n'
'virtual\n'
'int\n'
'foo() = 0;\n',
'test.h',
webkit_export_error_rules))
def test_other(self):
# FIXME: Implement this.
pass
class CppCheckerTest(unittest.TestCase):
"""Tests CppChecker class."""
def mock_handle_style_error(self):
pass
def _checker(self):
return CppChecker("foo", "h", self.mock_handle_style_error, 3)
def test_init(self):
"""Test __init__ constructor."""
checker = self._checker()
self.assertEqual(checker.file_extension, "h")
self.assertEqual(checker.file_path, "foo")
self.assertEqual(checker.handle_style_error, self.mock_handle_style_error)
self.assertEqual(checker.min_confidence, 3)
def test_eq(self):
"""Test __eq__ equality function."""
checker1 = self._checker()
checker2 = self._checker()
# == calls __eq__.
self.assertTrue(checker1 == checker2)
def mock_handle_style_error2(self):
pass
# Verify that a difference in any argument causes equality to fail.
checker = CppChecker("foo", "h", self.mock_handle_style_error, 3)
self.assertFalse(checker == CppChecker("bar", "h", self.mock_handle_style_error, 3))
self.assertFalse(checker == CppChecker("foo", "c", self.mock_handle_style_error, 3))
self.assertFalse(checker == CppChecker("foo", "h", mock_handle_style_error2, 3))
self.assertFalse(checker == CppChecker("foo", "h", self.mock_handle_style_error, 4))
def test_ne(self):
"""Test __ne__ inequality function."""
checker1 = self._checker()
checker2 = self._checker()
# != calls __ne__.
# By default, __ne__ always returns true on different objects.
# Thus, just check the distinguishing case to verify that the
# code defines __ne__.
self.assertFalse(checker1 != checker2)
def tearDown():
"""A global check to make sure all error-categories have been tested.
The main tearDown() routine is the only code we can guarantee will be
run after all other tests have been executed.
"""
try:
if _run_verifyallcategoriesseen:
ErrorCollector(None).verify_all_categories_are_seen()
except NameError:
# If nobody set the global _run_verifyallcategoriesseen, then
# we assume we shouldn't run the test.
pass
if __name__ == '__main__':
import sys
# We don't want to run the verify_all_categories_are_seen() test unless
# we're running the full test suite: if we only run one test,
# obviously we're not going to see all the error categories. So we
# only run verify_all_categories_are_seen() when no commandline flags
# are passed in.
global _run_verifyallcategoriesseen
_run_verifyallcategoriesseen = (len(sys.argv) == 1)
unittest.main()
# --- file: third_party/WebKit/Tools/Scripts/webkitpy/style/checkers/cpp_unittest.py | repo: leighpauls/k2cro4 | license: bsd-3-clause ---
"""Converting code to AST.
Adapted from Tangent.
"""
import ast
import inspect
import linecache
import re
import sys
import textwrap
import tokenize
import astunparse
import gast
import six
from tensorflow.python.autograph.pyct import errors
from tensorflow.python.autograph.pyct import inspect_utils
from tensorflow.python.util import tf_inspect
PY2_PREAMBLE = textwrap.dedent("""
""")
PY3_PREAMBLE = ''
MAX_SIZE = 0
if sys.version_info >= (3, 9):
astunparse = ast
if sys.version_info >= (3,):
STANDARD_PREAMBLE = PY3_PREAMBLE
MAX_SIZE = sys.maxsize
else:
STANDARD_PREAMBLE = PY2_PREAMBLE
MAX_SIZE = sys.maxint
STANDARD_PREAMBLE_LEN = STANDARD_PREAMBLE.count('__future__')
_LEADING_WHITESPACE = re.compile(r'\s*')
def _unfold_continuations(code_string):
"""Removes any backslash line continuations from the code."""
return code_string.replace('\\\n', '')
def dedent_block(code_string):
"""Dedents a code so that its first line starts at row zero."""
code_string = _unfold_continuations(code_string)
token_gen = tokenize.generate_tokens(six.StringIO(code_string).readline)
block_indentation = None
tokens = []
try:
for tok in token_gen:
tokens.append(tok)
except tokenize.TokenError:
# Resolution of lambda functions may yield incomplete code, which can
# in turn generate this error. We silently ignore this error because the
# parser may still be able to deal with it.
pass
for tok in tokens:
tok_type, tok_string, _, _, _ = tok
if tok_type == tokenize.INDENT:
block_indentation = tok_string
block_level = len(block_indentation)
break
elif tok_type not in (
tokenize.NL, tokenize.NEWLINE, tokenize.STRING, tokenize.COMMENT):
block_indentation = ''
break
if not block_indentation:
return code_string
block_level = len(block_indentation)
first_indent_uses_tabs = '\t' in block_indentation
for i, tok in enumerate(tokens):
tok_type, tok_string, _, _, _ = tok
if tok_type == tokenize.INDENT:
if ((' ' in tok_string and first_indent_uses_tabs)
or ('\t' in tok_string and not first_indent_uses_tabs)):
# TODO(mdan): We could attempt to convert tabs to spaces by unix rule.
# See:
# https://docs.python.org/3/reference/lexical_analysis.html#indentation
raise errors.UnsupportedLanguageElementError(
'code mixing tabs and spaces for indentation is not allowed')
if len(tok_string) >= block_level:
tok_string = tok_string[block_level:]
tokens[i] = (tok_type, tok_string)
new_code = tokenize.untokenize(tokens)
# Note: untokenize respects the line structure, but not the whitespace within
# lines. For example, `def foo()` may be untokenized as `def foo ()`,
# so instead of using the untokenized output directly, we match the
# leading whitespace on each line.
dedented_code = []
for line, new_line in zip(code_string.split('\n'), new_code.split('\n')):
original_indent = re.match(_LEADING_WHITESPACE, line).group()
new_indent = re.match(_LEADING_WHITESPACE, new_line).group()
if len(original_indent) > len(new_indent):
dedented_line = line[len(original_indent) - len(new_indent):]
else:
dedented_line = line
dedented_code.append(dedented_line)
new_code = '\n'.join(dedented_code)
return new_code
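# Illustrative sketch, not part of the original module: how dedent_block is
# expected to behave on an indented snippet. The sample string below is
# hypothetical.
def _demo_dedent_block():
  """Returns a dedented copy of a four-space-indented function body."""
  snippet = '    def f(x):\n        return x + 1\n'
  # The shared four-space indent is stripped from every line, while the body
  # keeps its relative indentation under `def f(x):`.
  return dedent_block(snippet)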
def parse_entity(entity, future_features):
"""Returns the AST and source code of given entity.
Args:
entity: Any, Python function/method/class
future_features: Iterable[Text], future features to use (e.g.
'print_statement'). See
https://docs.python.org/2/reference/simple_stmts.html#future
Returns:
gast.AST, Text: the parsed AST node; the source code that was parsed to
generate the AST (including any prefixes that this function may have added).
"""
if inspect_utils.islambda(entity):
return _parse_lambda(entity)
try:
original_source = inspect_utils.getimmediatesource(entity)
except OSError as e:
raise errors.InaccessibleSourceCodeError(
f'Unable to locate the source code of {entity}. Note that functions'
' defined in certain environments, like the interactive Python shell,'
' do not expose their source code. If that is the case, you should'
' define them in a .py source file. If you are certain the code is'
' graph-compatible, wrap the call using'
f' @tf.autograph.experimental.do_not_convert. Original error: {e}')
source = dedent_block(original_source)
future_statements = tuple(
'from __future__ import {}'.format(name) for name in future_features)
source = '\n'.join(future_statements + (source,))
return parse(source, preamble_len=len(future_features)), source
def _without_context(node, lines, minl, maxl):
"""Returns a clean node and source code without indenting and context."""
for n in gast.walk(node):
lineno = getattr(n, 'lineno', None)
if lineno is not None:
n.lineno = lineno - minl
end_lineno = getattr(n, 'end_lineno', None)
if end_lineno is not None:
n.end_lineno = end_lineno - minl
code_lines = lines[minl - 1:maxl]
# Attempt to clean up surrounding context code.
end_col_offset = getattr(node, 'end_col_offset', None)
if end_col_offset is not None:
# This is only available in 3.8.
code_lines[-1] = code_lines[-1][:end_col_offset]
col_offset = getattr(node, 'col_offset', None)
if col_offset is None:
# Older Python: try to find the "lambda" token. This is brittle.
match = re.search(r'(?<!\w)lambda(?!\w)', code_lines[0])
if match is not None:
col_offset = match.start(0)
if col_offset is not None:
code_lines[0] = code_lines[0][col_offset:]
code_block = '\n'.join([c.rstrip() for c in code_lines])
return node, code_block
def _arg_name(node):
if node is None:
return None
if isinstance(node, gast.Name):
return node.id
assert isinstance(node, str)
return node
def _node_matches_argspec(node, func):
"""Returns True is node fits the argspec of func."""
# TODO(mdan): Use just inspect once support for Python 2 is dropped.
arg_spec = tf_inspect.getfullargspec(func)
node_args = tuple(_arg_name(arg) for arg in node.args.args)
if node_args != tuple(arg_spec.args):
return False
if arg_spec.varargs != _arg_name(node.args.vararg):
return False
if arg_spec.varkw != _arg_name(node.args.kwarg):
return False
node_kwonlyargs = tuple(_arg_name(arg) for arg in node.args.kwonlyargs)
if node_kwonlyargs != tuple(arg_spec.kwonlyargs):
return False
return True
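# Illustrative sketch, not part of the original module: a lambda's AST matches
# the argspec of the function it was compiled from. The lambda below is
# hypothetical; parse_expression is defined later in this module.
def _demo_node_matches_argspec():
  """Returns True: same positional args, no varargs, kwargs or kwonly args."""
  fn = lambda x, y=1: x + y
  node = parse_expression('lambda x, y=1: x + y')
  return _node_matches_argspec(node, fn)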
def _parse_lambda(lam):
"""Returns the AST and source code of given lambda function.
Args:
lam: types.LambdaType, Python function/method/class
Returns:
gast.AST, Text: the parsed AST node; the source code that was parsed to
generate the AST (including any prefixes that this function may have added).
"""
# TODO(mdan): Use a fast path if the definition is not multi-line.
# We could detect that the lambda is in a multi-line expression by looking
# at the surrounding code - a surrounding set of parentheses indicates a
# potential multi-line definition.
mod = inspect.getmodule(lam)
f = inspect.getsourcefile(lam)
def_line = lam.__code__.co_firstlineno
# This method is more robust than just calling inspect.getsource(mod), as it
# works in interactive shells, where getsource would fail. This is the
# same procedure followed by inspect for non-modules:
# https://github.com/python/cpython/blob/3.8/Lib/inspect.py#L772
lines = linecache.getlines(f, mod.__dict__)
source = ''.join(lines)
# Narrow down to the last node starting before our definition node.
all_nodes = parse(source, preamble_len=0, single_node=False)
search_nodes = []
for node in all_nodes:
# Also include nodes without a line number, for safety. This is defensive -
# we don't know whether such nodes might exist, and if they do, whether
# it is safe to skip them.
# TODO(mdan): Replace this check with an assertion or skip such nodes.
if getattr(node, 'lineno', def_line) <= def_line:
search_nodes.append(node)
else:
# Found a node starting past our lambda - can stop the search.
break
# Extract all lambda nodes from the shortlist.
lambda_nodes = []
for node in search_nodes:
lambda_nodes.extend(
n for n in gast.walk(node) if isinstance(n, gast.Lambda))
# Filter down to lambda nodes which span our actual lambda.
candidates = []
for ln in lambda_nodes:
minl, maxl = MAX_SIZE, 0
for n in gast.walk(ln):
minl = min(minl, getattr(n, 'lineno', minl))
lineno = getattr(n, 'lineno', maxl)
end_lineno = getattr(n, 'end_lineno', None)
if end_lineno is not None:
# end_lineno is more precise, but lineno should almost always work too.
lineno = end_lineno
maxl = max(maxl, lineno)
if minl <= def_line <= maxl:
candidates.append((ln, minl, maxl))
# Happy path: exactly one node found.
if len(candidates) == 1:
(node, minl, maxl), = candidates # pylint:disable=unbalanced-tuple-unpacking
return _without_context(node, lines, minl, maxl)
elif not candidates:
lambda_codes = '\n'.join([unparse(l) for l in lambda_nodes])
raise errors.UnsupportedLanguageElementError(
f'could not parse the source code of {lam}:'
f' no matching AST found among candidates:\n{lambda_codes}')
# Attempt to narrow down the selection by signature if multiple nodes are found.
matches = [v for v in candidates if _node_matches_argspec(v[0], lam)]
if len(matches) == 1:
(node, minl, maxl), = matches
return _without_context(node, lines, minl, maxl)
# Give up if we could not narrow the selection down to a single node.
matches = '\n'.join(
'Match {}:\n{}\n'.format(i, unparse(node, include_encoding_marker=False))
for i, (node, _, _) in enumerate(matches))
raise errors.UnsupportedLanguageElementError(
f'could not parse the source code of {lam}: found multiple definitions'
' with identical signatures at the location. This error'
' may be avoided by defining each lambda on a single line and with'
f' unique argument names. The matching definitions were:\n{matches}')
# TODO(mdan): This should take futures as input instead.
def parse(src, preamble_len=0, single_node=True):
"""Returns the AST of given piece of code.
Args:
src: Text
preamble_len: Int, indicates leading nodes in the parsed AST which should be
dropped.
single_node: Bool, whether `src` is assumed to be represented by exactly one
AST node.
Returns:
ast.AST
"""
module_node = gast.parse(src)
nodes = module_node.body
if preamble_len:
nodes = nodes[preamble_len:]
if single_node:
if len(nodes) != 1:
raise ValueError('expected exactly one node, got {}'.format(nodes))
return nodes[0]
return nodes
def parse_expression(src):
"""Returns the AST of given identifier.
Args:
src: A piece of code that represents a single Python expression
Returns:
A gast.AST object.
Raises:
ValueError: if src does not consist of a single Expression.
"""
src = STANDARD_PREAMBLE + src.strip()
node = parse(src, preamble_len=STANDARD_PREAMBLE_LEN, single_node=True)
if __debug__:
if not isinstance(node, gast.Expr):
raise ValueError(
'expected exactly one node of type Expr, got {}'.format(node))
return node.value
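# Illustrative sketch, not part of the original module: parse_expression
# returns the node for the expression itself, not the enclosing Expr
# statement, so the result can be spliced directly into a larger AST.
def _demo_parse_expression():
  """Returns the gast.BinOp node for a hypothetical expression."""
  return parse_expression('a + b')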
def unparse(node, indentation=None, include_encoding_marker=True):
"""Returns the source code of given AST.
Args:
node: The code to compile, as an AST object.
indentation: Unused, deprecated. The returned code will always be indented
at 4 spaces.
include_encoding_marker: Bool, whether to include a comment on the first
line to explicitly specify UTF-8 encoding.
Returns:
code: The source code generated from the AST object.
"""
del indentation # astunparse doesn't allow configuring it.
if not isinstance(node, (list, tuple)):
node = (node,)
codes = []
if include_encoding_marker:
codes.append('# coding=utf-8')
for n in node:
if isinstance(n, gast.AST):
ast_n = gast.gast_to_ast(n)
else:
ast_n = n
if astunparse is ast:
ast.fix_missing_locations(ast_n) # Only ast needs to call this.
codes.append(astunparse.unparse(ast_n).strip())
return '\n'.join(codes)
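# Illustrative sketch, not part of the original module: a parse/unparse round
# trip on a hypothetical statement.
def _demo_unparse_roundtrip():
  """Returns the source text regenerated from a parsed assignment."""
  node = parse('x = 1 + 2')
  return unparse(node, include_encoding_marker=False)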
# --- file: tensorflow/python/autograph/pyct/parser.py | repo: Intel-Corporation/tensorflow | license: apache-2.0 ---
import binascii
import hashlib
import os
import sys
from ecdsa import SigningKey, VerifyingKey, curves
from ecdsa import ecdsa
from ecdsa import util as ecdsautil
DEFAULT_KEYTYPE = curves.NIST192p
def get_keys_folder(datafolder):
"""
:param datafolder:
:return:
"""
return os.path.join(datafolder, "keys")
def get_pub_keyfilename(datafolder):
"""
:param datafolder:
:return:
"""
keyfolder = get_keys_folder(datafolder)
return os.path.join(keyfolder, "identity.pub")
def get_priv_keyfilename(datafolder):
"""
:param datafolder:
:return:
"""
keyfolder = get_keys_folder(datafolder)
return os.path.join(keyfolder, "identity.priv")
def first_run(datafolder):
"""
Do our first run and generate keys
:param datafolder:
:return:
"""
keyfolder = get_keys_folder(datafolder)
if not os.path.exists(keyfolder):
os.makedirs(keyfolder)
if not os.path.isfile(get_priv_keyfilename(datafolder)):
key = genkey()
savekey(key, keyfolder, "identity")
sys.stderr.write("ident key generated\n")
def pubkeyhash(pubkey):
"""
Get a hash of a public key
:param pubkey:
:return:
"""
return hashlib.sha512(pubkey.to_der()).hexdigest()
def genkey():
"""
Generate an ECDSA key
:return:
"""
return SigningKey.generate(curve=DEFAULT_KEYTYPE)
def savekey(keypair, path, name):
"""
Save a keypair as PEM files
:param keypair:
:param path:
:param name:
:return:
"""
privname = os.path.join(path, name + ".priv")
pubname = os.path.join(path, name + ".pub")
with open(privname, "wb") as privfile:
privfile.write(keypair.to_pem())
with open(pubname, "wb") as pubfile:
pubfile.write(keypair.get_verifying_key().to_pem())
def load(privkeypem):
"""
Load a private key from disk
:param privkeypem:
:return:
"""
with open(privkeypem, "rb") as privfile:
return SigningKey.from_pem(privfile.read())
def loadpub(pubkeypem):
"""
Load a public key from a PEM file
:param pubkeypem:
:return:
"""
with open(pubkeypem, "rb") as pubfile:
return loadpubstr(pubfile.read())
def loadpubstr(pemstring):
"""
Load a public key from PEM string
:param pemstring:
:return:
"""
return VerifyingKey.from_pem(pemstring)
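def _demo_key_roundtrip(path):
    """
    Illustrative sketch, not part of the original module: save a freshly
    generated keypair as PEM files and load the private half back. The
    "demo" key name is hypothetical.
    :param path: existing directory to write the key files into
    :return: the reloaded SigningKey
    """
    key = genkey()
    savekey(key, path, "demo")
    return load(os.path.join(path, "demo.priv"))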
def get_pubkey(datafolder):
"""
Return the public key pem file
:param datafolder:
:return:
"""
filename = get_pub_keyfilename(datafolder)
if os.path.exists(filename):
with open(filename, "r") as filehandle:
return filehandle.read()
return None
def sign_string(privkey, message):
"""
Sign a string
:param privkey:
:param message:
:return:
"""
data = str(message)
sig = privkey.sign(data, hashfunc=hashlib.sha1, sigencode=ecdsautil.sigencode_der)
return binascii.hexlify(sig)
def verify_string(pubkey, signature, message):
"""
Verify a signature over a string.
:param pubkey:
:param signature:
:param message:
:return:
"""
data = str(message)
signature = binascii.unhexlify(signature)
return pubkey.verify(signature, data, hashfunc=hashlib.sha1, sigdecode=ecdsautil.sigdecode_der)
def ecdh(privkey, pubkey):
"""
Given a loaded private key and a loaded public key, perform an ECDH exchange
:param privkey:
:param pubkey:
:return:
"""
return ecdsa.ecdh(privkey, pubkey)
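def _demo_sign_verify():
    """
    Illustrative sketch, not part of the original module: a sign/verify
    round trip done entirely in memory; nothing touches the disk.
    :return: True if the signature verifies
    """
    keypair = genkey()
    signature = sign_string(keypair, "hello")
    return verify_string(keypair.get_verifying_key(), signature, "hello")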
# --- file: identity.py | repo: inorton/NULLTeamPlugin | license: bsd-2-clause ---
from tests.beeswax.impala_beeswax import ImpalaBeeswaxClient, QueryResult
from thrift.transport.TSocket import TSocket
from thrift.protocol import TBinaryProtocol
from thrift.transport.TTransport import TBufferedTransport, TTransportException
from getpass import getuser
import abc
import logging
import os
LOG = logging.getLogger('impala_connection')
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.INFO)
# All logging needs to be either executable SQL or a SQL comment (prefix with --).
console_handler.setFormatter(logging.Formatter('%(message)s'))
LOG.addHandler(console_handler)
LOG.propagate = False
# Common wrapper around the internal types of HS2/Beeswax operation/query handles.
class OperationHandle(object):
def __init__(self, handle):
self.__handle = handle
def get_handle(self): return self.__handle
# Represents an Impala connection.
class ImpalaConnection(object):
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def set_configuration_option(self, name, value):
"""Sets a configuraiton option name to the given value"""
pass
@abc.abstractmethod
def get_configuration(self):
"""Returns the configuration (a dictionary of key-value pairs) for this connection"""
pass
@abc.abstractmethod
def set_configuration(self, configuration_option_dict):
"""Replaces existing configuration with the given dictionary"""
pass
@abc.abstractmethod
def clear_configuration(self):
"""Clears all existing configuration."""
pass
@abc.abstractmethod
def connect(self):
"""Opens the connection"""
pass
@abc.abstractmethod
def close(self):
"""Closes the connection. Can be called multiple times"""
pass
@abc.abstractmethod
def close_query(self, handle):
"""Closes the query."""
pass
@abc.abstractmethod
def get_state(self, operation_handle):
"""Returns the state of a query"""
pass
@abc.abstractmethod
def get_log(self, operation_handle):
"""Returns the log of an operation"""
pass
@abc.abstractmethod
def cancel(self, operation_handle):
"""Cancels an in-flight operation"""
pass
def execute(self, sql_stmt):
"""Executes a query and fetches the results"""
pass
@abc.abstractmethod
def execute_async(self, sql_stmt):
"""Issues a query and returns the handle to the caller for processing"""
pass
@abc.abstractmethod
def fetch(self, sql_stmt, operation_handle, max_rows=-1):
"""Fetches query results up to max_rows given a handle and sql statement.
If max_rows < 0, all rows are fetched. If max_rows > 0 but the number of
rows returned is less than max_rows, all the rows have been fetched."""
pass
# Represents a connection to Impala using the Beeswax API.
class BeeswaxConnection(ImpalaConnection):
def __init__(self, host_port, use_kerberos=False, user=None, password=None,
use_ssl=False):
self.__beeswax_client = ImpalaBeeswaxClient(host_port, use_kerberos, user=user,
password=password, use_ssl=use_ssl)
self.__host_port = host_port
self.QUERY_STATES = self.__beeswax_client.query_states
def set_configuration_option(self, name, value):
# Only set the option if it's not already set to the same value.
if self.__beeswax_client.get_query_option(name) != value:
LOG.info('SET %s=%s;' % (name, value))
self.__beeswax_client.set_query_option(name, value)
def get_configuration(self):
return self.__beeswax_client.get_query_options
def set_configuration(self, config_option_dict):
assert config_option_dict is not None, "config_option_dict cannot be None"
self.clear_configuration()
for name, value in config_option_dict.iteritems():
self.set_configuration_option(name, value)
def clear_configuration(self):
self.__beeswax_client.clear_query_options()
def connect(self):
LOG.info("-- connecting to: %s" % self.__host_port)
self.__beeswax_client.connect()
# TODO: rename to close_connection
def close(self):
LOG.info("-- closing connection to: %s" % self.__host_port)
self.__beeswax_client.close_connection()
def close_query(self, operation_handle):
LOG.info("-- closing query for operation handle: %s" % operation_handle)
self.__beeswax_client.close_query(operation_handle.get_handle())
def execute(self, sql_stmt):
LOG.info("-- executing against %s\n%s;\n" % (self.__host_port, sql_stmt))
return self.__beeswax_client.execute(sql_stmt)
def execute_async(self, sql_stmt):
LOG.info("-- executing async: %s\n%s;\n" % (self.__host_port, sql_stmt))
return OperationHandle(self.__beeswax_client.execute_query_async(sql_stmt))
def cancel(self, operation_handle):
LOG.info("-- canceling operation: %s" % operation_handle)
return self.__beeswax_client.cancel_query(operation_handle.get_handle())
def get_state(self, operation_handle):
LOG.info("-- getting state for operation: %s" % operation_handle)
return self.__beeswax_client.get_state(operation_handle.get_handle())
def get_runtime_profile(self, operation_handle):
LOG.info("-- getting runtime profile operation: %s" % operation_handle)
return self.__beeswax_client.get_runtime_profile(operation_handle.get_handle())
def get_log(self, operation_handle):
LOG.info("-- getting log for operation: %s" % operation_handle)
return self.__beeswax_client.get_log(operation_handle.get_handle())
def refresh(self):
"""Invalidate the Impalad catalog"""
return self.execute("invalidate metadata")
def invalidate_table(self, table_name):
"""Invalidate a specific table from the catalog"""
return self.execute("invalidate metadata %s" % (table_name))
def refresh_table(self, db_name, table_name):
"""Refresh a specific table from the catalog"""
return self.execute("refresh %s.%s" % (db_name, table_name))
def fetch(self, sql_stmt, operation_handle, max_rows=-1):
LOG.info("-- fetching results from: %s" % operation_handle)
return self.__beeswax_client.fetch_results(
sql_stmt, operation_handle.get_handle(), max_rows)
def create_connection(host_port, use_kerberos=False):
# TODO: Support HS2 connections.
return BeeswaxConnection(host_port=host_port, use_kerberos=use_kerberos)
def create_ldap_connection(host_port, user, password, use_ssl=False):
return BeeswaxConnection(host_port=host_port, user=user, password=password,
use_ssl=use_ssl)
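# Illustrative sketch, not part of the original module: typical usage against
# a hypothetical impalad endpoint; 'localhost:21000' is a placeholder.
def _demo_connection():
  conn = create_connection('localhost:21000')
  conn.connect()
  try:
    # execute() runs the statement and fetches its results.
    return conn.execute('select 1')
  finally:
    conn.close()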
# --- file: tests/common/impala_connection.py | repo: rampage644/impala-cut | license: apache-2.0 ---
import RPi.GPIO as GPIO
from time import sleep
class Shifter:
"""Set pins for data, clock and latch; chain length and board mode respectively"""
def __init__(self, dataPin, clockPin, latchPin, chain = 1,
boardMode = GPIO.BOARD):
self.DATA = dataPin
self.CLOCK = clockPin
self.LATCH = latchPin
self.CHAIN = chain
self.BOARDMODE = boardMode
"""Value stored in 595's storage register"""
self.STORED=0x00
# Setup pins
GPIO.setmode(self.BOARDMODE)
GPIO.setup(self.DATA, GPIO.OUT)
GPIO.setup(self.LATCH, GPIO.OUT, initial=GPIO.LOW)
GPIO.setup(self.CLOCK, GPIO.OUT, initial=GPIO.LOW)
"""Push a single bit into the registers.
writeLatch should be called after 8*CHAIN pushes"""
def pushBit(self,state):
GPIO.output(self.CLOCK, 0)
GPIO.output(self.DATA, state)
GPIO.output(self.CLOCK, 1)
"""Transfer bits from shift register to storage register"""
def writeLatch(self):
GPIO.output(self.LATCH, 1)
GPIO.output(self.LATCH, 0)
"""Write a byte of length 8*CHAIN to the 595"""
def writeByte(self,value):
for i in range(8 * self.CHAIN):
    # Extract bits MSB-first. The mask must sit at the top bit of the
    # full chain width, i.e. shifted by 8 bits per extra register
    # (the original shift of 2*(CHAIN-1) only worked for CHAIN == 1).
    bit = (value << i) & (0x80 << 8 * (self.CHAIN - 1))
    self.pushBit(bit)
self.writeLatch()
self.STORED=value
"""High level write to a single pin"""
def writePin(self, pin, value):
oldVal = (self.STORED >> pin) & 0x01
if oldVal != value:
self.togglePin(pin)
"""Togggle the state of a single pin"""
def togglePin(self, pin):
self.writeByte(self.STORED ^ (0x01 << pin))
"""Clean up pins"""
def cleanup(self):
GPIO.cleanup()
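# Illustrative sketch, not part of the original module: drive a single
# 74HC595 on hypothetical physical pins 11/13/15 (GPIO.BOARD numbering).
# Adjust the pin numbers to match your wiring before running.
def _demo_shifter():
    """Write an alternating pattern, flip one output, then release pins."""
    shifter = Shifter(dataPin=11, clockPin=13, latchPin=15)
    shifter.writeByte(0b10101010)  # outputs QA..QH get alternating states
    shifter.togglePin(0)  # invert the lowest output
    shifter.cleanup()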
# --- file: RPiShift/__init__.py | repo: kneitinger/RPiShift | license: mit ---
"""Scoring functions relating to the ground station v2."""
from makani.avionics.common import pack_avionics_messages
from makani.lib.python.batch_sim import scoring_functions
import numpy as np
from scipy.signal import periodogram
import scoring_functions_util as scoring_util
def diff_wrap_angle(n_revs, angle):
"""Returns angle difference wrapped between [0, 2*pi*n_revs).
Args:
n_revs: Integer indicating the number of revolutions.
angle: Numpy array with the angles to be diff'ed and wrapped.
Returns:
Numpy array with the input angle diff'ed and wrapped.
"""
wrapping_angle = n_revs * np.pi
angle_diff = np.diff(np.mod(angle, 2.0 * wrapping_angle))
angle_diff[np.where(angle_diff >= wrapping_angle)] -= 2.0 * wrapping_angle
return angle_diff
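# Illustrative sketch, not part of the original module: diff_wrap_angle on a
# hypothetical detwist-style angle that wraps downward past zero.
def _demo_diff_wrap_angle():
  """Returns approximately [-0.38] for a single-revolution wrap (n_revs=1).

  The raw diff of +5.9 rad exceeds pi, so it is re-wrapped by subtracting
  2*pi rather than being read as a near-full positive revolution.
  """
  angle = np.array([0.3, 6.2])
  return diff_wrap_angle(1, angle)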
class GsgYokeAnglesScoringFunction(
scoring_functions.DoubleSidedLimitScoringFunction):
"""Tests if the gsg yoke angle falls outside its physical limits."""
def __init__(self, bad_lower_limit, good_lower_limit, good_upper_limit,
bad_upper_limit, severity):
super(GsgYokeAnglesScoringFunction, self).__init__(
'Gsg Yoke Range', 'deg', bad_lower_limit, good_lower_limit,
good_upper_limit, bad_upper_limit, severity)
def GetSystemLabels(self):
return ['gs02']
def GetValue(self, output):
return np.array([output['gsg_yoke_min'],
output['gsg_yoke_max']])
def GetOutput(self, timeseries):
gsg_yoke = timeseries['gsg_yoke']
return {
'gsg_yoke_max': np.max(gsg_yoke),
'gsg_yoke_min': np.min(gsg_yoke)
}
def GetTimeSeries(self, params, sim, control):
gsg_yoke = self._SelectTelemetry(sim, control, 'gsg_yoke')
return {'gsg_yoke': np.rad2deg(gsg_yoke)}
class GsgTerminationAnglesScoringFunction(
scoring_functions.DoubleSidedLimitScoringFunction):
"""Tests if the gsg termination angle falls outside its physical limits."""
def __init__(self, bad_lower_limit, good_lower_limit, good_upper_limit,
bad_upper_limit, severity):
super(GsgTerminationAnglesScoringFunction, self).__init__(
'Gsg Termination Range', 'deg', bad_lower_limit, good_lower_limit,
good_upper_limit, bad_upper_limit, severity)
def GetSystemLabels(self):
return ['gs02']
def GetValue(self, output):
return np.array([output['gsg_termination_min'],
output['gsg_termination_max']])
def GetOutput(self, timeseries):
gsg_termination = timeseries['gsg_termination']
return {
'gsg_termination_max': np.max(gsg_termination),
'gsg_termination_min': np.min(gsg_termination)
}
def GetTimeSeries(self, params, sim, control):
gsg_termination = self._SelectTelemetry(sim, control, 'gsg_termination')
return {'gsg_termination': np.rad2deg(gsg_termination)}
class GsAzimuthErrorScoringFunction(
scoring_functions.DoubleSidedLimitScoringFunction):
"""Checks if azimuth error falls within design limits in high tension mode."""
def __init__(self, bad_lower_limit, good_lower_limit, good_upper_limit,
bad_upper_limit, severity):
super(GsAzimuthErrorScoringFunction, self).__init__(
'GS02 Azimuth Error Range', 'deg', bad_lower_limit, good_lower_limit,
good_upper_limit, bad_upper_limit, severity)
def GetSystemLabels(self):
return ['gs02']
def GetValue(self, output):
return np.array([output['gs_azimuth_error_min'],
output['gs_azimuth_error_max']])
def GetOutput(self, timeseries):
gs_azimuth_error = timeseries['gs_azimuth_error']
return {
'gs_azimuth_error_max': np.max(gs_azimuth_error),
'gs_azimuth_error_min': np.min(gs_azimuth_error)
}
def GetTimeSeries(self, params, sim, control):
gs_azimuth_error = self._SelectTelemetry(sim, control, 'gs_azimuth_error')
return {'gs_azimuth_error': np.rad2deg(gs_azimuth_error)}
class GsDetwistCommandJumpScoringFunction(
scoring_functions.SingleSidedLimitScoringFunction):
""""Checks if jumps in the detwist command fall within acceptable limits."""
def __init__(self, good_limit, bad_limit, severity):
super(GsDetwistCommandJumpScoringFunction, self).__init__(
'Detwist Command Jump', 'deg', good_limit, bad_limit, severity)
def GetSystemLabels(self):
return ['gs02', 'control', 'experimental']
def GetValue(self, output):
return output['max_detwist_cmd_jump']
def GetOutput(self, output):
return {
'max_detwist_cmd_jump': np.max(np.abs(output['gs_detwist_cmd_diff']))
}
def GetTimeSeries(self, params, sim, control):
# The detwist command is in [0, TETHER_DETWIST_REVS * 2 * pi).
gs_detwist_cmd = self._SelectTelemetry(sim, control, ['gs_detwist_cmd'])
diff = diff_wrap_angle(
pack_avionics_messages.TETHER_DETWIST_REVS, gs_detwist_cmd)
return {'gs_detwist_cmd_diff': np.rad2deg(diff)}
class GsDetwistCommandRateScoringFunction(
scoring_functions.SingleSidedLimitScoringFunction):
""""Checks if the detwist command derivative is within acceptable limits."""
def __init__(self, good_limit, bad_limit, severity):
super(GsDetwistCommandRateScoringFunction, self).__init__(
'Detwist Command Rate', 'deg/s', good_limit, bad_limit, severity)
def GetSystemLabels(self):
return ['gs02', 'control']
def GetValue(self, output):
return output['max_detwist_cmd_rate']
def GetOutput(self, output):
return {
'max_detwist_cmd_rate': np.max(np.abs(output['gs_detwist_cmd_rate']))
}
def GetTimeSeries(self, params, sim, control):
# The detwist command is in [0, TETHER_DETWIST_REVS * 2 * pi).
time, gs_detwist_cmd = self._SelectTelemetry(
sim, control, ['time', 'gs_detwist_cmd'])
dt = scoring_util.GetTimeSamp(time)
if np.isnan(dt):
return {'gs_detwist_cmd_rate': np.array([float('nan')])}
gs_detwist_cmd_diff = diff_wrap_angle(
pack_avionics_messages.TETHER_DETWIST_REVS, gs_detwist_cmd)
gs_detwist_cmd_deriv = gs_detwist_cmd_diff / dt
return {'gs_detwist_cmd_rate': np.rad2deg(gs_detwist_cmd_deriv)}
class GsDetwistOscillationsScoringFunction(
scoring_functions.SingleSidedLimitScoringFunction):
""""Checks if there exist oscillations in the detwist angle."""
def __init__(self, good_limit, bad_limit, severity):
super(GsDetwistOscillationsScoringFunction, self).__init__(
'Detwist Oscillations', 'dB', good_limit, bad_limit, severity)
self.SetSourcePriority(['control'])
def GetSystemLabels(self):
return ['gs02', 'control']
def GetValue(self, output):
return output['gs_detwist_max_ratio']
def GetOutput(self, output):
if output['gs_detwist_ratios_per_loop']:
lobe_power_ratio = [r[2] for r in output['gs_detwist_ratios_per_loop']]
else:
lobe_power_ratio = np.nan
return {
'gs_detwist_max_ratio': np.max(lobe_power_ratio)
}
def GetTimeSeries(self, params, sim, control):
time, gs_detwist_pos, loop_angle = self._SelectTelemetry(
sim, control, ['time', 'gs_detwist_pos', 'loop_angle'])
accum_loops = np.cumsum(np.diff(loop_angle) > np.deg2rad(350.))
dt = scoring_util.GetTimeSamp(time)
if np.isnan(dt):
return {'gs_detwist_ratios_per_loop': []}
# Obtain the derivative of the detwist angle to remove the steadily
# decreasing ramp and the jump at every revolution. The frequency content
# is preserved.
# Wrapping first the detwist position to [0, 2*pi).
gs_detwist_pos_diff = diff_wrap_angle(
pack_avionics_messages.TETHER_DETWIST_REVS, gs_detwist_pos)
gs_detwist_pos_deriv = gs_detwist_pos_diff / dt
# Obtain the number of points in the FFT based on the desired frequency
# resolution and the sampling time: frequency_resolution ~= fs/nfft Hz.
frequency_resolution = 0.1 # [Hz]
nfft = 2.0 ** np.ceil(np.log2(1.0 / dt / frequency_resolution))
# Iterate through all loops.
num_loops = np.floor(accum_loops[-1])
if np.isnan(num_loops):
num_loops = 0
gs_detwist_ratios_per_loop = []
for loop in range(1, int(num_loops) + 1):
detwist_deriv_this_loop = gs_detwist_pos_deriv[
np.where(np.floor(accum_loops).astype(int) == loop)]
f_this_loop, pxx_this_loop = periodogram(detwist_deriv_this_loop,
fs=1.0 / dt, nfft=int(nfft),
scaling='spectrum',
detrend=False)
# Find the peaks in the spectrum.
# Peaks are where the derivative changes sign and the second derivative
# is negative.
pxx_diff_this_loop = np.diff(10.0 * np.log10(pxx_this_loop))
pxx_diff_sign_change_this_loop = (pxx_diff_this_loop[1:] *
pxx_diff_this_loop[0:-1])
pxx_diff_diff_this_loop = np.diff(pxx_diff_this_loop)
min_freq = 0.25 # [Hz]
pxx_peaks_idx_this_loop = np.where(
(pxx_diff_sign_change_this_loop < 0.) &
(pxx_diff_diff_this_loop < 0.) &
(f_this_loop[0:-2] > min_freq))[0] + 1
if pxx_peaks_idx_this_loop.size == 0:
# No peaks were found.
continue
# Get the highest secondary lobe.
max_peak_this_loop_db = 10.0 * np.log10(
np.max(pxx_this_loop[pxx_peaks_idx_this_loop]))
max_ratio_this_loop_db = (max_peak_this_loop_db -
10.0 * np.log10(pxx_this_loop[0]))
f_secondary_this_loop = f_this_loop[
pxx_peaks_idx_this_loop[
np.argmax(pxx_this_loop[pxx_peaks_idx_this_loop])]]
# Store tuple (loop number, lobe frequency [Hz], lobe power ratio [dB]).
gs_detwist_ratios_per_loop.append((loop + 1, f_secondary_this_loop,
max_ratio_this_loop_db))
return {'gs_detwist_ratios_per_loop': gs_detwist_ratios_per_loop}
class GSTetherTwistScoringFunction(
scoring_functions.DoubleSidedLimitScoringFunction):
""""Checks the number of tether twists."""
def __init__(self, bad_lower_limit, good_lower_limit, good_upper_limit,
bad_upper_limit, severity):
super(GSTetherTwistScoringFunction, self).__init__(
'Tether twists', '#', bad_lower_limit, good_lower_limit,
good_upper_limit, bad_upper_limit, severity)
def GetSystemLabels(self):
return ['gs02', 'control']
def GetValue(self, output):
return output['peak_tether_twists']
def GetOutput(self, output):
max_value = np.max(output['tether_twists'])
min_value = np.min(output['tether_twists'])
return {
'peak_tether_twists': max_value if
max_value > np.abs(min_value) else min_value
}
def GetTimeSeries(self, params, sim, control):
accum_kite_loops = self._SelectTelemetry(sim, control, ['accum_kite_loops'])
accum_detwist_loops = self._SelectTelemetry(sim, control,
['accum_detwist_loops'])
return {'tether_twists': accum_kite_loops - accum_detwist_loops}
# --- file: lib/python/batch_sim/scoring_functions/gs02.py | repo: google/makani | license: apache-2.0 ---
from ..common import SummaryBase
class CGPointSyntheticProvider(SummaryBase.SummaryBaseSyntheticProvider):
"""
Class representing CGPoint structure.
"""
# struct CGPoint {
# CGFloat x;
# CGFloat y;
# };
# typedef struct CGPoint CGPoint;
def __init__(self, value_obj, internal_dict):
super(CGPointSyntheticProvider, self).__init__(value_obj, internal_dict)
self.register_child_value("x", ivar_name="x", primitive_value_function=SummaryBase.get_float_value)
self.register_child_value("y", ivar_name="y", primitive_value_function=SummaryBase.get_float_value)
# --- file: mallet/CoreGraphics/CGPoint.py | repo: bartoszj/Mallet | license: mit ---
from __future__ import absolute_import
import os
import random
import sys
import time
import traceback
import types
from collections import defaultdict
from copy import deepcopy
from shutil import move
from tempfile import mkstemp
from django.conf import settings
from django.core.cache import cache
import six
try:
from importlib import import_module
except ImportError: # python < 2.7 compatibility
from django.utils.importlib import import_module
from graphite.logger import log
from graphite.errors import InputParameterError
from graphite.node import LeafNode
from graphite.intervals import Interval, IntervalSet
from graphite.finders.utils import FindQuery, BaseFinder
from graphite.readers import MultiReader
from graphite.worker_pool.pool import get_pool, pool_exec, Job, PoolTimeoutError
from graphite.render.grammar import grammar
def get_finders(finder_path):
module_name, class_name = finder_path.rsplit('.', 1)
module = import_module(module_name)
cls = getattr(module, class_name)
if getattr(cls, 'factory', None):
return cls.factory()
# monkey patch so legacy finders will work
finder = cls()
if sys.version_info[0] >= 3:
finder.fetch = types.MethodType(BaseFinder.fetch, finder)
finder.find_multi = types.MethodType(BaseFinder.find_multi, finder)
finder.get_index = types.MethodType(BaseFinder.get_index, finder)
else:
finder.fetch = types.MethodType(BaseFinder.fetch.__func__, finder)
finder.find_multi = types.MethodType(BaseFinder.find_multi.__func__, finder)
finder.get_index = types.MethodType(BaseFinder.get_index.__func__, finder)
return [finder]
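# Illustrative sketch, not part of the original module: get_finders resolves a
# dotted path into finder instances, one call per entry in
# settings.STORAGE_FINDERS.
def _demo_get_finders():
    """Returns the standard finder instances.

    The path value is graphite-web's default STORAGE_FINDERS entry, shown
    here for illustration.
    """
    return get_finders('graphite.finders.standard.StandardFinder')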
def get_tagdb(tagdb_path):
module_name, class_name = tagdb_path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, class_name)(settings, cache=cache, log=log)
class Store(object):
def __init__(self, finders=None, tagdb=None):
if finders is None:
finders = []
for finder_path in settings.STORAGE_FINDERS:
finders.extend(get_finders(finder_path))
self.finders = finders
if tagdb is None:
tagdb = get_tagdb(settings.TAGDB or 'graphite.tags.base.DummyTagDB')
self.tagdb = tagdb
def get_finders(self, local=False):
for finder in self.finders:
# Support legacy finders by defaulting to 'disabled = False'
if getattr(finder, 'disabled', False):
continue
# Support legacy finders by defaulting to 'local = True'
if local and not getattr(finder, 'local', True):
continue
yield finder
def pool_exec(self, jobs, timeout):
if not jobs:
return []
thread_count = 0
if settings.USE_WORKER_POOL:
thread_count = min(len(self.finders), settings.POOL_MAX_WORKERS)
return pool_exec(get_pool('finders', thread_count), jobs, timeout)
def wait_jobs(self, jobs, timeout, context):
if not jobs:
return []
start = time.time()
results = []
failed = []
done = 0
try:
for job in self.pool_exec(jobs, timeout):
elapsed = time.time() - start
done += 1
if job.exception:
failed.append(job)
log.info("Exception during %s after %fs: %s" % (
job, elapsed, str(job.exception))
)
else:
log.debug("Got a result for %s after %fs" % (job, elapsed))
results.append(job.result)
except PoolTimeoutError:
message = "Timed out after %fs for %s" % (
time.time() - start, context
)
log.info(message)
if done == 0:
raise Exception(message)
if len(failed) == done:
message = "All requests failed for %s (%d)" % (
context, len(failed)
)
for job in failed:
message += "\n\n%s: %s: %s" % (
job, job.exception,
'\n'.join(traceback.format_exception(*job.exception_info))
)
raise Exception(message)
if len(results) < len(jobs) and settings.STORE_FAIL_ON_ERROR:
message = "%s request(s) failed for %s (%d)" % (
len(jobs) - len(results), context, len(jobs)
)
for job in failed:
message += "\n\n%s: %s: %s" % (
job, job.exception,
'\n'.join(traceback.format_exception(*job.exception_info))
)
raise Exception(message)
return results
def fetch(self, patterns, startTime, endTime, now, requestContext):
# deduplicate patterns
patterns = sorted(set(patterns))
if not patterns:
return []
log.debug(
'graphite.storage.Store.fetch :: Starting fetch on all backends')
jobs = []
tag_patterns = None
pattern_aliases = defaultdict(list)
for finder in self.get_finders(requestContext.get('localOnly')):
# if the finder supports tags, just pass the patterns through
if getattr(finder, 'tags', False):
job = Job(
finder.fetch, 'fetch for %s' % patterns,
patterns, startTime, endTime,
now=now, requestContext=requestContext
)
jobs.append(job)
continue
# if we haven't resolved the seriesByTag calls, build resolved patterns and translation table
if tag_patterns is None:
tag_patterns, pattern_aliases = self._tag_patterns(patterns, requestContext)
# dispatch resolved patterns to finder
job = Job(
finder.fetch,
'fetch for %s' % tag_patterns,
tag_patterns, startTime, endTime,
now=now, requestContext=requestContext
)
jobs.append(job)
# Start fetches
start = time.time()
results = self.wait_jobs(jobs, settings.FETCH_TIMEOUT,
'fetch for %s' % str(patterns))
results = [i for l in results for i in l] # flatten
# translate path expressions for responses from resolved seriesByTag patterns
for result in results:
if result['name'] == result['pathExpression'] and result['pathExpression'] in pattern_aliases:
for pathExpr in pattern_aliases[result['pathExpression']]:
newresult = deepcopy(result)
newresult['pathExpression'] = pathExpr
results.append(newresult)
log.debug("Got all fetch results for %s in %fs" % (str(patterns), time.time() - start))
return results
def _tag_patterns(self, patterns, requestContext):
tag_patterns = []
pattern_aliases = defaultdict(list)
for pattern in patterns:
# if pattern isn't a seriesByTag call, just add it to the list
if not pattern.startswith('seriesByTag('):
tag_patterns.append(pattern)
continue
# perform the tagdb lookup
exprs = tuple([
t.string[1:-1]
for t in grammar.parseString(pattern).expression.call.args
if t.string
])
taggedSeries = self.tagdb.find_series(exprs, requestContext=requestContext)
if not taggedSeries:
continue
# add to translation table for path matching
for series in taggedSeries:
pattern_aliases[series].append(pattern)
# add to list of resolved patterns
tag_patterns.extend(taggedSeries)
return sorted(set(tag_patterns)), pattern_aliases
def get_index(self, requestContext=None):
log.debug('graphite.storage.Store.get_index :: Starting get_index on all backends')
if not requestContext:
requestContext = {}
context = 'get_index'
jobs = [
Job(finder.get_index, context, requestContext=requestContext)
for finder in self.get_finders(local=requestContext.get('localOnly'))
]
start = time.time()
results = self.wait_jobs(jobs, settings.FETCH_TIMEOUT, context)
results = [i for l in results if l is not None for i in l] # flatten
log.debug("Got all index results in %fs" % (time.time() - start))
return sorted(list(set(results)))
def find(self, pattern, startTime=None, endTime=None, local=False, headers=None, leaves_only=False):
try:
query = FindQuery(
pattern, startTime, endTime,
local=local,
headers=headers,
leaves_only=leaves_only
)
except Exception as e:
raise InputParameterError(
'Failed to instantiate find query: {err}'
.format(err=str(e)))
warn_threshold = settings.METRICS_FIND_WARNING_THRESHOLD
fail_threshold = settings.METRICS_FIND_FAILURE_THRESHOLD
matched_leafs = 0
for match in self._find(query):
if isinstance(match, LeafNode):
matched_leafs += 1
elif leaves_only:
continue
if matched_leafs > fail_threshold:
raise Exception(
("Query %s yields too many results and failed "
"(failure threshold is %d)") % (pattern, fail_threshold))
yield match
if matched_leafs > warn_threshold:
log.warning(
("Query %s yields large number of results up to %d "
"(warning threshold is %d)") % (
pattern, matched_leafs, warn_threshold))
def _find(self, query):
context = 'find %s' % query
jobs = [
Job(finder.find_nodes, context, query)
for finder in self.get_finders(query.local)
]
# Group matching nodes by their path
nodes_by_path = defaultdict(list)
# Start finds
start = time.time()
results = self.wait_jobs(jobs, settings.FIND_TIMEOUT, context)
for result in results:
for node in result or []:
nodes_by_path[node.path].append(node)
log.debug("Got all find results for %s in %fs" % (
str(query), time.time() - start)
)
return self._list_nodes(query, nodes_by_path)
def _list_nodes(self, query, nodes_by_path):
# Reduce matching nodes for each path to a minimal set
found_branch_nodes = set()
items = list(six.iteritems(nodes_by_path))
random.shuffle(items)
for path, nodes in items:
leaf_nodes = []
# First we dispense with the BranchNodes
for node in nodes:
if node.is_leaf:
leaf_nodes.append(node)
# TODO need to filter branch nodes based on requested
# interval... how?!?!?
elif node.path not in found_branch_nodes:
yield node
found_branch_nodes.add(node.path)
leaf_node = self._merge_leaf_nodes(query, path, leaf_nodes)
if leaf_node:
yield leaf_node
def _merge_leaf_nodes(self, query, path, leaf_nodes):
"""Get a single node from a list of leaf nodes."""
if not leaf_nodes:
return None
# Fast-path when there is a single node.
if len(leaf_nodes) == 1:
return leaf_nodes[0]
# Calculate best minimal node set
minimal_node_set = set()
covered_intervals = IntervalSet([])
# If the query doesn't fall entirely within the FIND_TOLERANCE window
# we disregard the window. This prevents unnecessary remote fetches
# caused when carbon's cache skews node.intervals, giving the appearance
# that remote systems have data we don't have locally, when we probably
# do.
now = int(time.time())
tolerance_window = now - settings.FIND_TOLERANCE
disregard_tolerance_window = query.interval.start < tolerance_window
prior_to_window = Interval(float('-inf'), tolerance_window)
def measure_of_added_coverage(
node, drop_window=disregard_tolerance_window):
relevant_intervals = node.intervals.intersect_interval(
query.interval)
if drop_window:
relevant_intervals = relevant_intervals.intersect_interval(
prior_to_window)
return covered_intervals.union(
relevant_intervals).size - covered_intervals.size
nodes_remaining = list(leaf_nodes)
# Prefer local nodes first (and do *not* drop the tolerance window)
for node in leaf_nodes:
if node.local and measure_of_added_coverage(node, False) > 0:
nodes_remaining.remove(node)
minimal_node_set.add(node)
covered_intervals = covered_intervals.union(node.intervals)
if settings.REMOTE_STORE_MERGE_RESULTS:
remote_nodes = [n for n in nodes_remaining if not n.local]
for node in remote_nodes:
nodes_remaining.remove(node)
minimal_node_set.add(node)
covered_intervals = covered_intervals.union(node.intervals)
else:
while nodes_remaining:
node_coverages = [(measure_of_added_coverage(n), n)
for n in nodes_remaining]
best_coverage, best_node = max(node_coverages)
if best_coverage == 0:
break
nodes_remaining.remove(best_node)
minimal_node_set.add(best_node)
covered_intervals = covered_intervals.union(
best_node.intervals)
# Sometimes the requested interval falls within the caching window.
# We include the most likely node if the gap is within
# tolerance.
if not minimal_node_set:
def distance_to_requested_interval(node):
if not node.intervals:
return float('inf')
latest = sorted(
node.intervals, key=lambda i: i.end)[-1]
distance = query.interval.start - latest.end
return distance if distance >= 0 else float('inf')
best_candidate = min(
leaf_nodes, key=distance_to_requested_interval)
if distance_to_requested_interval(
best_candidate) <= settings.FIND_TOLERANCE:
minimal_node_set.add(best_candidate)
if not minimal_node_set:
return None
elif len(minimal_node_set) == 1:
return minimal_node_set.pop()
else:
reader = MultiReader(minimal_node_set)
return LeafNode(path, reader)
def tagdb_auto_complete_tags(self, exprs, tagPrefix=None, limit=None, requestContext=None):
log.debug(
'graphite.storage.Store.auto_complete_tags :: Starting lookup on all backends')
if requestContext is None:
requestContext = {}
context = 'tags for %s %s' % (str(exprs), tagPrefix or '')
jobs = []
use_tagdb = False
for finder in self.get_finders(requestContext.get('localOnly')):
if getattr(finder, 'tags', False):
job = Job(
finder.auto_complete_tags, context,
exprs, tagPrefix=tagPrefix,
limit=limit, requestContext=requestContext
)
jobs.append(job)
else:
use_tagdb = True
results = set()
# if we're using the local tagdb then execute it (in the main thread
# so that LocalDatabaseTagDB will work)
if use_tagdb:
results.update(self.tagdb.auto_complete_tags(
exprs, tagPrefix=tagPrefix,
limit=limit, requestContext=requestContext
))
# Start fetches
start = time.time()
for result in self.wait_jobs(jobs, settings.FIND_TIMEOUT, context):
results.update(result)
# sort & limit results
results = sorted(results)
if limit:
results = results[:int(limit)]
log.debug("Got all autocomplete %s in %fs" % (
context, time.time() - start)
)
return results
def tagdb_auto_complete_values(self, exprs, tag, valuePrefix=None, limit=None, requestContext=None):
log.debug(
'graphite.storage.Store.auto_complete_values :: Starting lookup on all backends')
if requestContext is None:
requestContext = {}
context = 'values for %s %s %s' % (str(exprs), tag, valuePrefix or '')
jobs = []
use_tagdb = False
for finder in self.get_finders(requestContext.get('localOnly')):
if getattr(finder, 'tags', False):
job = Job(
finder.auto_complete_values, context,
exprs, tag, valuePrefix=valuePrefix,
limit=limit, requestContext=requestContext
)
jobs.append(job)
else:
use_tagdb = True
# start finder jobs
start = time.time()
results = set()
# if we're using the local tagdb then execute it (in the main thread
# so that LocalDatabaseTagDB will work)
if use_tagdb:
results.update(self.tagdb.auto_complete_values(
exprs, tag, valuePrefix=valuePrefix,
limit=limit, requestContext=requestContext
))
for result in self.wait_jobs(jobs, settings.FIND_TIMEOUT, context):
results.update(result)
# sort & limit results
results = sorted(results)
if limit:
results = results[:int(limit)]
log.debug("Got all autocomplete %s in %fs" % (
context, time.time() - start)
)
return results
def extractForwardHeaders(request):
headers = {}
for name in settings.REMOTE_STORE_FORWARD_HEADERS:
value = request.META.get('HTTP_%s' % name.upper().replace('-', '_'))
if value is not None:
headers[name] = value
return headers
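# Illustrative sketch, not part of the original module: Django exposes an
# incoming 'X-Request-Id' header as META['HTTP_X_REQUEST_ID'], so with
# settings.REMOTE_STORE_FORWARD_HEADERS = ['X-Request-Id'] (a hypothetical
# configuration) the function returns {'X-Request-Id': 'abc123'}.
def _demo_extract_forward_headers():
    class _FakeRequest(object):
        META = {'HTTP_X_REQUEST_ID': 'abc123'}
    return extractForwardHeaders(_FakeRequest())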
def write_index(index=None):
if not index:
index = settings.INDEX_FILE
try:
fd, tmp = mkstemp()
try:
tmp_index = os.fdopen(fd, 'wt')
for metric in STORE.get_index():
tmp_index.write("{0}\n".format(metric))
finally:
tmp_index.close()
move(tmp, index)
finally:
try:
os.unlink(tmp)
except OSError:
pass
return None
STORE = Store()
# --- file: webapp/graphite/storage.py | repo: criteo-forks/graphite-web | license: apache-2.0 ---
from altair.vega import SCHEMA_VERSION, SCHEMA_URL
def test_schema_version():
assert SCHEMA_VERSION in SCHEMA_URL
# --- file: altair/vega/tests/test_import.py | repo: altair-viz/altair | license: bsd-3-clause ---
"""Test logic for setting nMinimumChainWork on command line.
Nodes don't consider themselves out of "initial block download" until
their active chain has more work than nMinimumChainWork.
Nodes don't download blocks from a peer unless the peer's best known block
has more work than nMinimumChainWork.
While in initial block download, nodes won't relay blocks to their peers, so
test that this parameter functions as intended by verifying that block relay
only succeeds past a given node once its nMinimumChainWork has been exceeded.
"""
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, connect_nodes
# 2 hashes required per regtest block (with no difficulty adjustment)
REGTEST_WORK_PER_BLOCK = 2
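# Worked example with this test's numbers: node1 sets minimumchainwork to
# 0x65 (= 101). Genesis already contributes 2 units of work, so node0 must
# mine int((101 - 2) / 2) = 49 blocks before node1's threshold is exceeded.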
class MinimumChainWorkTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [[], ["-minimumchainwork=0x65"],
["-minimumchainwork=0x65"]]
self.node_min_work = [0, 101, 101]
def setup_network(self):
# This test relies on the chain setup being:
# node0 <- node1 <- node2
# Before leaving IBD, nodes prefer to download blocks from outbound
# peers, so ensure that we're mining on an outbound peer and testing
# block relay to inbound peers.
self.setup_nodes()
for i in range(self.num_nodes - 1):
connect_nodes(self.nodes[i + 1], self.nodes[i])
def run_test(self):
# Start building a chain on node0. node2 shouldn't be able to sync until node1's
# minchainwork is exceeded
starting_chain_work = REGTEST_WORK_PER_BLOCK # Genesis block's work
self.log.info(
"Testing relay across node {} (minChainWork = {})".format(
1, self.node_min_work[1]))
starting_blockcount = self.nodes[2].getblockcount()
num_blocks_to_generate = int(
(self.node_min_work[1] - starting_chain_work) / REGTEST_WORK_PER_BLOCK)
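        # For example, with -minimumchainwork=0x65 (decimal 101), two hashes
        # of work per regtest block and the genesis block contributing 2:
        # int((101 - 2) / 2) = 49 blocks, leaving the chain work at 100,
        # one short of node1's threshold.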
self.log.info("Generating {} blocks on node0".format(
num_blocks_to_generate))
hashes = self.nodes[0].generatetoaddress(num_blocks_to_generate,
self.nodes[0].get_deterministic_priv_key().address)
self.log.info("Node0 current chain work: {}".format(
self.nodes[0].getblockheader(hashes[-1])['chainwork']))
# Sleep a few seconds and verify that node2 didn't get any new blocks
# or headers. We sleep, rather than sync_blocks(node0, node1) because
# it's reasonable either way for node1 to get the blocks, or not get
# them (since they're below node1's minchainwork).
time.sleep(3)
self.log.info("Verifying node 2 has no more blocks than before")
self.log.info("Blockcounts: {}".format(
[n.getblockcount() for n in self.nodes]))
# Node2 shouldn't have any new headers yet, because node1 should not
# have relayed anything.
assert_equal(len(self.nodes[2].getchaintips()), 1)
assert_equal(self.nodes[2].getchaintips()[0]['height'], 0)
assert self.nodes[1].getbestblockhash(
) != self.nodes[0].getbestblockhash()
assert_equal(self.nodes[2].getblockcount(), starting_blockcount)
self.log.info("Generating one more block")
self.nodes[0].generatetoaddress(
1, self.nodes[0].get_deterministic_priv_key().address)
self.log.info("Verifying nodes are all synced")
# Because nodes in regtest are all manual connections (eg using
# addnode), node1 should not have disconnected node0. If not for that,
# we'd expect node1 to have disconnected node0 for serving an
# insufficient work chain, in which case we'd need to reconnect them to
# continue the test.
self.sync_all()
self.log.info("Blockcounts: {}".format(
[n.getblockcount() for n in self.nodes]))
if __name__ == '__main__':
MinimumChainWorkTest().main()
| {
"content_hash": "563445b67b072bcb69b4da4cc495620d",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 100,
"avg_line_length": 42.7319587628866,
"alnum_prop": 0.6463208685162847,
"repo_name": "cculianu/bitcoin-abc",
"id": "c3b46513274a06b98a0ffbff85e2147a607f095f",
"size": "4354",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/functional/feature_minchainwork.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28456"
},
{
"name": "C",
"bytes": "676074"
},
{
"name": "C++",
"bytes": "5385212"
},
{
"name": "HTML",
"bytes": "20970"
},
{
"name": "Java",
"bytes": "30290"
},
{
"name": "M4",
"bytes": "192408"
},
{
"name": "Makefile",
"bytes": "112555"
},
{
"name": "Objective-C",
"bytes": "123566"
},
{
"name": "Objective-C++",
"bytes": "7251"
},
{
"name": "PHP",
"bytes": "4085"
},
{
"name": "Python",
"bytes": "1027736"
},
{
"name": "QMake",
"bytes": "756"
},
{
"name": "Ruby",
"bytes": "740"
},
{
"name": "Shell",
"bytes": "59432"
}
],
"symlink_target": ""
} |
""" Test for the pillow example """
import os
import multiprocessing
import shutil
# Provide access to the helper scripts
def modify_path():
scripts_dir = os.path.dirname(__file__)
    while 'Scripts' not in os.listdir(scripts_dir):
scripts_dir = os.path.abspath(os.path.join(scripts_dir, '..'))
scripts_dir = os.path.join(scripts_dir, 'Scripts')
    if scripts_dir not in os.environ['PATH']:
        os.environ['PATH'] += os.pathsep + scripts_dir
    print('\nPATH = {}\n'.format(os.environ['PATH']))
def run():
# The example is run for four element types
eltyps={"S8":"qu8",
"S8R":"qu8r",
"S4":"qu4",
"S4R":"qu4r"}
# read the template fbd file
    with open("run.fbd", "r") as f:
        lines = f.readlines()
# loop over element types
for elty in eltyps.keys():
# open results summary file
        print(elty)
# read pre.fbd and write it to pre-auto.fbd
fout = open("run_auto.fbd", "w")
fout.write("ulin "+elty+"\n")
for line in lines:
# set element type
if line.startswith("valu Etyp"):
line="valu Etyp "+eltyps[elty]+"\n"
fout.write(line)
fout.write("quit\n")
fout.close()
# run run_auto.fbd (preprocessing, solving and postprocessing)
os.system("cgx -b run_auto.fbd")
# store the images.
os.system("monitor.py static")
os.system("mv expanded.png Refs/expanded-"+elty.lower()+".png")
os.system("mv cuty0.png Refs/cuty0-"+elty.lower()+".png")
os.system("mv static.png Refs/static-"+elty.lower()+".png")
# Move new files and folders to 'Refs'
def move(old_snap):
new_snap = os.listdir(os.curdir)
if not os.path.exists('Refs'):
os.mkdir('Refs')
for f in new_snap:
        if f not in old_snap:
fname = os.path.basename(f)
new_name = os.path.join(os.curdir, 'Refs', fname)
if os.path.isfile(new_name):
os.remove(new_name)
if os.path.isdir(new_name):
shutil.rmtree(new_name)
os.rename(f, new_name)
if __name__ == '__main__':
# Enable multithreading for ccx
os.environ['OMP_NUM_THREADS'] = str(multiprocessing.cpu_count())
# Explicitly move to example's directory
os.chdir(os.path.dirname(__file__))
# Run the example
modify_path()
snap = os.listdir(os.curdir)
run()
move(snap)
| {
"content_hash": "2d69e81fd2d46d5db826def3ef1fa314",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 71,
"avg_line_length": 30.8875,
"alnum_prop": 0.5742614326183731,
"repo_name": "mkraska/CalculiX-Examples",
"id": "68d9c4fcde4b7c77219908891530d350af1fb42e",
"size": "2490",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Pillow/test.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "6479"
},
{
"name": "GLSL",
"bytes": "3414"
},
{
"name": "Gnuplot",
"bytes": "10112"
},
{
"name": "Makefile",
"bytes": "6802"
},
{
"name": "NASL",
"bytes": "1378"
},
{
"name": "Python",
"bytes": "115410"
}
],
"symlink_target": ""
} |
"""
Sensor for displaying the number of results on Shodan.io.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.shodan/
"""
import logging
from datetime import timedelta
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_API_KEY, CONF_NAME
from homeassistant.helpers.entity import Entity
REQUIREMENTS = ['shodan==1.7.4']
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by Shodan"
CONF_QUERY = 'query'
DEFAULT_NAME = 'Shodan Sensor'
ICON = 'mdi:tooltip-text'
SCAN_INTERVAL = timedelta(minutes=15)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_QUERY): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
})
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the Shodan sensor."""
import shodan
api_key = config.get(CONF_API_KEY)
name = config.get(CONF_NAME)
query = config.get(CONF_QUERY)
data = ShodanData(shodan.Shodan(api_key), query)
try:
data.update()
except shodan.exception.APIError as error:
_LOGGER.warning("Unable to connect to Shodan.io: %s", error)
return False
add_devices([ShodanSensor(data, name)], True)
class ShodanSensor(Entity):
"""Representation of the Shodan sensor."""
def __init__(self, data, name):
"""Initialize the Shodan sensor."""
self.data = data
self._name = name
self._state = None
self._unit_of_measurement = 'Hits'
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
}
def update(self):
"""Get the latest data and updates the states."""
self.data.update()
self._state = self.data.details['total']
class ShodanData(object):
"""Get the latest data and update the states."""
def __init__(self, api, query):
"""Initialize the data object."""
self._api = api
self._query = query
self.details = None
def update(self):
"""Get the latest data from shodan.io."""
self.details = self._api.count(self._query)
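# A minimal sketch of the YAML needed to enable this sensor (the key and
# query below are placeholders, not working values):
#
# sensor:
#   - platform: shodan
#     api_key: YOUR_API_KEY
#     query: 'port:22 country:DE'
#     name: SSH servers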
| {
"content_hash": "fb90269166edff8da699cb0d976e2b54",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 74,
"avg_line_length": 26.318181818181817,
"alnum_prop": 0.6490500863557859,
"repo_name": "ct-23/home-assistant",
"id": "c95d975ec4777bcf156cf9082039c3790dcf11de",
"size": "2895",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/sensor/shodan.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13788"
},
{
"name": "HTML",
"bytes": "1686761"
},
{
"name": "JavaScript",
"bytes": "15192"
},
{
"name": "Python",
"bytes": "7310847"
},
{
"name": "Ruby",
"bytes": "517"
},
{
"name": "Shell",
"bytes": "15154"
}
],
"symlink_target": ""
} |
import os
import sys
import yaml
import yoci_config
import logging
import logging.config
DEFAULT_BASE_LOGGING_LEVEL = logging.INFO
DEFAULT_VERBOSE_LOGGING_LEVEL = logging.DEBUG
DEFAULT_CONFIG_FILE = 'config.yml'
def init_logger(base_level=DEFAULT_BASE_LOGGING_LEVEL,
verbose_level=DEFAULT_VERBOSE_LOGGING_LEVEL,
logging_config=None):
"""initializes a base logger
you can use this to init a logger in any of your files.
    this will use yoci_config.py's LOGGER param and logging.dictConfig to
    configure the logger for you.
    :param int|logging.LEVEL base_level: desired base logging level
    :param int|logging.LEVEL verbose_level: desired verbose logging level
    :param dict logging_config: dictConfig based configuration,
     used to override the default configuration from yoci_config.py
:rtype: `python logger`
"""
    logging_config = logging_config or yoci_config.LOGGER
# TODO: (IMPRV) only perform file related actions if file handler is
# TODO: (IMPRV) defined.
log_dir = os.path.expanduser(
os.path.dirname(
yoci_config.LOGGER['handlers']['file']['filename']))
if os.path.isfile(log_dir):
sys.exit('file {0} exists - log directory cannot be created '
'there. please remove the file and try again.'
.format(log_dir))
    logfile = yoci_config.LOGGER['handlers']['file']['filename']
    try:
        d = os.path.dirname(logfile)
        if d and not os.path.exists(d):
            os.makedirs(d)
logging.config.dictConfig(logging_config)
lgr = logging.getLogger('user')
        lgr.setLevel(base_level)
return lgr
except ValueError as e:
sys.exit('could not initialize logger.'
' verify your logger config'
' and permissions to write to {0} ({1})'
.format(logfile, e))
lgr = init_logger()
def import_config(config_file):
"""returns a configuration object
:param string config_file: path to config file
"""
# get config file path
config_file = config_file or os.path.join(os.getcwd(), DEFAULT_CONFIG_FILE)
lgr.debug('config file is: {}'.format(config_file))
# append to path for importing
# sys.path.append(os.path.dirname(config_file))
try:
lgr.debug('importing dict...')
with open(config_file, 'r') as c:
return yaml.safe_load(c.read())
# TODO: (IMPRV) remove from path after importing
    except IOError as ex:
        lgr.error(str(ex))
        raise RuntimeError('cannot access config file')
    except yaml.YAMLError:
        lgr.error('config file syntax is malformatted. please fix '
                  'any syntax errors you might have and try again.')
        raise RuntimeError('bad config file')
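# Typical use (the path is illustrative):
#
#     conf = import_config('/etc/yoci/config.yml')
#
# Passing a falsy path falls back to ./config.yml (DEFAULT_CONFIG_FILE).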
| {
"content_hash": "6e0fa872ac8e93198f1882ff2c0b1be5",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 79,
"avg_line_length": 35.91860465116279,
"alnum_prop": 0.6448688896082875,
"repo_name": "cloudify-cosmo/yo-ci",
"id": "e6a4f35d2317ca7856b7aa481392f280a6d88b56",
"size": "3089",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "yoci/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "1151"
},
{
"name": "Python",
"bytes": "28374"
},
{
"name": "Ruby",
"bytes": "2356"
}
],
"symlink_target": ""
} |
import os
import sys
# Edit this if necessary or override the variable in your environment.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'verese.settings')
try:
# For local development in a virtualenv:
from funfactory import manage
except ImportError:
# Production:
# Add a temporary path so that we can import the funfactory
tmp_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'vendor', 'src', 'funfactory')
sys.path.append(tmp_path)
from funfactory import manage
# Let the path magic happen in setup_environ() !
sys.path.remove(tmp_path)
manage.setup_environ(__file__, more_pythonic=True)
if __name__ == "__main__":
manage.main()
| {
"content_hash": "765c5e5afa3a55c817b61c330dd11ff6",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 71,
"avg_line_length": 27.923076923076923,
"alnum_prop": 0.6707988980716253,
"repo_name": "vereseproject/verese",
"id": "47ba94219f064a5fdb8a8f0efb3a46578d946bf0",
"size": "748",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "4587"
},
{
"name": "Puppet",
"bytes": "6653"
},
{
"name": "Python",
"bytes": "23346"
},
{
"name": "Ruby",
"bytes": "1462"
},
{
"name": "Shell",
"bytes": "3065"
}
],
"symlink_target": ""
} |
from django import forms
from django.utils.translation import ugettext as _
from .models import Run
class RunInputForm(forms.ModelForm):
distance = forms.FloatField(label=_("Distance (km)"),
widget=forms.TextInput(),
localize=True)
start_date = forms.DateField(label=_("Run date"),
widget=forms.DateInput(attrs={
'id': 'start_datepicker',
'autocomplete': "off"}))
end_date = forms.DateField(label=_("End date (if entering multiple runs)"),
required=False,
widget=forms.DateInput(
attrs={'id': 'end_datepicker',
'autocomplete': "off"})
)
recorded_time = forms.DurationField(label=_("Time (HH:MM:SS, optional)"),
required=False)
class Meta:
model = Run
fields = ['distance', 'start_date', 'end_date', 'recorded_time']
def is_valid(self):
valid = super(RunInputForm, self).is_valid()
if not valid:
return valid
if self.cleaned_data['distance'] < 0:
self.add_error('distance', 'Distance cannot be negative')
valid = False
return valid
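# A field-level clean method would be the more conventional home for the
# negative-distance check; an equivalent sketch (not part of the original
# form):
#
#     def clean_distance(self):
#         distance = self.cleaned_data['distance']
#         if distance < 0:
#             raise forms.ValidationError('Distance cannot be negative')
#         return distance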
| {
"content_hash": "7e04b437c7bd17db2a58ecbd48219264",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 79,
"avg_line_length": 36.64102564102564,
"alnum_prop": 0.47725682295311406,
"repo_name": "Socialsquare/RunningCause",
"id": "3626ae593f49584cb12a3f150732cb7739cb4813",
"size": "1445",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "runs/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "32981"
},
{
"name": "HTML",
"bytes": "97326"
},
{
"name": "JavaScript",
"bytes": "50418"
},
{
"name": "Python",
"bytes": "132614"
},
{
"name": "Shell",
"bytes": "61"
}
],
"symlink_target": ""
} |
"""
Support for tracking the proximity of a device.
Component to monitor the proximity of devices to a particular zone and the
direction of travel.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/proximity/
"""
import logging
import voluptuous as vol
from homeassistant.const import (
CONF_ZONE, CONF_DEVICES, CONF_UNIT_OF_MEASUREMENT)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import track_state_change
from homeassistant.util.distance import convert
from homeassistant.util.location import distance
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
ATTR_DIR_OF_TRAVEL = 'dir_of_travel'
ATTR_DIST_FROM = 'dist_to_zone'
ATTR_NEAREST = 'nearest'
CONF_IGNORED_ZONES = 'ignored_zones'
CONF_TOLERANCE = 'tolerance'
DEFAULT_DIR_OF_TRAVEL = 'not set'
DEFAULT_DIST_TO_ZONE = 'not set'
DEFAULT_NEAREST = 'not set'
DEFAULT_PROXIMITY_ZONE = 'home'
DEFAULT_TOLERANCE = 1
DEPENDENCIES = ['zone', 'device_tracker']
DOMAIN = 'proximity'
UNITS = ['km', 'm', 'mi', 'ft']
ZONE_SCHEMA = vol.Schema({
vol.Optional(CONF_ZONE, default=DEFAULT_PROXIMITY_ZONE): cv.string,
vol.Optional(CONF_DEVICES, default=[]):
vol.All(cv.ensure_list, [cv.entity_id]),
vol.Optional(CONF_IGNORED_ZONES, default=[]):
vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_TOLERANCE, default=DEFAULT_TOLERANCE): cv.positive_int,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): vol.All(cv.string, vol.In(UNITS)),
})
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
cv.slug: ZONE_SCHEMA,
}),
}, extra=vol.ALLOW_EXTRA)
def setup_proximity_component(hass, name, config):
"""Set up individual proximity component."""
ignored_zones = config.get(CONF_IGNORED_ZONES)
proximity_devices = config.get(CONF_DEVICES)
tolerance = config.get(CONF_TOLERANCE)
proximity_zone = name
unit_of_measurement = config.get(
CONF_UNIT_OF_MEASUREMENT, hass.config.units.length_unit)
zone_id = 'zone.{}'.format(proximity_zone)
proximity = Proximity(hass, proximity_zone, DEFAULT_DIST_TO_ZONE,
DEFAULT_DIR_OF_TRAVEL, DEFAULT_NEAREST,
ignored_zones, proximity_devices, tolerance,
zone_id, unit_of_measurement)
proximity.entity_id = '{}.{}'.format(DOMAIN, proximity_zone)
proximity.update_ha_state()
track_state_change(
hass, proximity_devices, proximity.check_proximity_state_change)
return True
def setup(hass, config):
"""Get the zones and offsets from configuration.yaml."""
for zone, proximity_config in config[DOMAIN].items():
setup_proximity_component(hass, zone, proximity_config)
return True
class Proximity(Entity):
"""Representation of a Proximity."""
def __init__(self, hass, zone_friendly_name, dist_to, dir_of_travel,
nearest, ignored_zones, proximity_devices, tolerance,
proximity_zone, unit_of_measurement):
"""Initialize the proximity."""
self.hass = hass
self.friendly_name = zone_friendly_name
self.dist_to = dist_to
self.dir_of_travel = dir_of_travel
self.nearest = nearest
self.ignored_zones = ignored_zones
self.proximity_devices = proximity_devices
self.tolerance = tolerance
self.proximity_zone = proximity_zone
self._unit_of_measurement = unit_of_measurement
@property
def name(self):
"""Return the name of the entity."""
return self.friendly_name
@property
def state(self):
"""Return the state."""
return self.dist_to
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity."""
return self._unit_of_measurement
@property
def state_attributes(self):
"""Return the state attributes."""
return {
ATTR_DIR_OF_TRAVEL: self.dir_of_travel,
ATTR_NEAREST: self.nearest,
}
def check_proximity_state_change(self, entity, old_state, new_state):
"""Function to perform the proximity checking."""
entity_name = new_state.name
devices_to_calculate = False
devices_in_zone = ''
zone_state = self.hass.states.get(self.proximity_zone)
proximity_latitude = zone_state.attributes.get('latitude')
proximity_longitude = zone_state.attributes.get('longitude')
# Check for devices in the monitored zone.
for device in self.proximity_devices:
device_state = self.hass.states.get(device)
if device_state is None:
devices_to_calculate = True
continue
if device_state.state not in self.ignored_zones:
devices_to_calculate = True
# Check the location of all devices.
                if device_state.state.lower() == self.friendly_name.lower():
device_friendly = device_state.name
if devices_in_zone != '':
devices_in_zone = devices_in_zone + ', '
devices_in_zone = devices_in_zone + device_friendly
# No-one to track so reset the entity.
if not devices_to_calculate:
self.dist_to = 'not set'
self.dir_of_travel = 'not set'
self.nearest = 'not set'
self.update_ha_state()
return
# At least one device is in the monitored zone so update the entity.
if devices_in_zone != '':
self.dist_to = 0
self.dir_of_travel = 'arrived'
self.nearest = devices_in_zone
self.update_ha_state()
return
# We can't check proximity because latitude and longitude don't exist.
if 'latitude' not in new_state.attributes:
return
# Collect distances to the zone for all devices.
distances_to_zone = {}
for device in self.proximity_devices:
# Ignore devices in an ignored zone.
device_state = self.hass.states.get(device)
if device_state.state in self.ignored_zones:
continue
# Ignore devices if proximity cannot be calculated.
if 'latitude' not in device_state.attributes:
continue
# Calculate the distance to the proximity zone.
dist_to_zone = distance(proximity_latitude,
proximity_longitude,
device_state.attributes['latitude'],
device_state.attributes['longitude'])
# Add the device and distance to a dictionary.
distances_to_zone[device] = round(
convert(dist_to_zone, 'm', self.unit_of_measurement), 1)
# Loop through each of the distances collected and work out the
# closest.
closest_device = None # type: str
dist_to_zone = None # type: float
for device in distances_to_zone:
if not dist_to_zone or distances_to_zone[device] < dist_to_zone:
closest_device = device
dist_to_zone = distances_to_zone[device]
# If the closest device is one of the other devices.
if closest_device != entity:
self.dist_to = round(distances_to_zone[closest_device])
self.dir_of_travel = 'unknown'
device_state = self.hass.states.get(closest_device)
self.nearest = device_state.name
self.update_ha_state()
return
# Stop if we cannot calculate the direction of travel (i.e. we don't
# have a previous state and a current LAT and LONG).
if old_state is None or 'latitude' not in old_state.attributes:
self.dist_to = round(distances_to_zone[entity])
self.dir_of_travel = 'unknown'
self.nearest = entity_name
self.update_ha_state()
return
# Reset the variables
distance_travelled = 0
# Calculate the distance travelled.
old_distance = distance(proximity_latitude, proximity_longitude,
old_state.attributes['latitude'],
old_state.attributes['longitude'])
new_distance = distance(proximity_latitude, proximity_longitude,
new_state.attributes['latitude'],
new_state.attributes['longitude'])
distance_travelled = round(new_distance - old_distance, 1)
# Check for tolerance
if distance_travelled < self.tolerance * -1:
direction_of_travel = 'towards'
elif distance_travelled > self.tolerance:
direction_of_travel = 'away_from'
else:
direction_of_travel = 'stationary'
# Update the proximity entity
self.dist_to = round(dist_to_zone)
self.dir_of_travel = direction_of_travel
self.nearest = entity_name
self.update_ha_state()
_LOGGER.debug('proximity.%s update entity: distance=%s: direction=%s: '
'device=%s', self.friendly_name, round(dist_to_zone),
direction_of_travel, entity_name)
_LOGGER.info('%s: proximity calculation complete', entity_name)
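# A minimal sketch of the configuration this component expects (zone and
# device names are placeholders):
#
# proximity:
#   home:
#     zone: home
#     ignored_zones:
#       - work
#     devices:
#       - device_tracker.my_phone
#     tolerance: 50
#     unit_of_measurement: km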
| {
"content_hash": "da4af3768c7b292bef63808b39ee9484",
"timestamp": "",
"source": "github",
"line_count": 258,
"max_line_length": 79,
"avg_line_length": 36.554263565891475,
"alnum_prop": 0.612342275474499,
"repo_name": "robjohnson189/home-assistant",
"id": "73e72149b3714f8e458a9d3a7a815a7c3a462600",
"size": "9431",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/proximity.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1362685"
},
{
"name": "Python",
"bytes": "3499625"
},
{
"name": "Ruby",
"bytes": "379"
},
{
"name": "Shell",
"bytes": "7255"
}
],
"symlink_target": ""
} |
import pytest
from . import db
from .db import database
from tagtrain import data
def test_unknown_user(database):
with pytest.raises(data.Group.DoesNotExist):
data.by_owner.blacklist_user('non-existent', 'blockee', 'permalink', db.GROUP_NAME)
def test_unknown_group(database):
with pytest.raises(data.Group.DoesNotExist):
data.by_owner.blacklist_user(db.OWNER_NAME, 'blockee', 'permalink', 'non-existent')
def test_existing_blanket(database):
with pytest.raises(data.by_owner.BlanketBlackList):
data.by_owner.blacklist_user('user2', 'blockee', 'permalink', 'group2')
def test_existing_blacklist(database):
PERMALINK = '123'
blacklist, created = data.by_owner.blacklist_user(db.OWNER_NAME, 'blockee', PERMALINK, db.GROUP_NAME)
assert created is False
assert blacklist.perma_proof != PERMALINK
def test_good_blanket(database):
OWNER_NAME = db.OWNER_NAME
MEMBER_NAME = 'four'
PERMALINK = 'my123'
bls = list(data.by_owner.find_blacklists(OWNER_NAME, MEMBER_NAME))
assert len(bls) == 0
groups = list(data.by_member.find_groups(MEMBER_NAME))
assert len(groups) == 4
bl, created = data.by_owner.blacklist_user(OWNER_NAME, MEMBER_NAME, PERMALINK)
assert created is True
assert bl.owner_reddit_name == OWNER_NAME
assert bl.blocked_reddit_name == MEMBER_NAME
assert bl.group is None
assert bl.perma_proof == PERMALINK
bls = list(data.by_owner.find_blacklists(OWNER_NAME, MEMBER_NAME))
assert len(bls) == 1
groups = list(data.by_member.find_groups(MEMBER_NAME))
assert len(groups) == 1
def test_good_group1(database):
OWNER_NAME = db.OWNER_NAME
MEMBER_NAME = 'blockee'
GROUP_NAME = 'group3'
PERMALINK = 'my123'
bls = list(data.by_owner.find_blacklists(OWNER_NAME, MEMBER_NAME))
assert len(bls) == 2
bl, created = data.by_owner.blacklist_user(OWNER_NAME, MEMBER_NAME, PERMALINK, GROUP_NAME)
assert created is True
assert bl.owner_reddit_name == OWNER_NAME
assert bl.blocked_reddit_name == MEMBER_NAME
assert bl.group is not None
assert bl.group.name == GROUP_NAME
assert bl.perma_proof == PERMALINK
bls = list(data.by_owner.find_blacklists(OWNER_NAME, MEMBER_NAME))
assert len(bls) == 3
def test_good_group_delete(database):
OWNER_NAME = db.OWNER_NAME
MEMBER_NAME = 'four'
GROUP_NAME = 'group3'
PERMALINK = 'my123'
bls = list(data.by_owner.find_blacklists(OWNER_NAME, MEMBER_NAME))
assert len(bls) == 0
groups = list(data.by_member.find_groups(MEMBER_NAME))
assert len(groups) == 4
bl, created = data.by_owner.blacklist_user(OWNER_NAME, MEMBER_NAME, PERMALINK, GROUP_NAME)
assert created is True
assert bl.owner_reddit_name == OWNER_NAME
assert bl.blocked_reddit_name == MEMBER_NAME
assert bl.group is not None
assert bl.group.name == GROUP_NAME
assert bl.perma_proof == PERMALINK
bls = list(data.by_owner.find_blacklists(OWNER_NAME, MEMBER_NAME))
assert len(bls) == 1
groups = list(data.by_member.find_groups(MEMBER_NAME))
assert len(groups) == 3
| {
"content_hash": "09214a567f631109550f045f77e80141",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 105,
"avg_line_length": 30.61764705882353,
"alnum_prop": 0.6871597822606468,
"repo_name": "c17r/TagTrain",
"id": "458a53d32ff0de663a7ae49361d01363d9917848",
"size": "3123",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/data/test_bo_blacklist_user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "770"
},
{
"name": "Python",
"bytes": "102883"
},
{
"name": "Shell",
"bytes": "591"
}
],
"symlink_target": ""
} |
import json
"""Python class to hold message data"""
class Message(object):
"""
Holder for message attributes used by python IoAdapter send/receive.
Interface is like proton.Message, but we don't use proton.Message here because
creating a proton.Message has side-effects on the proton engine.
@ivar body: The body of the message, normally a map.
    @ivar address: The to-address for the message.
    @ivar reply_to: The reply-to address for the message.
    @ivar correlation_id: Correlation ID for replying to the message.
    @ivar properties: Application properties.
    @ivar content_type: Content type of the message body.
    """
_fields = ['address', 'properties', 'body', 'reply_to', 'correlation_id', 'content_type']
def __init__(self, **kwds):
"""All instance variables can be set as keywords. See L{Message}"""
for f in self._fields:
setattr(self, f, kwds.get(f, None))
for k in kwds:
getattr(self, k) # Check for bad attributes
def __repr__(self):
return "%s(%s)" % (type(self).__name__,
", ".join("%s=%r" % (f, getattr(self, f)) for f in self._fields))
def simplify(msg):
m = {}
for k, v in msg.properties.items():
m[k] = v
if msg.body:
m["body"] = msg.body.decode()
if msg.content_type:
m["content_type"] = msg.content_type
return m
def messages_to_json(msgs):
return json.dumps([simplify(m) for m in msgs], indent=4)
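if __name__ == '__main__':
    # A minimal round-trip sketch; every field value below is made up for
    # illustration and is not a real router address or property.
    demo = Message(address='amqp:/_local/example',
                   properties={'opcode': 'HELLO'},
                   body=b'{"id": 1}',
                   content_type='application/json')
    print(messages_to_json([demo]))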
| {
"content_hash": "48211dddbef901532edc1f7f495f546a",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 93,
"avg_line_length": 31.456521739130434,
"alnum_prop": 0.6095369730476848,
"repo_name": "ErnieAllen/qpid-dispatch",
"id": "3301a69fc80733cb1ad2b4070ea7f7f0cd2302e0",
"size": "2235",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "python/qpid_dispatch_internal/router/message.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2828222"
},
{
"name": "C++",
"bytes": "354723"
},
{
"name": "CMake",
"bytes": "57539"
},
{
"name": "CSS",
"bytes": "49129"
},
{
"name": "Dockerfile",
"bytes": "3323"
},
{
"name": "HTML",
"bytes": "2320"
},
{
"name": "JavaScript",
"bytes": "733506"
},
{
"name": "Python",
"bytes": "2734937"
},
{
"name": "Shell",
"bytes": "34107"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages
setup(
name='zseqfile',
description="Library to simplify sequential access to compressed files",
author="Wouter Bolsterlee",
author_email="[email protected]",
url="https://github.com/wbolster/zseqfile",
version='0.1',
packages=find_packages(),
license="BSD License",
classifiers=[
"License :: OSI Approved :: BSD License",
]
)
| {
"content_hash": "4b2eef080ebfec3af1c28983371b8f02",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 76,
"avg_line_length": 27.733333333333334,
"alnum_prop": 0.6658653846153846,
"repo_name": "wbolster/zseqfile",
"id": "265fe54a9cf24b8454f913a00b9fedbbe256b723",
"size": "416",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "10562"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
connection = db._get_connection()
cursor = connection.cursor()
try:
cursor.execute('select site_description from preferences_generalpreferences')
connection.close()
        except Exception:
connection.close()
db.add_column('preferences_generalpreferences', 'site_description', self.gf('django.db.models.fields.CharField')(max_length=512, null=True, blank=True), keep_default=False)
def backwards(self, orm):
db.delete_column('preferences_generalpreferences', 'site_description')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'category.category': {
'Meta': {'ordering': "('title',)", 'object_name': 'Category'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['category.Category']", 'null': 'True', 'blank': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
'subtitle': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'category.tag': {
'Meta': {'ordering': "('title',)", 'object_name': 'Tag'},
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['category.Category']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'comments.comment': {
'Meta': {'ordering': "('submit_date',)", 'object_name': 'Comment', 'db_table': "'django_comments'"},
'comment': ('django.db.models.fields.TextField', [], {'max_length': '256'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'content_type_set_for_comment'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_removed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_pk': ('django.db.models.fields.TextField', [], {}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'submit_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comment_comments'", 'null': 'True', 'to': "orm['auth.User']"}),
'user_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'user_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'user_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'foundry.blogpost': {
'Meta': {'ordering': "('-created',)", 'object_name': 'BlogPost', '_ormbases': ['jmbo.ModelBase']},
'content': ('ckeditor.fields.RichTextField', [], {}),
'modelbase_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['jmbo.ModelBase']", 'unique': 'True', 'primary_key': 'True'})
},
'foundry.chatroom': {
'Meta': {'ordering': "('-created',)", 'object_name': 'ChatRoom', '_ormbases': ['jmbo.ModelBase']},
'modelbase_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['jmbo.ModelBase']", 'unique': 'True', 'primary_key': 'True'})
},
'foundry.column': {
'Meta': {'object_name': 'Column'},
'class_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'designation': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'index': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'row': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['foundry.Row']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'width': ('django.db.models.fields.PositiveIntegerField', [], {'default': '8'})
},
'foundry.commentreport': {
'Meta': {'object_name': 'CommentReport'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['foundry.FoundryComment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reporter': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'foundry.country': {
'Meta': {'ordering': "('title',)", 'object_name': 'Country'},
'country_code': ('django.db.models.fields.CharField', [], {'max_length': '2', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'minimum_age': ('django.db.models.fields.PositiveIntegerField', [], {'default': '18'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'foundry.defaultavatar': {
'Meta': {'object_name': 'DefaultAvatar'},
'crop_from': ('django.db.models.fields.CharField', [], {'default': "'center'", 'max_length': '10', 'blank': 'True'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'effect': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'defaultavatar_related'", 'null': 'True', 'to': "orm['photologue.PhotoEffect']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'foundry.foundrycomment': {
'Meta': {'ordering': "('submit_date',)", 'object_name': 'FoundryComment', '_ormbases': ['comments.Comment']},
'comment_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['comments.Comment']", 'unique': 'True', 'primary_key': 'True'}),
'in_reply_to': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['foundry.FoundryComment']", 'null': 'True', 'blank': 'True'}),
'moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'})
},
'foundry.link': {
'Meta': {'ordering': "('title', 'subtitle')", 'object_name': 'Link'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['category.Category']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'subtitle': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'link_target_content_type'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'target_object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'view_name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'})
},
'foundry.listing': {
'Meta': {'ordering': "('title', 'subtitle')", 'object_name': 'Listing'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['category.Category']", 'null': 'True', 'blank': 'True'}),
'content': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['jmbo.ModelBase']", 'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'count': ('django.db.models.fields.IntegerField', [], {}),
'display_title_tiled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'enable_syndication': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'items_per_page': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'pinned': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'listing_pinned'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['jmbo.ModelBase']"}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '32'}),
'style': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'subtitle': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'view_modifier': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'foundry.member': {
'Meta': {'object_name': 'Member', '_ormbases': ['auth.User']},
'about_me': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['foundry.Country']", 'null': 'True', 'blank': 'True'}),
'crop_from': ('django.db.models.fields.CharField', [], {'default': "'center'", 'max_length': '10', 'blank': 'True'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'dob': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'effect': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'member_related'", 'null': 'True', 'to': "orm['photologue.PhotoEffect']"}),
'facebook_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'is_profile_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'mobile_number': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'province': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'receive_email': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'receive_sms': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'twitter_username': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'})
},
'foundry.menu': {
'Meta': {'ordering': "('title', 'subtitle')", 'object_name': 'Menu'},
'display_title': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '32'}),
'subtitle': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'foundry.menulinkposition': {
'Meta': {'ordering': "('position',)", 'object_name': 'MenuLinkPosition'},
'class_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'condition_expression': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['foundry.Link']"}),
'menu': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['foundry.Menu']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {})
},
'foundry.navbar': {
'Meta': {'ordering': "('title', 'subtitle')", 'object_name': 'Navbar'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '32'}),
'subtitle': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'foundry.navbarlinkposition': {
'Meta': {'ordering': "('position',)", 'object_name': 'NavbarLinkPosition'},
'class_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'condition_expression': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['foundry.Link']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'navbar': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['foundry.Navbar']"}),
'position': ('django.db.models.fields.IntegerField', [], {})
},
'foundry.notification': {
'Meta': {'object_name': 'Notification'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['foundry.Link']"}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['foundry.Member']"})
},
'foundry.page': {
'Meta': {'object_name': 'Page'},
'css': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_homepage': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '32'}),
'subtitle': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'foundry.pageview': {
'Meta': {'object_name': 'PageView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['foundry.Page']"}),
'view_name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'foundry.row': {
'Meta': {'object_name': 'Row'},
'block_name': ('django.db.models.fields.CharField', [], {'default': "'content'", 'max_length': '32'}),
'class_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'has_left_or_right_column': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'index': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['foundry.Page']"})
},
'foundry.tile': {
'Meta': {'object_name': 'Tile'},
'class_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'column': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['foundry.Column']"}),
'condition_expression': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'enable_ajax': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'index': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tile_target_content_type'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'target_object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'view_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'jmbo.modelbase': {
'Meta': {'ordering': "('-created',)", 'object_name': 'ModelBase'},
'anonymous_comments': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'anonymous_likes': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['category.Category']", 'null': 'True', 'blank': 'True'}),
'class_name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'comments_closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'comments_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
'crop_from': ('django.db.models.fields.CharField', [], {'default': "'center'", 'max_length': '10', 'blank': 'True'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'effect': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'modelbase_related'", 'null': 'True', 'to': "orm['photologue.PhotoEffect']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'likes_closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'likes_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'primary_category': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'primary_modelbase_set'", 'null': 'True', 'to': "orm['category.Category']"}),
'publish_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'publishers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['publisher.Publisher']", 'null': 'True', 'blank': 'True'}),
'retract_on': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
'state': ('django.db.models.fields.CharField', [], {'default': "'unpublished'", 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'subtitle': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['category.Tag']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'photologue.photo': {
'Meta': {'ordering': "['-date_added']", 'object_name': 'Photo'},
'caption': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'crop_from': ('django.db.models.fields.CharField', [], {'default': "'center'", 'max_length': '10', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'effect': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'photo_related'", 'null': 'True', 'to': "orm['photologue.PhotoEffect']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'tags': ('photologue.models.TagField', [], {'max_length': '255', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'title_slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'photologue.photoeffect': {
'Meta': {'object_name': 'PhotoEffect'},
'background_color': ('django.db.models.fields.CharField', [], {'default': "'#FFFFFF'", 'max_length': '7'}),
'brightness': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'color': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'contrast': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'filters': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'reflection_size': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'reflection_strength': ('django.db.models.fields.FloatField', [], {'default': '0.6'}),
'sharpness': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'transpose_method': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'})
},
'publisher.publisher': {
'Meta': {'object_name': 'Publisher'},
'class_name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'secretballot.vote': {
'Meta': {'unique_together': "(('token', 'content_type', 'object_id'),)", 'object_name': 'Vote'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['foundry']
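    # Orientation note (illustrative, not part of the frozen migration): each
    # entry above follows South's frozen-ORM triple format
    #   'field_name': ('dotted.field.class', [positional args], {keyword args}),
    # so e.g. 'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
    # is rebuilt as models.CharField(max_length=200) when South re-renders the
    # models for this migration.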
| {
"content_hash": "6bd6e79bf64de5281f0e3f57a55de3a8",
"timestamp": "",
"source": "github",
"line_count": 358,
"max_line_length": 208,
"avg_line_length": 87.12569832402235,
"alnum_prop": 0.5471450097784617,
"repo_name": "praekelt/jmbo-foundry",
"id": "53a672786a28894ea96317935e5e508b62afe1ee",
"size": "31215",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "foundry/migrations/0053_add_field_GeneralPreferences_site_description.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "23769"
},
{
"name": "HTML",
"bytes": "82372"
},
{
"name": "JavaScript",
"bytes": "60278"
},
{
"name": "Python",
"bytes": "2030184"
}
],
"symlink_target": ""
} |
"""
:module: watchdog.observers.api
:synopsis: Classes useful to observer implementers.
:author: [email protected] (Yesudeep Mangalapilly)
Immutables
----------
.. autoclass:: ObservedWatch
:members:
:show-inheritance:
Collections
-----------
.. autoclass:: EventQueue
:members:
:show-inheritance:
Classes
-------
.. autoclass:: EventEmitter
:members:
:show-inheritance:
.. autoclass:: EventDispatcher
:members:
:show-inheritance:
.. autoclass:: BaseObserver
:members:
:show-inheritance:
"""
from __future__ import with_statement
import threading
try:
import queue # IGNORE:F0401
except ImportError:
import Queue as queue # IGNORE:F0401
from pathtools.path import absolute_path
from watchdog.utils import DaemonThread
from watchdog.utils.bricks import OrderedSetQueue as SetQueue
DEFAULT_EMITTER_TIMEOUT = 1 # in seconds.
DEFAULT_OBSERVER_TIMEOUT = 1 # in seconds.
# Collection classes
class EventQueue(SetQueue):
"""Thread-safe event queue based on a thread-safe ordered-set queue
to ensure duplicate :class:`FileSystemEvent` objects are prevented from
adding themselves to the queue to avoid dispatching multiple event handling
calls when multiple identical events are produced quicker than an observer
can consume them.
"""
class ObservedWatch(object):
"""An scheduled watch.
:param path:
Path string.
:param recursive:
``True`` if watch is recursive; ``False`` otherwise.
"""
def __init__(self, path, recursive):
self._path = absolute_path(path)
self._is_recursive = recursive
@property
def path(self):
"""The path that this watch monitors."""
return self._path
@property
def is_recursive(self):
"""Determines whether subdirectories are watched for the path."""
return self._is_recursive
@property
def key(self):
return self.path, self.is_recursive
def __eq__(self, watch):
return self.key == watch.key
def __ne__(self, watch):
return self.key != watch.key
def __hash__(self):
return hash(self.key)
def __repr__(self):
return "<ObservedWatch: path=%s, is_recursive=%s>" % (
self.path, self.is_recursive)
# Observer classes
class EventEmitter(DaemonThread):
"""
Producer daemon thread base class subclassed by event emitters
that generate events and populate a queue with them.
:param event_queue:
The event queue to populate with generated events.
:type event_queue:
:class:`watchdog.events.EventQueue`
:param watch:
The watch to observe and produce events for.
:type watch:
:class:`ObservedWatch`
:param timeout:
Timeout (in seconds) between successive attempts at reading events.
:type timeout:
``float``
"""
def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT):
DaemonThread.__init__(self)
self._event_queue = event_queue
self._watch = watch
self._timeout = timeout
@property
def timeout(self):
"""
Blocking timeout for reading events.
"""
return self._timeout
@property
def watch(self):
"""
The watch associated with this emitter.
"""
return self._watch
def queue_event(self, event):
"""
Queues a single event.
:param event:
Event to be queued.
:type event:
An instance of :class:`watchdog.events.FileSystemEvent`
or a subclass.
"""
self._event_queue.put((event, self.watch))
def queue_events(self, timeout):
"""Override this method to populate the event queue with events
per interval period.
:param timeout:
Timeout (in seconds) between successive attempts at
reading events.
:type timeout:
``float``
"""
def on_thread_exit(self):
"""
Override this method for cleaning up immediately before the daemon
thread stops completely.
"""
def run(self):
try:
while self.should_keep_running():
self.queue_events(self.timeout)
finally:
self.on_thread_exit()
class EventDispatcher(DaemonThread):
"""
Consumer daemon thread base class subclassed by event observer threads
that dispatch events from an event queue to appropriate event handlers.
:param timeout:
Event queue blocking timeout (in seconds).
:type timeout:
``float``
"""
def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT):
DaemonThread.__init__(self)
self._event_queue = EventQueue()
self._timeout = timeout
@property
def timeout(self):
"""Event queue block timeout."""
return self._timeout
@property
def event_queue(self):
"""The event queue which is populated with file system events
by emitters and from which events are dispatched by a dispatcher
thread."""
return self._event_queue
def dispatch_events(self, event_queue, timeout):
"""Override this method to consume events from an event queue, blocking
on the queue for the specified timeout before raising :class:`queue.Empty`.
:param event_queue:
Event queue to populate with one set of events.
:type event_queue:
:class:`EventQueue`
:param timeout:
Interval period (in seconds) to wait before timing out on the
event queue.
:type timeout:
``float``
:raises:
:class:`queue.Empty`
"""
def on_thread_exit(self):
"""Override this method for cleaning up immediately before the daemon
thread stops completely."""
def run(self):
try:
while self.should_keep_running():
try:
self.dispatch_events(self.event_queue, self.timeout)
except queue.Empty:
continue
finally:
self.on_thread_exit()
class BaseObserver(EventDispatcher):
"""Base observer."""
def __init__(self, emitter_class, timeout=DEFAULT_OBSERVER_TIMEOUT):
EventDispatcher.__init__(self, timeout)
self._emitter_class = emitter_class
self._lock = threading.Lock()
self._watches = set()
self._handlers = dict()
self._emitters = set()
self._emitter_for_watch = dict()
def _add_emitter(self, emitter):
self._emitter_for_watch[emitter.watch] = emitter
self._emitters.add(emitter)
def _remove_emitter(self, emitter):
del self._emitter_for_watch[emitter.watch]
self._emitters.remove(emitter)
emitter.stop()
def _get_emitter_for_watch(self, watch):
return self._emitter_for_watch[watch]
def _clear_emitters(self):
for emitter in self._emitters:
emitter.stop()
self._emitters.clear()
self._emitter_for_watch.clear()
def _add_handler_for_watch(self, event_handler, watch):
try:
self._handlers[watch].add(event_handler)
except KeyError:
self._handlers[watch] = set([event_handler])
def _get_handlers_for_watch(self, watch):
return self._handlers[watch]
def _remove_handlers_for_watch(self, watch):
del self._handlers[watch]
def _remove_handler_for_watch(self, handler, watch):
handlers = self._get_handlers_for_watch(watch)
handlers.remove(handler)
def schedule(self, event_handler, path, recursive=False):
"""
Schedules watching a path and calls appropriate methods specified
in the given event handler in response to file system events.
:param event_handler:
An event handler instance that has appropriate event handling
methods which will be called by the observer in response to
file system events.
:type event_handler:
:class:`watchdog.events.FileSystemEventHandler` or a subclass
:param path:
Directory path that will be monitored.
:type path:
``str``
:param recursive:
``True`` if events will be emitted for sub-directories
traversed recursively; ``False`` otherwise.
:type recursive:
``bool``
:return:
An :class:`ObservedWatch` object instance representing
a watch.
"""
with self._lock:
watch = ObservedWatch(path, recursive)
self._add_handler_for_watch(event_handler, watch)
try:
                # If we already have an emitter for this watch, don't create a
                # new one; the handler registered above will share the existing
                # emitter's event stream.
emitter = self._get_emitter_for_watch(watch)
except KeyError:
# Create a new emitter and start it.
emitter = self._emitter_class(event_queue=self.event_queue,
watch=watch,
timeout=self.timeout)
self._add_emitter(emitter)
emitter.start()
self._watches.add(watch)
return watch
def add_handler_for_watch(self, event_handler, watch):
"""Adds a handler for the given watch.
:param event_handler:
An event handler instance that has appropriate event handling
methods which will be called by the observer in response to
file system events.
:type event_handler:
:class:`watchdog.events.FileSystemEventHandler` or a subclass
:param watch:
The watch to add a handler for.
:type watch:
An instance of :class:`ObservedWatch` or a subclass of
:class:`ObservedWatch`
"""
with self._lock:
self._add_handler_for_watch(event_handler, watch)
def remove_handler_for_watch(self, event_handler, watch):
"""Removes a handler for the given watch.
:param event_handler:
An event handler instance that has appropriate event handling
methods which will be called by the observer in response to
file system events.
:type event_handler:
:class:`watchdog.events.FileSystemEventHandler` or a subclass
:param watch:
The watch to remove a handler for.
:type watch:
An instance of :class:`ObservedWatch` or a subclass of
:class:`ObservedWatch`
"""
with self._lock:
self._remove_handler_for_watch(event_handler, watch)
def unschedule(self, watch):
"""Unschedules a watch.
:param watch:
The watch to unschedule.
:type watch:
An instance of :class:`ObservedWatch` or a subclass of
:class:`ObservedWatch`
"""
        with self._lock:
            emitter = self._get_emitter_for_watch(watch)
            self._remove_handlers_for_watch(watch)
            self._remove_emitter(emitter)
            self._watches.remove(watch)
def unschedule_all(self):
"""Unschedules all watches and detaches all associated event
handlers."""
with self._lock:
self._handlers.clear()
self._clear_emitters()
self._watches.clear()
def on_thread_exit(self):
self.unschedule_all()
def _dispatch_event(self, event, watch):
with self._lock:
for handler in self._get_handlers_for_watch(watch):
handler.dispatch(event)
def dispatch_events(self, event_queue, timeout):
event, watch = event_queue.get(block=True, timeout=timeout)
try:
self._dispatch_event(event, watch)
except KeyError:
            # The watch was unscheduled between ``event_queue.get`` and the
            # dispatch, so its handlers are gone. We cannot hold the lock
            # across ``event_queue.get`` because it blocks while the queue is
            # empty, so this race is tolerated and the event is dropped.
pass
event_queue.task_done()
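# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the module). ``NullEmitter`` and
# ``my_handler`` are placeholders invented for this example; real emitters
# live in the watchdog.observers.* backends.
#
#   class NullEmitter(EventEmitter):
#       def queue_events(self, timeout):
#           pass  # a real emitter would call self.queue_event(event) here
#
#   observer = BaseObserver(emitter_class=NullEmitter)
#   watch = observer.schedule(my_handler, '/tmp', recursive=False)
#   observer.start()            # starts the dispatcher thread
#   # ... emitters produce events; the observer dispatches them ...
#   observer.unschedule(watch)
#   observer.stop()
# ---------------------------------------------------------------------------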
| {
"content_hash": "b922badb5fafb6060a6c4933ed7c2367",
"timestamp": "",
"source": "github",
"line_count": 407,
"max_line_length": 79,
"avg_line_length": 27.479115479115478,
"alnum_prop": 0.6596924177396281,
"repo_name": "austinwagner/sublime-sourcepawn",
"id": "1de80e264a30aa37f03c56e079db71bbd359fd6a",
"size": "11869",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "watchdog/observers/api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "204354"
}
],
"symlink_target": ""
} |
from catstalk.cli import parser
def test_init():
# With default path
args = parser.parse_args("init".split())
assert args.command[0] == "init"
# With specific path
args = parser.parse_args("init test".split())
assert args.command[0] == "init"
assert args.command[1] == "test"
def test_build():
# With default path
args = parser.parse_args("build".split())
assert args.command[0] == "build"
# With specific path
args = parser.parse_args("build test".split())
assert args.command[0] == "build"
assert args.command[1] == "test"
| {
"content_hash": "5a1d19f4a2a0d121d74c0bc745f82732",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 50,
"avg_line_length": 27.857142857142858,
"alnum_prop": 0.629059829059829,
"repo_name": "helloqiu/Catstalk",
"id": "ecf6186c653ff96f3b6017fa21c76159bb7ddf30",
"size": "610",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_cli.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "20075"
}
],
"symlink_target": ""
} |
"""
sphinx.util.jsdump
~~~~~~~~~~~~~~~~~~
This module implements a simple JavaScript serializer.
Uses the basestring encode function from simplejson by Bob Ippolito.
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from sphinx.util.pycompat import u
_str_re = re.compile(r'"(\\\\|\\"|[^"])*"')
_int_re = re.compile(r'\d+')
_name_re = re.compile(r'[a-zA-Z]\w*')
_nameonly_re = re.compile(r'[a-zA-Z]\w*$')
# escape \, ", control characters and everything outside ASCII
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
ESCAPE_DICT = {
'\\': '\\\\',
'"': '\\"',
'\b': '\\b',
'\f': '\\f',
'\n': '\\n',
'\r': '\\r',
'\t': '\\t',
}
ESCAPED = re.compile(r'\\u.{4}|\\.')
def encode_string(s):
def replace(match):
s = match.group(0)
try:
return ESCAPE_DICT[s]
except KeyError:
n = ord(s)
if n < 0x10000:
return '\\u%04x' % (n,)
else:
# surrogate pair
n -= 0x10000
s1 = 0xd800 | ((n >> 10) & 0x3ff)
s2 = 0xdc00 | (n & 0x3ff)
return '\\u%04x\\u%04x' % (s1, s2)
return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
def decode_string(s):
return ESCAPED.sub(lambda m: eval(u + '"' + m.group() + '"'), s)
reswords = set("""\
abstract else instanceof switch
boolean enum int synchronized
break export interface this
byte extends long throw
case false native throws
catch final new transient
char finally null true
class float package try
const for private typeof
continue function protected var
debugger goto public void
default if return volatile
delete implements short while
do import static with
double in super""".split())
def dumps(obj, key=False):
if key:
if not isinstance(obj, basestring):
obj = str(obj)
if _nameonly_re.match(obj) and obj not in reswords:
return obj # return it as a bare word
else:
return encode_string(obj)
if obj is None:
return 'null'
elif obj is True or obj is False:
        return 'true' if obj else 'false'
elif isinstance(obj, (int, long, float)):
return str(obj)
elif isinstance(obj, dict):
return '{%s}' % ','.join('%s:%s' % (
dumps(key, True),
dumps(value)
) for key, value in obj.iteritems())
elif isinstance(obj, (tuple, list, set)):
return '[%s]' % ','.join(dumps(x) for x in obj)
elif isinstance(obj, basestring):
return encode_string(obj)
raise TypeError(type(obj))
def dump(obj, f):
f.write(dumps(obj))
def loads(x):
"""Loader that can read the JS subset the indexer produces."""
nothing = object()
i = 0
n = len(x)
stack = []
obj = nothing
key = False
keys = []
while i < n:
c = x[i]
if c == '{':
obj = {}
stack.append(obj)
key = True
keys.append(nothing)
i += 1
elif c == '[':
obj = []
stack.append(obj)
key = False
keys.append(nothing)
i += 1
elif c in '}]':
if key:
if keys[-1] is not nothing:
raise ValueError("unfinished dict")
# empty dict
key = False
oldobj = stack.pop()
keys.pop()
if stack:
obj = stack[-1]
if isinstance(obj, dict):
if keys[-1] is nothing:
raise ValueError("invalid key object", oldobj)
obj[keys[-1]] = oldobj
else:
obj.append(oldobj)
else:
break
i += 1
elif c == ',':
if key:
raise ValueError("multiple keys")
if isinstance(obj, dict):
key = True
i += 1
elif c == ':':
if not isinstance(obj, dict):
raise ValueError("colon in list")
i += 1
if not key:
raise ValueError("multiple values")
key = False
else:
m = _str_re.match(x, i)
if m:
y = decode_string(m.group()[1:-1])
else:
m = _int_re.match(x, i)
if m:
y = int(m.group())
else:
m = _name_re.match(x, i)
if m:
y = m.group()
if y == 'true':
y = True
elif y == 'false':
y = False
elif y == 'null':
y = None
elif not key:
raise ValueError("bareword as value")
else:
raise ValueError("read error at pos %d" % i)
i = m.end()
if isinstance(obj, dict):
if key:
keys[-1] = y
else:
obj[keys[-1]] = y
key = False
else:
obj.append(y)
if obj is nothing:
raise ValueError("nothing loaded from string")
return obj
def load(f):
return loads(f.read())
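# Round-trip sketch (illustrative): keys that are valid JS names are emitted
# as bare words, while reserved words fall back to quoted strings.
#
#   dumps({'a': [1, 'x']})   ->  '{a:[1,"x"]}'
#   dumps({'class': 1})      ->  '{"class":1}'
#   loads('{a:[1,"x"]}')     ->  {'a': [1, 'x']}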
| {
"content_hash": "9bcd0013f4ecee1b06d610e9a3cbb1a5",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 72,
"avg_line_length": 28.695876288659793,
"alnum_prop": 0.44817675588288125,
"repo_name": "kiwicopple/MyMDb",
"id": "85845a7225d226bcce521654fe532adccd4f796e",
"size": "5591",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "venv/Lib/site-packages/sphinx/util/jsdump.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "433713"
},
{
"name": "C++",
"bytes": "21783"
},
{
"name": "CSS",
"bytes": "88676"
},
{
"name": "JavaScript",
"bytes": "192343"
},
{
"name": "Makefile",
"bytes": "8470"
},
{
"name": "PowerShell",
"bytes": "8104"
},
{
"name": "Python",
"bytes": "10594687"
},
{
"name": "Shell",
"bytes": "885"
},
{
"name": "TeX",
"bytes": "112147"
}
],
"symlink_target": ""
} |
import logging
from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import get_host, connection_from_url
from .exceptions import HostChangedError
from .request import RequestMethods
__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']
pool_classes_by_scheme = {
'http': HTTPConnectionPool,
'https': HTTPSConnectionPool,
}
port_by_scheme = {
'http': 80,
'https': 443,
}
log = logging.getLogger(__name__)
class PoolManager(RequestMethods):
"""
Allows for arbitrary requests while transparently keeping track of
necessary connection pools for you.
:param num_pools:
Number of connection pools to cache before discarding the least recently
used pool.
:param \**connection_pool_kw:
Additional parameters are used to create fresh
:class:`urllib3.connectionpool.ConnectionPool` instances.
Example: ::
>>> manager = PoolManager()
>>> r = manager.urlopen("http://google.com/")
>>> r = manager.urlopen("http://google.com/mail")
>>> r = manager.urlopen("http://yahoo.com/")
        >>> len(manager.pools)
2
"""
# TODO: Make sure there are no memory leaks here.
def __init__(self, num_pools=10, **connection_pool_kw):
self.connection_pool_kw = connection_pool_kw
self.pools = RecentlyUsedContainer(num_pools)
def connection_from_host(self, host, port=80, scheme='http'):
"""
Get a :class:`ConnectionPool` based on the host, port, and scheme.
Note that an appropriate ``port`` value is required here to normalize
connection pools in our container most effectively.
"""
pool_key = (scheme, host, port)
# If the scheme, host, or port doesn't match existing open connections,
# open a new ConnectionPool.
pool = self.pools.get(pool_key)
if pool:
return pool
# Make a fresh ConnectionPool of the desired type
pool_cls = pool_classes_by_scheme[scheme]
pool = pool_cls(host, port, **self.connection_pool_kw)
self.pools[pool_key] = pool
return pool
def connection_from_url(self, url):
"""
Similar to :func:`urllib3.connectionpool.connection_from_url` but
doesn't pass any additional parameters to the
:class:`urllib3.connectionpool.ConnectionPool` constructor.
Additional parameters are taken from the :class:`.PoolManager`
constructor.
"""
scheme, host, port = get_host(url)
port = port or port_by_scheme.get(scheme, 80)
return self.connection_from_host(host, port=port, scheme=scheme)
def urlopen(self, method, url, **kw):
"""
Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`.
``url`` must be absolute, such that an appropriate
:class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
"""
conn = self.connection_from_url(url)
try:
return conn.urlopen(method, url, **kw)
        except HostChangedError as e:
kw['retries'] = e.retries # Persist retries countdown
return self.urlopen(method, e.new_url, **kw)
class ProxyManager(RequestMethods):
"""
Given a ConnectionPool to a proxy, the ProxyManager's ``urlopen`` method
will make requests to any url through the defined proxy.
"""
def __init__(self, proxy_pool):
self.proxy_pool = proxy_pool
def urlopen(self, method, url, **kw):
"Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
kw['assert_same_host'] = False
return self.proxy_pool.urlopen(method, url, **kw)
def proxy_from_url(url, **pool_kw):
proxy_pool = connection_from_url(url, **pool_kw)
return ProxyManager(proxy_pool)
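# Illustrative note (not executed): pools are keyed on (scheme, host, port),
# so explicit and implicit default ports share one pool:
#
#   manager = PoolManager(num_pools=10)
#   a = manager.connection_from_url('http://example.com/a')     # ('http', 'example.com', 80)
#   b = manager.connection_from_url('http://example.com:80/b')  # same key, so a is b
#   c = manager.connection_from_url('https://example.com/')     # ('https', 'example.com', 443)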
| {
"content_hash": "8e9592a22c19fc2fc062518572c61b5d",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 80,
"avg_line_length": 30.661417322834644,
"alnum_prop": 0.6420133538777607,
"repo_name": "mikewesner-wf/glasshouse",
"id": "482ee4ad9fbdaf2f0ef13af29ef7c819e35cfa34",
"size": "4123",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "glasshouse.indigoPlugin/Contents/Server Plugin/requests/packages/urllib3/poolmanager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1449"
},
{
"name": "CSS",
"bytes": "490924"
},
{
"name": "JavaScript",
"bytes": "854636"
},
{
"name": "Python",
"bytes": "5578834"
},
{
"name": "Shell",
"bytes": "215"
}
],
"symlink_target": ""
} |
"""Tests for execution_util.py."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import mock
import subprocess
from gslib.tests import testcase
from gslib.utils import execution_util
class TestExecutionUtil(testcase.GsUtilUnitTestCase):
"""Test execution utils."""
@mock.patch.object(subprocess, 'Popen')
def testExternalCommandReturnsNoOutput(self, mock_Popen):
mock_command_process = mock.Mock()
mock_command_process.returncode = 0
mock_command_process.communicate.return_value = (None, None)
mock_Popen.return_value = mock_command_process
stdout, stderr = execution_util.ExecuteExternalCommand(['fake-command'])
self.assertIsNone(stdout)
self.assertIsNone(stderr)
mock_Popen.assert_called_once_with(['fake-command'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
@mock.patch.object(subprocess, 'Popen')
def testExternalCommandReturnsStringOutput(self, mock_Popen):
mock_command_process = mock.Mock()
mock_command_process.returncode = 0
mock_command_process.communicate.return_value = ('a', 'b')
mock_Popen.return_value = mock_command_process
stdout, stderr = execution_util.ExecuteExternalCommand(['fake-command'])
self.assertEqual(stdout, 'a')
self.assertEqual(stderr, 'b')
mock_Popen.assert_called_once_with(['fake-command'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
@mock.patch.object(subprocess, 'Popen')
def testExternalCommandReturnsBytesOutput(self, mock_Popen):
mock_command_process = mock.Mock()
mock_command_process.returncode = 0
mock_command_process.communicate.return_value = (b'a', b'b')
mock_Popen.return_value = mock_command_process
stdout, stderr = execution_util.ExecuteExternalCommand(['fake-command'])
self.assertEqual(stdout, 'a')
self.assertEqual(stderr, 'b')
mock_Popen.assert_called_once_with(['fake-command'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
@mock.patch.object(subprocess, 'Popen')
  def testExternalCommandRaisesOnNonZeroReturnCode(self, mock_Popen):
mock_command_process = mock.Mock()
mock_command_process.returncode = 1
mock_command_process.communicate.return_value = (None, b'error')
mock_Popen.return_value = mock_command_process
with self.assertRaises(OSError):
execution_util.ExecuteExternalCommand(['fake-command'])
mock_Popen.assert_called_once_with(['fake-command'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
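# For orientation only: a wrapper consistent with the assertions above might
# look like the following sketch (hypothetical; the real implementation lives
# in gslib.utils.execution_util and may differ):
#
#   def ExecuteExternalCommand(args):
#     p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
#     stdout, stderr = p.communicate()
#     stdout = stdout.decode('utf-8') if isinstance(stdout, bytes) else stdout
#     stderr = stderr.decode('utf-8') if isinstance(stderr, bytes) else stderr
#     if p.returncode != 0:
#       raise OSError(stderr)
#     return stdout, stderr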
| {
"content_hash": "f8ce141d8c203a459dc461de624f78a1",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 76,
"avg_line_length": 37.89333333333333,
"alnum_prop": 0.6586910626319493,
"repo_name": "catapult-project/catapult",
"id": "868c6b6c0e477c5de5b6992baa3ab363f02d7f97",
"size": "3462",
"binary": false,
"copies": "8",
"ref": "refs/heads/main",
"path": "third_party/gsutil/gslib/tests/test_execution_util.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1324"
},
{
"name": "C++",
"bytes": "46069"
},
{
"name": "CSS",
"bytes": "23376"
},
{
"name": "Dockerfile",
"bytes": "1541"
},
{
"name": "Go",
"bytes": "114396"
},
{
"name": "HTML",
"bytes": "12394298"
},
{
"name": "JavaScript",
"bytes": "1559584"
},
{
"name": "Makefile",
"bytes": "1774"
},
{
"name": "Python",
"bytes": "6778695"
},
{
"name": "Shell",
"bytes": "2288"
}
],
"symlink_target": ""
} |
'''
Created on 2013-10-17
@author: nicolas
'''
import MessageField
import Exception
class Field(MessageField.Field):
'''
    This class is part of an ensemble of classes
    that can be used as a utility for packing and unpacking A429 messages.
    LabelField is more specifically dedicated to managing message labels,
    which are located in bits 1 to 8 with the LSB at bit 8.
'''
def __repr__(self):
if self._label is not None:
return '<%s.%s object at 0x%x, Label %s [%s]>'%(self.__module__,
self.__class__.__name__,
id(self),
oct(self._label),
repr(MessageField.Field))
else:
return '<%s.%s object at 0x%x [%s]>'%(self.__module__,
self.__class__.__name__,
id(self),
repr(MessageField.Field))
def __init__(self):
'''
        Simply declare an 8-bit field with its LSB at bit 1.
'''
MessageField.Field.__init__(self,1, 8, 'label')
self._label = None
def is_data_set(self):
return self._label is not None
def setData(self,label):
''' set the label property
        This function expects the label number in octal form.
        The number is passed as a string so that the value is always
        interpreted as an octal literal rather than a decimal integer.
'''
        if not isinstance(label, str):
raise Exception.A429Exception('Label should be given as strings')
try:
self._label = int(label,8)
except ValueError:
raise Exception.A429MsgRangeError(self.name,\
                                              0o377,\
label)
if(self._label<0):
raise Exception.A429MsgRangeError(self.name,\
0,\
label)
def getData(self):
''' get the label property '''
if self._label is None:
raise Exception.A429NoData(self.name)
else:
return self._label
def clear(self):
'''
Clear the label value
'''
self._label = None
def pack(self):
'''
        Return the 32-bit word corresponding to an A429 message carrying the label data (all other bits at zero)
'''
if self._label is None:
raise Exception.A429NoData(self.name)
else:
            reverted = int('{:08b}'.format(self._label)[::-1], 2)  # reverse the bits: A429 labels are transmitted LSB-first
return MessageField.Field.pack(self,reverted)
def unpack(self,A429word):
""" set the label given a 32 bit ARINC 429 message value """
labelrev= MessageField.Field.unpack(self,A429word)
self._label= int('{:08b}'.format(labelrev)[::-1], 2)
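    # Illustrative round trip for the LSB-first label encoding (a sketch;
    # label 0o205 chosen arbitrarily):
    #
    #   f = Field()
    #   f.setData('205')   # octal string -> _label == 0o205 == 0b10000101
    #   word = f.pack()    # bits 1..8 carry the reversed pattern 0b10100001
    #   g = Field()
    #   g.unpack(word)
    #   assert g.getData() == 0o205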
def __eq__(self, other):
'''
Define the == operator to compare field definition AND label
'''
if isinstance(other, Field):
return self.__dict__ == other.__dict__
else:
return NotImplemented
def __ne__(self, other):
'''
Define the != operator to compare field definition AND label
'''
result = self.__eq__(other)
if result is NotImplemented:
return result
return not result
def serialize(self, stream, serializeState = False , parentElement = None):
'''
Serialize Field to XML
'''
from xml.etree.ElementTree import Element, SubElement, Comment, ElementTree
fieldElement = super(Field,self).serialize(stream,serializeState,parentElement)
fieldElement.set('type',__name__)
fieldElement.set('label', oct(self._label))
        return fieldElement
| {
"content_hash": "334df6015852cb1ad3294258dd4965c6",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 109,
"avg_line_length": 35.33620689655172,
"alnum_prop": 0.49597462795803854,
"repo_name": "superliujian/Py429",
"id": "58b79f4d4639f5ac10267cefbce3b592877d96cd",
"size": "4099",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ARINC429/Label.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "64687"
}
],
"symlink_target": ""
} |
import unittest
from django.core.exceptions import FieldDoesNotExist
from django.db import connection, migrations, models, transaction
from django.db.migrations.migration import Migration
from django.db.migrations.operations import CreateModel
from django.db.migrations.state import ModelState, ProjectState
from django.db.models.fields import NOT_PROVIDED
from django.db.transaction import atomic
from django.db.utils import IntegrityError
from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature
from .models import FoodManager, FoodQuerySet, UnicodeModel
from .test_base import MigrationTestBase
try:
import sqlparse
except ImportError:
sqlparse = None
class Mixin:
pass
class OperationTestBase(MigrationTestBase):
"""
Common functions to help test operations.
"""
def apply_operations(self, app_label, project_state, operations, atomic=True):
migration = Migration('name', app_label)
migration.operations = operations
with connection.schema_editor(atomic=atomic) as editor:
return migration.apply(project_state, editor)
def unapply_operations(self, app_label, project_state, operations, atomic=True):
migration = Migration('name', app_label)
migration.operations = operations
with connection.schema_editor(atomic=atomic) as editor:
return migration.unapply(project_state, editor)
def make_test_state(self, app_label, operation, **kwargs):
"""
Makes a test state using set_up_test_model and returns the
original state and the state after the migration is applied.
"""
project_state = self.set_up_test_model(app_label, **kwargs)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
return project_state, new_state
def set_up_test_model(
self, app_label, second_model=False, third_model=False, index=False, multicol_index=False,
related_model=False, mti_model=False, proxy_model=False, manager_model=False,
unique_together=False, options=False, db_table=None, index_together=False):
"""
Creates a test model state and database table.
"""
# Delete the tables if they already exist
table_names = [
# Start with ManyToMany tables
'_pony_stables', '_pony_vans',
# Then standard model tables
'_pony', '_stable', '_van',
]
tables = [(app_label + table_name) for table_name in table_names]
with connection.cursor() as cursor:
table_names = connection.introspection.table_names(cursor)
connection.disable_constraint_checking()
sql_delete_table = connection.schema_editor().sql_delete_table
with transaction.atomic():
for table in tables:
if table in table_names:
cursor.execute(sql_delete_table % {
"table": connection.ops.quote_name(table),
})
connection.enable_constraint_checking()
# Make the "current" state
model_options = {
"swappable": "TEST_SWAP_MODEL",
"index_together": [["weight", "pink"]] if index_together else [],
"unique_together": [["pink", "weight"]] if unique_together else [],
}
if options:
model_options["permissions"] = [("can_groom", "Can groom")]
if db_table:
model_options["db_table"] = db_table
operations = [migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=3)),
("weight", models.FloatField()),
],
options=model_options,
)]
if index:
operations.append(migrations.AddIndex(
"Pony",
models.Index(fields=["pink"], name="pony_pink_idx")
))
if multicol_index:
operations.append(migrations.AddIndex(
"Pony",
models.Index(fields=["pink", "weight"], name="pony_test_idx")
))
if second_model:
operations.append(migrations.CreateModel(
"Stable",
[
("id", models.AutoField(primary_key=True)),
]
))
if third_model:
operations.append(migrations.CreateModel(
"Van",
[
("id", models.AutoField(primary_key=True)),
]
))
if related_model:
operations.append(migrations.CreateModel(
"Rider",
[
("id", models.AutoField(primary_key=True)),
("pony", models.ForeignKey("Pony", models.CASCADE)),
("friend", models.ForeignKey("self", models.CASCADE))
],
))
if mti_model:
operations.append(migrations.CreateModel(
"ShetlandPony",
fields=[
('pony_ptr', models.OneToOneField(
'Pony',
models.CASCADE,
auto_created=True,
parent_link=True,
primary_key=True,
to_field='id',
serialize=False,
)),
("cuteness", models.IntegerField(default=1)),
],
bases=['%s.Pony' % app_label],
))
if proxy_model:
operations.append(migrations.CreateModel(
"ProxyPony",
fields=[],
options={"proxy": True},
bases=['%s.Pony' % app_label],
))
if manager_model:
operations.append(migrations.CreateModel(
"Food",
fields=[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
]
))
return self.apply_operations(app_label, ProjectState(), operations)
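# The tests below share a three-step pattern, sketched here for orientation
# (names are placeholders, not executed code):
#
#   operation = migrations.AddField('Pony', 'height', models.FloatField(default=5))
#   new_state = project_state.clone()
#   operation.state_forwards('app_label', new_state)    # 1. mutate model state
#   with connection.schema_editor() as editor:           # 2. apply forwards
#       operation.database_forwards('app_label', editor, project_state, new_state)
#   with connection.schema_editor() as editor:           # 3. reverse
#       operation.database_backwards('app_label', editor, new_state, project_state)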
class OperationTests(OperationTestBase):
"""
Tests running the operations and making sure they do what they say they do.
    Each test checks the state change first, then the database operation,
    both forwards and backwards.
"""
def test_create_model(self):
"""
Tests the CreateModel operation.
Most other tests use this operation as part of setup, so check failures here first.
"""
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
)
self.assertEqual(operation.describe(), "Create model Pony")
# Test the state alteration
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crmo", new_state)
self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony")
self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2)
# Test the database alteration
self.assertTableNotExists("test_crmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmo", editor, project_state, new_state)
self.assertTableExists("test_crmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmo", editor, new_state, project_state)
self.assertTableNotExists("test_crmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["fields", "name"])
# And default manager not in set
operation = migrations.CreateModel("Foo", fields=[], managers=[("objects", models.Manager())])
definition = operation.deconstruct()
self.assertNotIn('managers', definition[2])
def test_create_model_with_duplicate_field_name(self):
with self.assertRaisesMessage(ValueError, 'Found duplicate value pink in CreateModel fields argument.'):
migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.TextField()),
("pink", models.IntegerField(default=1)),
],
)
def test_create_model_with_duplicate_base(self):
message = 'Found duplicate value test_crmo.pony in CreateModel bases argument.'
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=("test_crmo.Pony", "test_crmo.Pony",),
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=("test_crmo.Pony", "test_crmo.pony",),
)
message = 'Found duplicate value migrations.unicodemodel in CreateModel bases argument.'
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(UnicodeModel, UnicodeModel,),
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(UnicodeModel, 'migrations.unicodemodel',),
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(UnicodeModel, 'migrations.UnicodeModel',),
)
message = "Found duplicate value <class 'django.db.models.base.Model'> in CreateModel bases argument."
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(models.Model, models.Model,),
)
message = "Found duplicate value <class 'migrations.test_operations.Mixin'> in CreateModel bases argument."
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(Mixin, Mixin,),
)
def test_create_model_with_duplicate_manager_name(self):
with self.assertRaisesMessage(ValueError, 'Found duplicate value objects in CreateModel managers argument.'):
migrations.CreateModel(
"Pony",
fields=[],
managers=[
("objects", models.Manager()),
("objects", models.Manager()),
],
)
def test_create_model_with_unique_after(self):
"""
Tests the CreateModel operation directly followed by an
AlterUniqueTogether (bug #22844 - sqlite remake issues)
"""
operation1 = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
)
operation2 = migrations.CreateModel(
"Rider",
[
("id", models.AutoField(primary_key=True)),
("number", models.IntegerField(default=1)),
("pony", models.ForeignKey("test_crmoua.Pony", models.CASCADE)),
],
)
operation3 = migrations.AlterUniqueTogether(
"Rider",
[
("number", "pony"),
],
)
# Test the database alteration
project_state = ProjectState()
self.assertTableNotExists("test_crmoua_pony")
self.assertTableNotExists("test_crmoua_rider")
with connection.schema_editor() as editor:
new_state = project_state.clone()
operation1.state_forwards("test_crmoua", new_state)
operation1.database_forwards("test_crmoua", editor, project_state, new_state)
project_state, new_state = new_state, new_state.clone()
operation2.state_forwards("test_crmoua", new_state)
operation2.database_forwards("test_crmoua", editor, project_state, new_state)
project_state, new_state = new_state, new_state.clone()
operation3.state_forwards("test_crmoua", new_state)
operation3.database_forwards("test_crmoua", editor, project_state, new_state)
self.assertTableExists("test_crmoua_pony")
self.assertTableExists("test_crmoua_rider")
def test_create_model_m2m(self):
"""
Test the creation of a model with a ManyToMany field and the
auto-created "through" model.
"""
project_state = self.set_up_test_model("test_crmomm")
operation = migrations.CreateModel(
"Stable",
[
("id", models.AutoField(primary_key=True)),
("ponies", models.ManyToManyField("Pony", related_name="stables"))
]
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_crmomm", new_state)
# Test the database alteration
self.assertTableNotExists("test_crmomm_stable_ponies")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmomm", editor, project_state, new_state)
self.assertTableExists("test_crmomm_stable")
self.assertTableExists("test_crmomm_stable_ponies")
self.assertColumnNotExists("test_crmomm_stable", "ponies")
# Make sure the M2M field actually works
with atomic():
Pony = new_state.apps.get_model("test_crmomm", "Pony")
Stable = new_state.apps.get_model("test_crmomm", "Stable")
stable = Stable.objects.create()
p1 = Pony.objects.create(pink=False, weight=4.55)
p2 = Pony.objects.create(pink=True, weight=5.43)
stable.ponies.add(p1, p2)
self.assertEqual(stable.ponies.count(), 2)
stable.ponies.all().delete()
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmomm", editor, new_state, project_state)
self.assertTableNotExists("test_crmomm_stable")
self.assertTableNotExists("test_crmomm_stable_ponies")
def test_create_model_inheritance(self):
"""
Tests the CreateModel operation on a multi-table inheritance setup.
"""
project_state = self.set_up_test_model("test_crmoih")
# Test the state alteration
operation = migrations.CreateModel(
"ShetlandPony",
[
('pony_ptr', models.OneToOneField(
'test_crmoih.Pony',
models.CASCADE,
auto_created=True,
primary_key=True,
to_field='id',
serialize=False,
)),
("cuteness", models.IntegerField(default=1)),
],
)
new_state = project_state.clone()
operation.state_forwards("test_crmoih", new_state)
self.assertIn(("test_crmoih", "shetlandpony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crmoih_shetlandpony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmoih", editor, project_state, new_state)
self.assertTableExists("test_crmoih_shetlandpony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmoih", editor, new_state, project_state)
self.assertTableNotExists("test_crmoih_shetlandpony")
def test_create_proxy_model(self):
"""
CreateModel ignores proxy models.
"""
project_state = self.set_up_test_model("test_crprmo")
# Test the state alteration
operation = migrations.CreateModel(
"ProxyPony",
[],
options={"proxy": True},
bases=("test_crprmo.Pony",),
)
self.assertEqual(operation.describe(), "Create proxy model ProxyPony")
new_state = project_state.clone()
operation.state_forwards("test_crprmo", new_state)
self.assertIn(("test_crprmo", "proxypony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crprmo", editor, project_state, new_state)
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crprmo", editor, new_state, project_state)
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["bases", "fields", "name", "options"])
def test_create_unmanaged_model(self):
"""
CreateModel ignores unmanaged models.
"""
project_state = self.set_up_test_model("test_crummo")
# Test the state alteration
operation = migrations.CreateModel(
"UnmanagedPony",
[],
options={"proxy": True},
bases=("test_crummo.Pony",),
)
self.assertEqual(operation.describe(), "Create proxy model UnmanagedPony")
new_state = project_state.clone()
operation.state_forwards("test_crummo", new_state)
self.assertIn(("test_crummo", "unmanagedpony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crummo", editor, project_state, new_state)
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crummo", editor, new_state, project_state)
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
def test_create_model_managers(self):
"""
The managers on a model are set.
"""
project_state = self.set_up_test_model("test_cmoma")
# Test the state alteration
operation = migrations.CreateModel(
"Food",
fields=[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
]
)
self.assertEqual(operation.describe(), "Create model Food")
new_state = project_state.clone()
operation.state_forwards("test_cmoma", new_state)
self.assertIn(("test_cmoma", "food"), new_state.models)
managers = new_state.models["test_cmoma", "food"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
def test_delete_model(self):
"""
Tests the DeleteModel operation.
"""
project_state = self.set_up_test_model("test_dlmo")
# Test the state alteration
operation = migrations.DeleteModel("Pony")
self.assertEqual(operation.describe(), "Delete model Pony")
new_state = project_state.clone()
operation.state_forwards("test_dlmo", new_state)
self.assertNotIn(("test_dlmo", "pony"), new_state.models)
# Test the database alteration
self.assertTableExists("test_dlmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dlmo", editor, project_state, new_state)
self.assertTableNotExists("test_dlmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_dlmo", editor, new_state, project_state)
self.assertTableExists("test_dlmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "DeleteModel")
self.assertEqual(definition[1], [])
self.assertEqual(list(definition[2]), ["name"])
def test_delete_proxy_model(self):
"""
Tests the DeleteModel operation ignores proxy models.
"""
project_state = self.set_up_test_model("test_dlprmo", proxy_model=True)
# Test the state alteration
operation = migrations.DeleteModel("ProxyPony")
new_state = project_state.clone()
operation.state_forwards("test_dlprmo", new_state)
self.assertIn(("test_dlprmo", "proxypony"), project_state.models)
self.assertNotIn(("test_dlprmo", "proxypony"), new_state.models)
# Test the database alteration
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dlprmo", editor, project_state, new_state)
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_dlprmo", editor, new_state, project_state)
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
def test_rename_model(self):
"""
Tests the RenameModel operation.
"""
project_state = self.set_up_test_model("test_rnmo", related_model=True)
# Test the state alteration
operation = migrations.RenameModel("Pony", "Horse")
self.assertEqual(operation.describe(), "Rename model Pony to Horse")
# Test initial state and database
self.assertIn(("test_rnmo", "pony"), project_state.models)
self.assertNotIn(("test_rnmo", "horse"), project_state.models)
self.assertTableExists("test_rnmo_pony")
self.assertTableNotExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id"))
self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id"))
# Migrate forwards
new_state = project_state.clone()
atomic_rename = connection.features.supports_atomic_references_rename
new_state = self.apply_operations("test_rnmo", new_state, [operation], atomic=atomic_rename)
# Test new state and database
self.assertNotIn(("test_rnmo", "pony"), new_state.models)
self.assertIn(("test_rnmo", "horse"), new_state.models)
# RenameModel also repoints all incoming FKs and M2Ms
self.assertEqual("test_rnmo.Horse", new_state.models["test_rnmo", "rider"].fields[1][1].remote_field.model)
self.assertTableNotExists("test_rnmo_pony")
self.assertTableExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id"))
self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id"))
# Migrate backwards
original_state = self.unapply_operations("test_rnmo", project_state, [operation], atomic=atomic_rename)
# Test original state and database
self.assertIn(("test_rnmo", "pony"), original_state.models)
self.assertNotIn(("test_rnmo", "horse"), original_state.models)
self.assertEqual("Pony", original_state.models["test_rnmo", "rider"].fields[1][1].remote_field.model)
self.assertTableExists("test_rnmo_pony")
self.assertTableNotExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id"))
self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id"))
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RenameModel")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'old_name': "Pony", 'new_name': "Horse"})
def test_rename_model_state_forwards(self):
"""
RenameModel operations shouldn't trigger the caching of rendered apps
on state without prior apps.
"""
state = ProjectState()
state.add_model(ModelState('migrations', 'Foo', []))
operation = migrations.RenameModel('Foo', 'Bar')
operation.state_forwards('migrations', state)
self.assertNotIn('apps', state.__dict__)
self.assertNotIn(('migrations', 'foo'), state.models)
self.assertIn(('migrations', 'bar'), state.models)
# Now with apps cached.
apps = state.apps
operation = migrations.RenameModel('Bar', 'Foo')
operation.state_forwards('migrations', state)
self.assertIs(state.apps, apps)
self.assertNotIn(('migrations', 'bar'), state.models)
self.assertIn(('migrations', 'foo'), state.models)
def test_rename_model_with_self_referential_fk(self):
"""
Tests the RenameModel operation on model with self referential FK.
"""
project_state = self.set_up_test_model("test_rmwsrf", related_model=True)
# Test the state alteration
operation = migrations.RenameModel("Rider", "HorseRider")
self.assertEqual(operation.describe(), "Rename model Rider to HorseRider")
new_state = project_state.clone()
operation.state_forwards("test_rmwsrf", new_state)
self.assertNotIn(("test_rmwsrf", "rider"), new_state.models)
self.assertIn(("test_rmwsrf", "horserider"), new_state.models)
# Remember, RenameModel also repoints all incoming FKs and M2Ms
self.assertEqual(
'self',
new_state.models["test_rmwsrf", "horserider"].fields[2][1].remote_field.model
)
HorseRider = new_state.apps.get_model('test_rmwsrf', 'horserider')
self.assertIs(HorseRider._meta.get_field('horserider').remote_field.model, HorseRider)
# Test the database alteration
self.assertTableExists("test_rmwsrf_rider")
self.assertTableNotExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id"))
self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id"))
atomic_rename = connection.features.supports_atomic_references_rename
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_forwards("test_rmwsrf", editor, project_state, new_state)
self.assertTableNotExists("test_rmwsrf_rider")
self.assertTableExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKNotExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_rider", "id"))
self.assertFKExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_horserider", "id"))
# And test reversal
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_backwards("test_rmwsrf", editor, new_state, project_state)
self.assertTableExists("test_rmwsrf_rider")
self.assertTableNotExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id"))
self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id"))
def test_rename_model_with_superclass_fk(self):
"""
Tests the RenameModel operation on a model which has a superclass that
has a foreign key.
"""
project_state = self.set_up_test_model("test_rmwsc", related_model=True, mti_model=True)
# Test the state alteration
operation = migrations.RenameModel("ShetlandPony", "LittleHorse")
self.assertEqual(operation.describe(), "Rename model ShetlandPony to LittleHorse")
new_state = project_state.clone()
operation.state_forwards("test_rmwsc", new_state)
self.assertNotIn(("test_rmwsc", "shetlandpony"), new_state.models)
self.assertIn(("test_rmwsc", "littlehorse"), new_state.models)
# RenameModel shouldn't repoint the superclass's relations, only local ones
self.assertEqual(
project_state.models["test_rmwsc", "rider"].fields[1][1].remote_field.model,
new_state.models["test_rmwsc", "rider"].fields[1][1].remote_field.model
)
# Before running the migration we have a table for Shetland Pony, not Little Horse
self.assertTableExists("test_rmwsc_shetlandpony")
self.assertTableNotExists("test_rmwsc_littlehorse")
if connection.features.supports_foreign_keys:
# and the foreign key on rider points to pony, not shetland pony
self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id"))
self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_shetlandpony", "id"))
with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor:
operation.database_forwards("test_rmwsc", editor, project_state, new_state)
# Now we have a little horse table, not shetland pony
self.assertTableNotExists("test_rmwsc_shetlandpony")
self.assertTableExists("test_rmwsc_littlehorse")
if connection.features.supports_foreign_keys:
# but the Foreign keys still point at pony, not little horse
self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id"))
self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_littlehorse", "id"))
def test_rename_model_with_self_referential_m2m(self):
app_label = "test_rename_model_with_self_referential_m2m"
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel("ReflexivePony", fields=[
("id", models.AutoField(primary_key=True)),
("ponies", models.ManyToManyField("self")),
]),
])
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameModel("ReflexivePony", "ReflexivePony2"),
], atomic=connection.features.supports_atomic_references_rename)
Pony = project_state.apps.get_model(app_label, "ReflexivePony2")
pony = Pony.objects.create()
pony.ponies.add(pony)
def test_rename_model_with_m2m(self):
app_label = "test_rename_model_with_m2m"
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel("Rider", fields=[
("id", models.AutoField(primary_key=True)),
]),
migrations.CreateModel("Pony", fields=[
("id", models.AutoField(primary_key=True)),
("riders", models.ManyToManyField("Rider")),
]),
])
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameModel("Pony", "Pony2"),
], atomic=connection.features.supports_atomic_references_rename)
Pony = project_state.apps.get_model(app_label, "Pony2")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
self.assertEqual(Pony.objects.count(), 2)
self.assertEqual(Rider.objects.count(), 2)
self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2)
def test_rename_m2m_target_model(self):
app_label = "test_rename_m2m_target_model"
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel("Rider", fields=[
("id", models.AutoField(primary_key=True)),
]),
migrations.CreateModel("Pony", fields=[
("id", models.AutoField(primary_key=True)),
("riders", models.ManyToManyField("Rider")),
]),
])
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameModel("Rider", "Rider2"),
], atomic=connection.features.supports_atomic_references_rename)
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider2")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
self.assertEqual(Pony.objects.count(), 2)
self.assertEqual(Rider.objects.count(), 2)
self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2)
def test_rename_m2m_through_model(self):
app_label = "test_rename_through"
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel("Rider", fields=[
("id", models.AutoField(primary_key=True)),
]),
migrations.CreateModel("Pony", fields=[
("id", models.AutoField(primary_key=True)),
]),
migrations.CreateModel("PonyRider", fields=[
("id", models.AutoField(primary_key=True)),
("rider", models.ForeignKey("test_rename_through.Rider", models.CASCADE)),
("pony", models.ForeignKey("test_rename_through.Pony", models.CASCADE)),
]),
migrations.AddField(
"Pony",
"riders",
models.ManyToManyField("test_rename_through.Rider", through="test_rename_through.PonyRider"),
),
])
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
PonyRider = project_state.apps.get_model(app_label, "PonyRider")
pony = Pony.objects.create()
rider = Rider.objects.create()
PonyRider.objects.create(pony=pony, rider=rider)
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameModel("PonyRider", "PonyRider2"),
])
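# Rows created via the old through model should remain reachable through the renamed one.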
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
PonyRider = project_state.apps.get_model(app_label, "PonyRider2")
pony = Pony.objects.first()
rider = Rider.objects.create()
PonyRider.objects.create(pony=pony, rider=rider)
self.assertEqual(Pony.objects.count(), 1)
self.assertEqual(Rider.objects.count(), 2)
self.assertEqual(PonyRider.objects.count(), 2)
self.assertEqual(pony.riders.count(), 2)
def test_add_field(self):
"""
Tests the AddField operation.
"""
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=5),
)
self.assertEqual(operation.describe(), "Add field height to Pony")
project_state, new_state = self.make_test_state("test_adfl", operation)
self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 4)
field = [
f for n, f in new_state.models["test_adfl", "pony"].fields
if n == "height"
][0]
self.assertEqual(field.default, 5)
# Test the database alteration
self.assertColumnNotExists("test_adfl_pony", "height")
with connection.schema_editor() as editor:
operation.database_forwards("test_adfl", editor, project_state, new_state)
self.assertColumnExists("test_adfl_pony", "height")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adfl", editor, new_state, project_state)
self.assertColumnNotExists("test_adfl_pony", "height")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddField")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])
def test_add_charfield(self):
"""
Tests the AddField operation on CharField.
"""
project_state = self.set_up_test_model("test_adchfl")
Pony = project_state.apps.get_model("test_adchfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations("test_adchfl", project_state, [
migrations.AddField(
"Pony",
"text",
models.CharField(max_length=10, default="some text"),
),
migrations.AddField(
"Pony",
"empty",
models.CharField(max_length=10, default=""),
),
# If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.CharField(max_length=10, default="42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.CharField(max_length=10, default='"\'"'),
),
])
Pony = new_state.apps.get_model("test_adchfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
self.assertEqual(pony.text, "some text")
self.assertEqual(pony.empty, "")
self.assertEqual(pony.digits, "42")
self.assertEqual(pony.quotes, '"\'"')
def test_add_textfield(self):
"""
Tests the AddField operation on TextField.
"""
project_state = self.set_up_test_model("test_adtxtfl")
Pony = project_state.apps.get_model("test_adtxtfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations("test_adtxtfl", project_state, [
migrations.AddField(
"Pony",
"text",
models.TextField(default="some text"),
),
migrations.AddField(
"Pony",
"empty",
models.TextField(default=""),
),
# If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.TextField(default="42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.TextField(default='"\'"'),
),
])
Pony = new_state.apps.get_model("test_adtxtfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
self.assertEqual(pony.text, "some text")
self.assertEqual(pony.empty, "")
self.assertEqual(pony.digits, "42")
self.assertEqual(pony.quotes, '"\'"')
def test_add_binaryfield(self):
"""
Tests the AddField operation on BinaryField.
"""
project_state = self.set_up_test_model("test_adbinfl")
Pony = project_state.apps.get_model("test_adbinfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations("test_adbinfl", project_state, [
migrations.AddField(
"Pony",
"blob",
models.BinaryField(default=b"some text"),
),
migrations.AddField(
"Pony",
"empty",
models.BinaryField(default=b""),
),
# If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.BinaryField(default=b"42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.BinaryField(default=b'"\'"'),
),
])
Pony = new_state.apps.get_model("test_adbinfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
# SQLite returns buffer/memoryview, cast to bytes for checking.
self.assertEqual(bytes(pony.blob), b"some text")
self.assertEqual(bytes(pony.empty), b"")
self.assertEqual(bytes(pony.digits), b"42")
self.assertEqual(bytes(pony.quotes), b'"\'"')
def test_column_name_quoting(self):
"""
Column names that are SQL keywords shouldn't cause problems when used
in migrations (#22168).
"""
project_state = self.set_up_test_model("test_regr22168")
operation = migrations.AddField(
"Pony",
"order",
models.IntegerField(default=0),
)
new_state = project_state.clone()
operation.state_forwards("test_regr22168", new_state)
with connection.schema_editor() as editor:
operation.database_forwards("test_regr22168", editor, project_state, new_state)
self.assertColumnExists("test_regr22168_pony", "order")
def test_add_field_preserve_default(self):
"""
Tests the AddField operation's state alteration
when preserve_default = False.
"""
project_state = self.set_up_test_model("test_adflpd")
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=4),
preserve_default=False,
)
new_state = project_state.clone()
operation.state_forwards("test_adflpd", new_state)
self.assertEqual(len(new_state.models["test_adflpd", "pony"].fields), 4)
field = [
f for n, f in new_state.models["test_adflpd", "pony"].fields
if n == "height"
][0]
self.assertEqual(field.default, NOT_PROVIDED)
# Test the database alteration
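# Create a row first so the one-off default has to backfill the new column.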
project_state.apps.get_model("test_adflpd", "pony").objects.create(
weight=4,
)
self.assertColumnNotExists("test_adflpd_pony", "height")
with connection.schema_editor() as editor:
operation.database_forwards("test_adflpd", editor, project_state, new_state)
self.assertColumnExists("test_adflpd_pony", "height")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddField")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["field", "model_name", "name", "preserve_default"])
def test_add_field_m2m(self):
"""
Tests the AddField operation with a ManyToManyField.
"""
project_state = self.set_up_test_model("test_adflmm", second_model=True)
# Test the state alteration
operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies"))
new_state = project_state.clone()
operation.state_forwards("test_adflmm", new_state)
self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4)
# Test the database alteration
self.assertTableNotExists("test_adflmm_pony_stables")
with connection.schema_editor() as editor:
operation.database_forwards("test_adflmm", editor, project_state, new_state)
self.assertTableExists("test_adflmm_pony_stables")
self.assertColumnNotExists("test_adflmm_pony", "stables")
# Make sure the M2M field actually works
with atomic():
Pony = new_state.apps.get_model("test_adflmm", "Pony")
p = Pony.objects.create(pink=False, weight=4.55)
p.stables.create()
self.assertEqual(p.stables.count(), 1)
p.stables.all().delete()
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adflmm", editor, new_state, project_state)
self.assertTableNotExists("test_adflmm_pony_stables")
def test_alter_field_m2m(self):
project_state = self.set_up_test_model("test_alflmm", second_model=True)
project_state = self.apply_operations("test_alflmm", project_state, operations=[
migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies"))
])
Pony = project_state.apps.get_model("test_alflmm", "Pony")
self.assertFalse(Pony._meta.get_field('stables').blank)
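# blank is a Python-level validation option, so this AlterField only changes state; no schema change is required.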
project_state = self.apply_operations("test_alflmm", project_state, operations=[
migrations.AlterField(
"Pony", "stables", models.ManyToManyField(to="Stable", related_name="ponies", blank=True)
)
])
Pony = project_state.apps.get_model("test_alflmm", "Pony")
self.assertTrue(Pony._meta.get_field('stables').blank)
def test_repoint_field_m2m(self):
project_state = self.set_up_test_model("test_alflmm", second_model=True, third_model=True)
project_state = self.apply_operations("test_alflmm", project_state, operations=[
migrations.AddField("Pony", "places", models.ManyToManyField("Stable", related_name="ponies"))
])
Pony = project_state.apps.get_model("test_alflmm", "Pony")
project_state = self.apply_operations("test_alflmm", project_state, operations=[
migrations.AlterField("Pony", "places", models.ManyToManyField(to="Van", related_name="ponies"))
])
# Ensure the new field actually works
Pony = project_state.apps.get_model("test_alflmm", "Pony")
p = Pony.objects.create(pink=False, weight=4.55)
p.places.create()
self.assertEqual(p.places.count(), 1)
p.places.all().delete()
def test_remove_field_m2m(self):
project_state = self.set_up_test_model("test_rmflmm", second_model=True)
project_state = self.apply_operations("test_rmflmm", project_state, operations=[
migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies"))
])
self.assertTableExists("test_rmflmm_pony_stables")
with_field_state = project_state.clone()
operations = [migrations.RemoveField("Pony", "stables")]
project_state = self.apply_operations("test_rmflmm", project_state, operations=operations)
self.assertTableNotExists("test_rmflmm_pony_stables")
# And test reversal
self.unapply_operations("test_rmflmm", with_field_state, operations=operations)
self.assertTableExists("test_rmflmm_pony_stables")
def test_remove_field_m2m_with_through(self):
project_state = self.set_up_test_model("test_rmflmmwt", second_model=True)
self.assertTableNotExists("test_rmflmmwt_ponystables")
project_state = self.apply_operations("test_rmflmmwt", project_state, operations=[
migrations.CreateModel("PonyStables", fields=[
("pony", models.ForeignKey('test_rmflmmwt.Pony', models.CASCADE)),
("stable", models.ForeignKey('test_rmflmmwt.Stable', models.CASCADE)),
]),
migrations.AddField(
"Pony", "stables",
models.ManyToManyField("Stable", related_name="ponies", through='test_rmflmmwt.PonyStables')
)
])
self.assertTableExists("test_rmflmmwt_ponystables")
operations = [migrations.RemoveField("Pony", "stables"), migrations.DeleteModel("PonyStables")]
self.apply_operations("test_rmflmmwt", project_state, operations=operations)
def test_remove_field(self):
"""
Tests the RemoveField operation.
"""
project_state = self.set_up_test_model("test_rmfl")
# Test the state alteration
operation = migrations.RemoveField("Pony", "pink")
self.assertEqual(operation.describe(), "Remove field pink from Pony")
new_state = project_state.clone()
operation.state_forwards("test_rmfl", new_state)
self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 2)
# Test the database alteration
self.assertColumnExists("test_rmfl_pony", "pink")
with connection.schema_editor() as editor:
operation.database_forwards("test_rmfl", editor, project_state, new_state)
self.assertColumnNotExists("test_rmfl_pony", "pink")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_rmfl", editor, new_state, project_state)
self.assertColumnExists("test_rmfl_pony", "pink")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveField")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'name': 'pink'})
def test_remove_fk(self):
"""
Tests the RemoveField operation on a foreign key.
"""
project_state = self.set_up_test_model("test_rfk", related_model=True)
self.assertColumnExists("test_rfk_rider", "pony_id")
operation = migrations.RemoveField("Rider", "pony")
new_state = project_state.clone()
operation.state_forwards("test_rfk", new_state)
with connection.schema_editor() as editor:
operation.database_forwards("test_rfk", editor, project_state, new_state)
self.assertColumnNotExists("test_rfk_rider", "pony_id")
with connection.schema_editor() as editor:
operation.database_backwards("test_rfk", editor, new_state, project_state)
self.assertColumnExists("test_rfk_rider", "pony_id")
def test_alter_model_table(self):
"""
Tests the AlterModelTable operation.
"""
project_state = self.set_up_test_model("test_almota")
# Test the state alteration
operation = migrations.AlterModelTable("Pony", "test_almota_pony_2")
self.assertEqual(operation.describe(), "Rename table for Pony to test_almota_pony_2")
new_state = project_state.clone()
operation.state_forwards("test_almota", new_state)
self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony_2")
# Test the database alteration
self.assertTableExists("test_almota_pony")
self.assertTableNotExists("test_almota_pony_2")
with connection.schema_editor() as editor:
operation.database_forwards("test_almota", editor, project_state, new_state)
self.assertTableNotExists("test_almota_pony")
self.assertTableExists("test_almota_pony_2")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_almota", editor, new_state, project_state)
self.assertTableExists("test_almota_pony")
self.assertTableNotExists("test_almota_pony_2")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelTable")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'table': "test_almota_pony_2"})
def test_alter_model_table_none(self):
"""
Tests the AlterModelTable operation if the table name is set to None.
"""
operation = migrations.AlterModelTable("Pony", None)
self.assertEqual(operation.describe(), "Rename table for Pony to (default)")
def test_alter_model_table_noop(self):
"""
Tests the AlterModelTable operation if the table name is not changed.
"""
project_state = self.set_up_test_model("test_almota")
# Test the state alteration
operation = migrations.AlterModelTable("Pony", "test_almota_pony")
new_state = project_state.clone()
operation.state_forwards("test_almota", new_state)
self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony")
# Test the database alteration
self.assertTableExists("test_almota_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_almota", editor, project_state, new_state)
self.assertTableExists("test_almota_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_almota", editor, new_state, project_state)
self.assertTableExists("test_almota_pony")
def test_alter_model_table_m2m(self):
"""
AlterModelTable should rename auto-generated M2M tables.
"""
app_label = "test_talflmltlm2m"
pony_db_table = 'pony_foo'
project_state = self.set_up_test_model(app_label, second_model=True, db_table=pony_db_table)
# Add the M2M field
first_state = project_state.clone()
operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable"))
operation.state_forwards(app_label, first_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, first_state)
original_m2m_table = "%s_%s" % (pony_db_table, "stables")
new_m2m_table = "%s_%s" % (app_label, "pony_stables")
self.assertTableExists(original_m2m_table)
self.assertTableNotExists(new_m2m_table)
# Rename the Pony db_table which should also rename the m2m table.
second_state = first_state.clone()
operation = migrations.AlterModelTable(name='pony', table=None)
operation.state_forwards(app_label, second_state)
atomic_rename = connection.features.supports_atomic_references_rename
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_forwards(app_label, editor, first_state, second_state)
self.assertTableExists(new_m2m_table)
self.assertTableNotExists(original_m2m_table)
# And test reversal
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_backwards(app_label, editor, second_state, first_state)
self.assertTableExists(original_m2m_table)
self.assertTableNotExists(new_m2m_table)
def test_alter_field(self):
"""
Tests the AlterField operation.
"""
project_state = self.set_up_test_model("test_alfl")
# Test the state alteration
operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True))
self.assertEqual(operation.describe(), "Alter field pink on Pony")
new_state = project_state.clone()
operation.state_forwards("test_alfl", new_state)
self.assertIs(project_state.models["test_alfl", "pony"].get_field_by_name("pink").null, False)
self.assertIs(new_state.models["test_alfl", "pony"].get_field_by_name("pink").null, True)
# Test the database alteration
self.assertColumnNotNull("test_alfl_pony", "pink")
with connection.schema_editor() as editor:
operation.database_forwards("test_alfl", editor, project_state, new_state)
self.assertColumnNull("test_alfl_pony", "pink")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alfl", editor, new_state, project_state)
self.assertColumnNotNull("test_alfl_pony", "pink")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterField")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])
def test_alter_field_pk(self):
"""
Tests the AlterField operation on primary keys (for things like PostgreSQL's SERIAL weirdness).
"""
project_state = self.set_up_test_model("test_alflpk")
# Test the state alteration
operation = migrations.AlterField("Pony", "id", models.IntegerField(primary_key=True))
new_state = project_state.clone()
operation.state_forwards("test_alflpk", new_state)
self.assertIsInstance(project_state.models["test_alflpk", "pony"].get_field_by_name("id"), models.AutoField)
self.assertIsInstance(new_state.models["test_alflpk", "pony"].get_field_by_name("id"), models.IntegerField)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alflpk", editor, project_state, new_state)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alflpk", editor, new_state, project_state)
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_field_pk_fk(self):
"""
Tests that AlterField on a primary key updates any FKs pointing to it.
"""
project_state = self.set_up_test_model("test_alflpkfk", related_model=True)
# Test the state alteration
operation = migrations.AlterField("Pony", "id", models.FloatField(primary_key=True))
new_state = project_state.clone()
operation.state_forwards("test_alflpkfk", new_state)
self.assertIsInstance(project_state.models["test_alflpkfk", "pony"].get_field_by_name("id"), models.AutoField)
self.assertIsInstance(new_state.models["test_alflpkfk", "pony"].get_field_by_name("id"), models.FloatField)
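# The PK column and the FK column referencing it must agree on type and nullability.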
def assertIdTypeEqualsFkType():
with connection.cursor() as cursor:
id_type, id_null = [
(c.type_code, c.null_ok)
for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_pony")
if c.name == "id"
][0]
fk_type, fk_null = [
(c.type_code, c.null_ok)
for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_rider")
if c.name == "pony_id"
][0]
self.assertEqual(id_type, fk_type)
self.assertEqual(id_null, fk_null)
assertIdTypeEqualsFkType()
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alflpkfk", editor, project_state, new_state)
assertIdTypeEqualsFkType()
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alflpkfk", editor, new_state, project_state)
assertIdTypeEqualsFkType()
def test_alter_field_reloads_state_on_fk_target_changes(self):
"""
If AlterField doesn't reload state appropriately, the second AlterField
crashes on MySQL due to not dropping the PonyRider.pony foreign key
constraint before modifying the column.
"""
app_label = 'alter_alter_field_reloads_state_on_fk_target_changes'
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel('Rider', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
]),
migrations.CreateModel('Pony', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)),
]),
migrations.CreateModel('PonyRider', fields=[
('id', models.AutoField(primary_key=True)),
('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)),
]),
])
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.AlterField('Rider', 'id', models.CharField(primary_key=True, max_length=99)),
migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)),
])
def test_alter_field_reloads_state_on_fk_with_to_field_target_changes(self):
"""
If AlterField doesn't reload state appropriately, the second AlterField
crashes on MySQL due to not dropping the PonyRider.pony foreign key
constraint before modifying the column.
"""
app_label = 'alter_alter_field_reloads_state_on_fk_with_to_field_target_changes'
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel('Rider', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
('slug', models.CharField(unique=True, max_length=100)),
]),
migrations.CreateModel('Pony', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE, to_field='slug')),
('slug', models.CharField(unique=True, max_length=100)),
]),
migrations.CreateModel('PonyRider', fields=[
('id', models.AutoField(primary_key=True)),
('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE, to_field='slug')),
]),
])
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.AlterField('Rider', 'slug', models.CharField(unique=True, max_length=99)),
migrations.AlterField('Pony', 'slug', models.CharField(unique=True, max_length=99)),
])
def test_rename_field_reloads_state_on_fk_target_changes(self):
"""
If RenameField doesn't reload state appropriately, the AlterField
crashes on MySQL due to not dropping the PonyRider.pony foreign key
constraint before modifying the column.
"""
app_label = 'alter_rename_field_reloads_state_on_fk_target_changes'
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel('Rider', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
]),
migrations.CreateModel('Pony', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)),
]),
migrations.CreateModel('PonyRider', fields=[
('id', models.AutoField(primary_key=True)),
('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)),
]),
])
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameField('Rider', 'id', 'id2'),
migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)),
], atomic=connection.features.supports_atomic_references_rename)
def test_rename_field(self):
"""
Tests the RenameField operation.
"""
project_state = self.set_up_test_model("test_rnfl", unique_together=True, index_together=True)
# Test the state alteration
operation = migrations.RenameField("Pony", "pink", "blue")
self.assertEqual(operation.describe(), "Rename field pink on Pony to blue")
new_state = project_state.clone()
operation.state_forwards("test_rnfl", new_state)
self.assertIn("blue", [n for n, f in new_state.models["test_rnfl", "pony"].fields])
self.assertNotIn("pink", [n for n, f in new_state.models["test_rnfl", "pony"].fields])
# Make sure the unique_together has the renamed column too
self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['unique_together'][0])
self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['unique_together'][0])
# Make sure the index_together has the renamed column too
self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['index_together'][0])
self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['index_together'][0])
# Test the database alteration
self.assertColumnExists("test_rnfl_pony", "pink")
self.assertColumnNotExists("test_rnfl_pony", "blue")
with connection.schema_editor() as editor:
operation.database_forwards("test_rnfl", editor, project_state, new_state)
self.assertColumnExists("test_rnfl_pony", "blue")
self.assertColumnNotExists("test_rnfl_pony", "pink")
# Ensure the unique constraint has been ported over
with connection.cursor() as cursor:
cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)")
with self.assertRaises(IntegrityError):
with atomic():
cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_rnfl_pony")
# Ensure the index constraint has been ported over
self.assertIndexExists("test_rnfl_pony", ["weight", "blue"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_rnfl", editor, new_state, project_state)
self.assertColumnExists("test_rnfl_pony", "pink")
self.assertColumnNotExists("test_rnfl_pony", "blue")
# Ensure the index constraint has been reset
self.assertIndexExists("test_rnfl_pony", ["weight", "pink"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RenameField")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'old_name': "pink", 'new_name': "blue"})
def test_rename_missing_field(self):
state = ProjectState()
state.add_model(ModelState('app', 'model', []))
with self.assertRaisesMessage(FieldDoesNotExist, "app.model has no field named 'field'"):
migrations.RenameField('model', 'field', 'new_field').state_forwards('app', state)
def test_alter_unique_together(self):
"""
Tests the AlterUniqueTogether operation.
"""
project_state = self.set_up_test_model("test_alunto")
# Test the state alteration
operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")])
self.assertEqual(operation.describe(), "Alter unique_together for Pony (1 constraint(s))")
new_state = project_state.clone()
operation.state_forwards("test_alunto", new_state)
self.assertEqual(len(project_state.models["test_alunto", "pony"].options.get("unique_together", set())), 0)
self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1)
# Make sure we can insert duplicate rows
with connection.cursor() as cursor:
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_alunto_pony")
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alunto", editor, project_state, new_state)
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
with self.assertRaises(IntegrityError):
with atomic():
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_alunto_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alunto", editor, new_state, project_state)
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_alunto_pony")
# Test flat unique_together
operation = migrations.AlterUniqueTogether("Pony", ("pink", "weight"))
operation.state_forwards("test_alunto", new_state)
self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterUniqueTogether")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'unique_together': {("pink", "weight")}})
def test_alter_unique_together_remove(self):
operation = migrations.AlterUniqueTogether("Pony", None)
self.assertEqual(operation.describe(), "Alter unique_together for Pony (0 constraint(s))")
def test_add_index(self):
"""
Test the AddIndex operation.
"""
project_state = self.set_up_test_model("test_adin")
msg = (
"Indexes passed to AddIndex operations require a name argument. "
"<Index: fields='pink'> doesn't have one."
)
with self.assertRaisesMessage(ValueError, msg):
migrations.AddIndex("Pony", models.Index(fields=["pink"]))
index = models.Index(fields=["pink"], name="test_adin_pony_pink_idx")
operation = migrations.AddIndex("Pony", index)
self.assertEqual(operation.describe(), "Create index test_adin_pony_pink_idx on field(s) pink of model Pony")
new_state = project_state.clone()
operation.state_forwards("test_adin", new_state)
self.assertEqual(len(new_state.models["test_adin", "pony"].options['indexes']), 1)
# Test the database alteration
self.assertIndexNotExists("test_adin_pony", ["pink"])
with connection.schema_editor() as editor:
operation.database_forwards("test_adin", editor, project_state, new_state)
self.assertIndexExists("test_adin_pony", ["pink"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adin", editor, new_state, project_state)
self.assertIndexNotExists("test_adin_pony", ["pink"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddIndex")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'index': index})
def test_remove_index(self):
"""
Test the RemoveIndex operation.
"""
project_state = self.set_up_test_model("test_rmin", multicol_index=True)
self.assertTableExists("test_rmin_pony")
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
operation = migrations.RemoveIndex("Pony", "pony_test_idx")
self.assertEqual(operation.describe(), "Remove index pony_test_idx from Pony")
new_state = project_state.clone()
operation.state_forwards("test_rmin", new_state)
# Test the state alteration
self.assertEqual(len(new_state.models["test_rmin", "pony"].options['indexes']), 0)
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_rmin", editor, project_state, new_state)
self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_rmin", editor, new_state, project_state)
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveIndex")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'name': "pony_test_idx"})
# Also test removing a field that has an index (exercises SQLite's table remake).
operations = [
migrations.RemoveIndex("Pony", "pony_test_idx"),
migrations.RemoveField("Pony", "pink"),
]
self.assertColumnExists("test_rmin_pony", "pink")
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
# Test database alteration
new_state = project_state.clone()
self.apply_operations('test_rmin', new_state, operations=operations)
self.assertColumnNotExists("test_rmin_pony", "pink")
self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"])
# And test reversal
self.unapply_operations("test_rmin", project_state, operations=operations)
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
def test_add_index_state_forwards(self):
project_state = self.set_up_test_model('test_adinsf')
index = models.Index(fields=['pink'], name='test_adinsf_pony_pink_idx')
old_model = project_state.apps.get_model('test_adinsf', 'Pony')
new_state = project_state.clone()
operation = migrations.AddIndex('Pony', index)
operation.state_forwards('test_adinsf', new_state)
new_model = new_state.apps.get_model('test_adinsf', 'Pony')
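# state_forwards should render a fresh model class rather than reuse the cached one.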
self.assertIsNot(old_model, new_model)
def test_remove_index_state_forwards(self):
project_state = self.set_up_test_model('test_rminsf')
index = models.Index(fields=['pink'], name='test_rminsf_pony_pink_idx')
migrations.AddIndex('Pony', index).state_forwards('test_rminsf', project_state)
old_model = project_state.apps.get_model('test_rminsf', 'Pony')
new_state = project_state.clone()
operation = migrations.RemoveIndex('Pony', 'test_rminsf_pony_pink_idx')
operation.state_forwards('test_rminsf', new_state)
new_model = new_state.apps.get_model('test_rminsf', 'Pony')
self.assertIsNot(old_model, new_model)
def test_alter_field_with_index(self):
"""
Test the AlterField operation on an indexed field to ensure indexes
created via Meta.indexes aren't dropped by SQLite's table remake.
"""
project_state = self.set_up_test_model("test_alflin", index=True)
operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True))
new_state = project_state.clone()
operation.state_forwards("test_alflin", new_state)
# Test the database alteration
self.assertColumnNotNull("test_alflin_pony", "pink")
with connection.schema_editor() as editor:
operation.database_forwards("test_alflin", editor, project_state, new_state)
# Index hasn't been dropped
self.assertIndexExists("test_alflin_pony", ["pink"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alflin", editor, new_state, project_state)
# Ensure the index is still there
self.assertIndexExists("test_alflin_pony", ["pink"])
def test_alter_index_together(self):
"""
Tests the AlterIndexTogether operation.
"""
project_state = self.set_up_test_model("test_alinto")
# Test the state alteration
operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")])
self.assertEqual(operation.describe(), "Alter index_together for Pony (1 constraint(s))")
new_state = project_state.clone()
operation.state_forwards("test_alinto", new_state)
self.assertEqual(len(project_state.models["test_alinto", "pony"].options.get("index_together", set())), 0)
self.assertEqual(len(new_state.models["test_alinto", "pony"].options.get("index_together", set())), 1)
# Make sure there's no matching index
self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alinto", editor, project_state, new_state)
self.assertIndexExists("test_alinto_pony", ["pink", "weight"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alinto", editor, new_state, project_state)
self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterIndexTogether")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'index_together': {("pink", "weight")}})
def test_alter_index_together_remove(self):
operation = migrations.AlterIndexTogether("Pony", None)
self.assertEqual(operation.describe(), "Alter index_together for Pony (0 constraint(s))")
def test_alter_model_options(self):
"""
Tests the AlterModelOptions operation.
"""
project_state = self.set_up_test_model("test_almoop")
# Test the state alteration (no DB alteration to test)
operation = migrations.AlterModelOptions("Pony", {"permissions": [("can_groom", "Can groom")]})
self.assertEqual(operation.describe(), "Change Meta options on Pony")
new_state = project_state.clone()
operation.state_forwards("test_almoop", new_state)
self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 0)
self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 1)
self.assertEqual(new_state.models["test_almoop", "pony"].options["permissions"][0][0], "can_groom")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelOptions")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'options': {"permissions": [("can_groom", "Can groom")]}})
def test_alter_model_options_emptying(self):
"""
The AlterModelOptions operation removes keys from the dict (#23121)
"""
project_state = self.set_up_test_model("test_almoop", options=True)
# Test the state alteration (no DB alteration to test)
operation = migrations.AlterModelOptions("Pony", {})
self.assertEqual(operation.describe(), "Change Meta options on Pony")
new_state = project_state.clone()
operation.state_forwards("test_almoop", new_state)
self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 1)
self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 0)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelOptions")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'options': {}})
def test_alter_order_with_respect_to(self):
"""
Tests the AlterOrderWithRespectTo operation.
"""
project_state = self.set_up_test_model("test_alorwrtto", related_model=True)
# Test the state alteration
operation = migrations.AlterOrderWithRespectTo("Rider", "pony")
self.assertEqual(operation.describe(), "Set order_with_respect_to on Rider to pony")
new_state = project_state.clone()
operation.state_forwards("test_alorwrtto", new_state)
self.assertIsNone(
project_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None)
)
self.assertEqual(
new_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None),
"pony"
)
# Make sure there's no _order column yet
self.assertColumnNotExists("test_alorwrtto_rider", "_order")
# Create some rows before alteration
rendered_state = project_state.apps
pony = rendered_state.get_model("test_alorwrtto", "Pony").objects.create(weight=50)
rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony, friend_id=1)
rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony, friend_id=2)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alorwrtto", editor, project_state, new_state)
self.assertColumnExists("test_alorwrtto_rider", "_order")
# Check for correct value in rows
updated_riders = new_state.apps.get_model("test_alorwrtto", "Rider").objects.all()
self.assertEqual(updated_riders[0]._order, 0)
self.assertEqual(updated_riders[1]._order, 0)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alorwrtto", editor, new_state, project_state)
self.assertColumnNotExists("test_alorwrtto_rider", "_order")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterOrderWithRespectTo")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Rider", 'order_with_respect_to': "pony"})
def test_alter_model_managers(self):
"""
AlterModelManagers sets the managers on the model state.
"""
project_state = self.set_up_test_model("test_almoma")
# Test the state alteration
operation = migrations.AlterModelManagers(
"Pony",
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
]
)
self.assertEqual(operation.describe(), "Change managers on Pony")
managers = project_state.models["test_almoma", "pony"].managers
self.assertEqual(managers, [])
new_state = project_state.clone()
operation.state_forwards("test_almoma", new_state)
self.assertIn(("test_almoma", "pony"), new_state.models)
managers = new_state.models["test_almoma", "pony"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
rendered_state = new_state.apps
model = rendered_state.get_model('test_almoma', 'pony')
self.assertIsInstance(model.food_qs, models.Manager)
self.assertIsInstance(model.food_mgr, FoodManager)
self.assertIsInstance(model.food_mgr_kwargs, FoodManager)
def test_alter_model_managers_emptying(self):
"""
AlterModelManagers with an empty list removes all managers from the model state.
"""
project_state = self.set_up_test_model("test_almomae", manager_model=True)
# Test the state alteration
operation = migrations.AlterModelManagers("Food", managers=[])
self.assertEqual(operation.describe(), "Change managers on Food")
self.assertIn(("test_almomae", "food"), project_state.models)
managers = project_state.models["test_almomae", "food"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
new_state = project_state.clone()
operation.state_forwards("test_almomae", new_state)
managers = new_state.models["test_almomae", "food"].managers
self.assertEqual(managers, [])
def test_alter_fk(self):
"""
Creating and then altering an FK works correctly
and deals with the pending SQL (#23091)
"""
project_state = self.set_up_test_model("test_alfk")
# Test adding and then altering the FK in one go
create_operation = migrations.CreateModel(
name="Rider",
fields=[
("id", models.AutoField(primary_key=True)),
("pony", models.ForeignKey("Pony", models.CASCADE)),
],
)
create_state = project_state.clone()
create_operation.state_forwards("test_alfk", create_state)
alter_operation = migrations.AlterField(
model_name='Rider',
name='pony',
field=models.ForeignKey("Pony", models.CASCADE, editable=False),
)
alter_state = create_state.clone()
alter_operation.state_forwards("test_alfk", alter_state)
with connection.schema_editor() as editor:
create_operation.database_forwards("test_alfk", editor, project_state, create_state)
alter_operation.database_forwards("test_alfk", editor, create_state, alter_state)
def test_alter_fk_non_fk(self):
"""
Altering an FK to a non-FK works (#23244)
"""
# Test the state alteration
operation = migrations.AlterField(
model_name="Rider",
name="pony",
field=models.FloatField(),
)
project_state, new_state = self.make_test_state("test_afknfk", operation, related_model=True)
# Test the database alteration
self.assertColumnExists("test_afknfk_rider", "pony_id")
self.assertColumnNotExists("test_afknfk_rider", "pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_afknfk", editor, project_state, new_state)
self.assertColumnExists("test_afknfk_rider", "pony")
self.assertColumnNotExists("test_afknfk_rider", "pony_id")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_afknfk", editor, new_state, project_state)
self.assertColumnExists("test_afknfk_rider", "pony_id")
self.assertColumnNotExists("test_afknfk_rider", "pony")
@unittest.skipIf(sqlparse is None and connection.features.requires_sqlparse_for_splitting, "Missing sqlparse")
def test_run_sql(self):
"""
Tests the RunSQL operation.
"""
project_state = self.set_up_test_model("test_runsql")
# Create the operation
operation = migrations.RunSQL(
# Use a multi-line string with a comment to test splitting on SQLite and MySQL respectively
"CREATE TABLE i_love_ponies (id int, special_thing varchar(15));\n"
"INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'i love ponies'); -- this is magic!\n"
"INSERT INTO i_love_ponies (id, special_thing) VALUES (2, 'i love django');\n"
"UPDATE i_love_ponies SET special_thing = 'Ponies' WHERE special_thing LIKE '%%ponies';"
"UPDATE i_love_ponies SET special_thing = 'Django' WHERE special_thing LIKE '%django';",
# Run delete queries to test for parameter substitution failure
# reported in #23426
"DELETE FROM i_love_ponies WHERE special_thing LIKE '%Django%';"
"DELETE FROM i_love_ponies WHERE special_thing LIKE '%%Ponies%%';"
"DROP TABLE i_love_ponies",
state_operations=[migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])],
)
self.assertEqual(operation.describe(), "Raw SQL operation")
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_runsql", new_state)
self.assertEqual(len(new_state.models["test_runsql", "somethingelse"].fields), 1)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
# Test SQL collection
with connection.schema_editor(collect_sql=True) as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
self.assertIn("LIKE '%%ponies';", "\n".join(editor.collected_sql))
operation.database_backwards("test_runsql", editor, project_state, new_state)
self.assertIn("LIKE '%%Ponies%%';", "\n".join(editor.collected_sql))
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
self.assertTableExists("i_love_ponies")
# Make sure all the SQL was processed
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 2)
cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Django'")
self.assertEqual(cursor.fetchall()[0][0], 1)
cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Ponies'")
self.assertEqual(cursor.fetchall()[0][0], 1)
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards("test_runsql", editor, new_state, project_state)
self.assertTableNotExists("i_love_ponies")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunSQL")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["reverse_sql", "sql", "state_operations"])
# And elidable reduction
self.assertIs(False, operation.reduce(operation, []))
elidable_operation = migrations.RunSQL('SELECT 1 FROM void;', elidable=True)
self.assertEqual(elidable_operation.reduce(operation, []), [operation])
def test_run_sql_params(self):
"""
#23426 - RunSQL should accept parameters.
"""
project_state = self.set_up_test_model("test_runsql")
# Create the operation
operation = migrations.RunSQL(
["CREATE TABLE i_love_ponies (id int, special_thing varchar(15));"],
["DROP TABLE i_love_ponies"],
)
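# RunSQL statements may be bare strings, [sql, params] lists, or (sql, params) tuples.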
param_operation = migrations.RunSQL(
# forwards
(
"INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'Django');",
["INSERT INTO i_love_ponies (id, special_thing) VALUES (2, %s);", ['Ponies']],
("INSERT INTO i_love_ponies (id, special_thing) VALUES (%s, %s);", (3, 'Python',)),
),
# backwards
[
"DELETE FROM i_love_ponies WHERE special_thing = 'Django';",
["DELETE FROM i_love_ponies WHERE special_thing = 'Ponies';", None],
("DELETE FROM i_love_ponies WHERE id = %s OR special_thing = %s;", [3, 'Python']),
]
)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
new_state = project_state.clone()
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
# Test parameter passing
with connection.schema_editor() as editor:
param_operation.database_forwards("test_runsql", editor, project_state, new_state)
# Make sure all the SQL was processed
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 3)
with connection.schema_editor() as editor:
param_operation.database_backwards("test_runsql", editor, new_state, project_state)
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 0)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_runsql", editor, new_state, project_state)
self.assertTableNotExists("i_love_ponies")
def test_run_sql_params_invalid(self):
"""
#23426 - RunSQL should fail when a list of statements with an incorrect
number of tuples is given.
"""
project_state = self.set_up_test_model("test_runsql")
new_state = project_state.clone()
operation = migrations.RunSQL(
# forwards
[
["INSERT INTO foo (bar) VALUES ('buz');"]
],
# backwards
(
("DELETE FROM foo WHERE bar = 'buz';", 'invalid', 'parameter count'),
),
)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 1"):
operation.database_forwards("test_runsql", editor, project_state, new_state)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 3"):
operation.database_backwards("test_runsql", editor, new_state, project_state)
def test_run_sql_noop(self):
"""
#24098 - Tests no-op RunSQL operations.
"""
operation = migrations.RunSQL(migrations.RunSQL.noop, migrations.RunSQL.noop)
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, None, None)
operation.database_backwards("test_runsql", editor, None, None)
def test_run_python(self):
"""
Tests the RunPython operation.
"""
project_state = self.set_up_test_model("test_runpython", mti_model=True)
# Create the operation
def inner_method(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
Pony.objects.create(pink=1, weight=3.55)
Pony.objects.create(weight=5)
def inner_method_reverse(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
Pony.objects.filter(pink=1, weight=3.55).delete()
Pony.objects.filter(weight=5).delete()
operation = migrations.RunPython(inner_method, reverse_code=inner_method_reverse)
self.assertEqual(operation.describe(), "Raw Python operation")
# Test the state alteration does nothing
new_state = project_state.clone()
operation.state_forwards("test_runpython", new_state)
self.assertEqual(new_state, project_state)
# Test the database alteration
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0)
with connection.schema_editor() as editor:
operation.database_forwards("test_runpython", editor, project_state, new_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2)
# Now test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards("test_runpython", editor, project_state, new_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0)
# Now test we can't use a string
with self.assertRaisesMessage(ValueError, 'RunPython must be supplied with a callable'):
migrations.RunPython("print 'ahahaha'")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["code", "reverse_code"])
# Also test reversal fails, with an operation identical to above but without reverse_code set
no_reverse_operation = migrations.RunPython(inner_method)
self.assertFalse(no_reverse_operation.reversible)
with connection.schema_editor() as editor:
no_reverse_operation.database_forwards("test_runpython", editor, project_state, new_state)
with self.assertRaises(NotImplementedError):
no_reverse_operation.database_backwards("test_runpython", editor, new_state, project_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2)
def create_ponies(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
pony1 = Pony.objects.create(pink=1, weight=3.55)
self.assertIsNot(pony1.pk, None)
pony2 = Pony.objects.create(weight=5)
self.assertIsNot(pony2.pk, None)
self.assertNotEqual(pony1.pk, pony2.pk)
operation = migrations.RunPython(create_ponies)
with connection.schema_editor() as editor:
operation.database_forwards("test_runpython", editor, project_state, new_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 4)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["code"])
def create_shetlandponies(models, schema_editor):
ShetlandPony = models.get_model("test_runpython", "ShetlandPony")
pony1 = ShetlandPony.objects.create(weight=4.0)
self.assertIsNot(pony1.pk, None)
pony2 = ShetlandPony.objects.create(weight=5.0)
self.assertIsNot(pony2.pk, None)
self.assertNotEqual(pony1.pk, pony2.pk)
operation = migrations.RunPython(create_shetlandponies)
with connection.schema_editor() as editor:
operation.database_forwards("test_runpython", editor, project_state, new_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 6)
self.assertEqual(project_state.apps.get_model("test_runpython", "ShetlandPony").objects.count(), 2)
# And elidable reduction
self.assertIs(False, operation.reduce(operation, []))
elidable_operation = migrations.RunPython(inner_method, elidable=True)
self.assertEqual(elidable_operation.reduce(operation, []), [operation])
def test_run_python_atomic(self):
"""
Tests that the RunPython operation correctly handles the "atomic" keyword.
"""
project_state = self.set_up_test_model("test_runpythonatomic", mti_model=True)
def inner_method(models, schema_editor):
Pony = models.get_model("test_runpythonatomic", "Pony")
Pony.objects.create(pink=1, weight=3.55)
raise ValueError("Adrian hates ponies.")
# Verify atomicity when applying.
atomic_migration = Migration("test", "test_runpythonatomic")
atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method)]
non_atomic_migration = Migration("test", "test_runpythonatomic")
non_atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method, atomic=False)]
# If we're a fully-transactional database, both versions should rollback
if connection.features.can_rollback_ddl:
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.apply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.apply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
# Otherwise, the non-atomic operation should leave a row there
else:
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.apply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.apply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1)
# Reset object count to zero and verify atomicity when unapplying.
project_state.apps.get_model("test_runpythonatomic", "Pony").objects.all().delete()
# On a fully-transactional database, both versions rollback.
if connection.features.can_rollback_ddl:
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.unapply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.unapply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
# Otherwise, the non-atomic operation leaves a row there.
else:
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.unapply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.unapply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1)
# Verify deconstruction.
definition = non_atomic_migration.operations[0].deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["atomic", "code", "reverse_code"])
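# Illustrative sketch (not part of the test suite): a user migration opts out
# of the per-operation transaction exercised above by passing atomic=False;
# the function names are assumptions.
#
#   migrations.RunPython(forwards_func, reverse_code=backwards_func,
#                        atomic=False)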
def test_run_python_related_assignment(self):
"""
#24282 - Model changes to a FK reverse side update the model
on the FK side as well.
"""
def inner_method(models, schema_editor):
Author = models.get_model("test_authors", "Author")
Book = models.get_model("test_books", "Book")
author = Author.objects.create(name="Hemingway")
Book.objects.create(title="Old Man and The Sea", author=author)
create_author = migrations.CreateModel(
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_book = migrations.CreateModel(
"Book",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=100)),
("author", models.ForeignKey("test_authors.Author", models.CASCADE))
],
options={},
)
add_hometown = migrations.AddField(
"Author",
"hometown",
models.CharField(max_length=100),
)
create_old_man = migrations.RunPython(inner_method, inner_method)
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_author.state_forwards("test_authors", new_state)
create_author.database_forwards("test_authors", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_book.state_forwards("test_books", new_state)
create_book.database_forwards("test_books", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
add_hometown.state_forwards("test_authors", new_state)
add_hometown.database_forwards("test_authors", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_old_man.state_forwards("test_books", new_state)
create_old_man.database_forwards("test_books", editor, project_state, new_state)
def test_model_with_bigautofield(self):
"""
A model with BigAutoField can be created.
"""
def create_data(models, schema_editor):
Author = models.get_model("test_author", "Author")
Book = models.get_model("test_book", "Book")
author1 = Author.objects.create(name="Hemingway")
Book.objects.create(title="Old Man and The Sea", author=author1)
Book.objects.create(id=2 ** 33, title="A farewell to arms", author=author1)
author2 = Author.objects.create(id=2 ** 33, name="Remarque")
Book.objects.create(title="All quiet on the western front", author=author2)
Book.objects.create(title="Arc de Triomphe", author=author2)
create_author = migrations.CreateModel(
"Author",
[
("id", models.BigAutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_book = migrations.CreateModel(
"Book",
[
("id", models.BigAutoField(primary_key=True)),
("title", models.CharField(max_length=100)),
("author", models.ForeignKey(to="test_author.Author", on_delete=models.CASCADE))
],
options={},
)
fill_data = migrations.RunPython(create_data)
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_author.state_forwards("test_author", new_state)
create_author.database_forwards("test_author", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_book.state_forwards("test_book", new_state)
create_book.database_forwards("test_book", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
fill_data.state_forwards("fill_data", new_state)
fill_data.database_forwards("fill_data", editor, project_state, new_state)
def test_autofield_foreignfield_growth(self):
"""
A field may be migrated from AutoField to BigAutoField.
"""
def create_initial_data(models, schema_editor):
Article = models.get_model("test_article", "Article")
Blog = models.get_model("test_blog", "Blog")
blog = Blog.objects.create(name="web development done right")
Article.objects.create(name="Frameworks", blog=blog)
Article.objects.create(name="Programming Languages", blog=blog)
def create_big_data(models, schema_editor):
Article = models.get_model("test_article", "Article")
Blog = models.get_model("test_blog", "Blog")
blog2 = Blog.objects.create(name="Frameworks", id=2 ** 33)
Article.objects.create(name="Django", blog=blog2)
Article.objects.create(id=2 ** 33, name="Django2", blog=blog2)
create_blog = migrations.CreateModel(
"Blog",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_article = migrations.CreateModel(
"Article",
[
("id", models.AutoField(primary_key=True)),
("blog", models.ForeignKey(to="test_blog.Blog", on_delete=models.CASCADE)),
("name", models.CharField(max_length=100)),
("data", models.TextField(default="")),
],
options={},
)
fill_initial_data = migrations.RunPython(create_initial_data, create_initial_data)
fill_big_data = migrations.RunPython(create_big_data, create_big_data)
grow_article_id = migrations.AlterField("Article", "id", models.BigAutoField(primary_key=True))
grow_blog_id = migrations.AlterField("Blog", "id", models.BigAutoField(primary_key=True))
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_blog.state_forwards("test_blog", new_state)
create_blog.database_forwards("test_blog", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_article.state_forwards("test_article", new_state)
create_article.database_forwards("test_article", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
fill_initial_data.state_forwards("fill_initial_data", new_state)
fill_initial_data.database_forwards("fill_initial_data", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
grow_article_id.state_forwards("test_article", new_state)
grow_article_id.database_forwards("test_article", editor, project_state, new_state)
state = new_state.clone()
article = state.apps.get_model("test_article.Article")
self.assertIsInstance(article._meta.pk, models.BigAutoField)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
grow_blog_id.state_forwards("test_blog", new_state)
grow_blog_id.database_forwards("test_blog", editor, project_state, new_state)
state = new_state.clone()
blog = state.apps.get_model("test_blog.Blog")
self.assertIsInstance(blog._meta.pk, models.BigAutoField)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
fill_big_data.state_forwards("fill_big_data", new_state)
fill_big_data.database_forwards("fill_big_data", editor, project_state, new_state)
def test_run_python_noop(self):
"""
#24098 - Tests no-op RunPython operations.
"""
project_state = ProjectState()
new_state = project_state.clone()
operation = migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop)
with connection.schema_editor() as editor:
operation.database_forwards("test_runpython", editor, project_state, new_state)
operation.database_backwards("test_runpython", editor, new_state, project_state)
@unittest.skipIf(sqlparse is None and connection.features.requires_sqlparse_for_splitting, "Missing sqlparse")
def test_separate_database_and_state(self):
"""
Tests the SeparateDatabaseAndState operation.
"""
project_state = self.set_up_test_model("test_separatedatabaseandstate")
# Create the operation
database_operation = migrations.RunSQL(
"CREATE TABLE i_love_ponies (id int, special_thing int);",
"DROP TABLE i_love_ponies;"
)
state_operation = migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])
operation = migrations.SeparateDatabaseAndState(
state_operations=[state_operation],
database_operations=[database_operation]
)
self.assertEqual(operation.describe(), "Custom state/database change combination")
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_separatedatabaseandstate", new_state)
self.assertEqual(len(new_state.models["test_separatedatabaseandstate", "somethingelse"].fields), 1)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_separatedatabaseandstate", editor, project_state, new_state)
self.assertTableExists("i_love_ponies")
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards("test_separatedatabaseandstate", editor, new_state, project_state)
self.assertTableNotExists("i_love_ponies")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "SeparateDatabaseAndState")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["database_operations", "state_operations"])
def test_separate_database_and_state2(self):
"""
A complex SeparateDatabaseAndState operation: multiple operations for
both state and database. Verify the state dependencies within each list
and that state ops don't affect the database.
"""
app_label = "test_separatedatabaseandstate2"
project_state = self.set_up_test_model(app_label)
# Create the operation
database_operations = [
migrations.CreateModel(
"ILovePonies",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "iloveponies"},
),
migrations.CreateModel(
"ILoveMorePonies",
# We use IntegerField and not AutoField because
# the model is going to be deleted immediately
# and with an AutoField this fails on Oracle
[("id", models.IntegerField(primary_key=True))],
options={"db_table": "ilovemoreponies"},
),
migrations.DeleteModel("ILoveMorePonies"),
migrations.CreateModel(
"ILoveEvenMorePonies",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "iloveevenmoreponies"},
),
]
state_operations = [
migrations.CreateModel(
"SomethingElse",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "somethingelse"},
),
migrations.DeleteModel("SomethingElse"),
migrations.CreateModel(
"SomethingCompletelyDifferent",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "somethingcompletelydifferent"},
),
]
operation = migrations.SeparateDatabaseAndState(
state_operations=state_operations,
database_operations=database_operations,
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
def assertModelsAndTables(after_db):
# Tables and models exist, or don't, as they should:
self.assertNotIn((app_label, "somethingelse"), new_state.models)
self.assertEqual(len(new_state.models[app_label, "somethingcompletelydifferent"].fields), 1)
self.assertNotIn((app_label, "iloveponies"), new_state.models)
self.assertNotIn((app_label, "ilovemoreponies"), new_state.models)
self.assertNotIn((app_label, "iloveevenmoreponies"), new_state.models)
self.assertTableNotExists("somethingelse")
self.assertTableNotExists("somethingcompletelydifferent")
self.assertTableNotExists("ilovemoreponies")
if after_db:
self.assertTableExists("iloveponies")
self.assertTableExists("iloveevenmoreponies")
else:
self.assertTableNotExists("iloveponies")
self.assertTableNotExists("iloveevenmoreponies")
assertModelsAndTables(after_db=False)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
assertModelsAndTables(after_db=True)
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
assertModelsAndTables(after_db=False)
class SwappableOperationTests(OperationTestBase):
"""
Key operations ignore swappable models
(we don't want to replicate all of them here, as the functionality
is in a common base class anyway)
"""
available_apps = ['migrations']
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_create_ignore_swapped(self):
"""
The CreateModel operation ignores swapped models.
"""
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
options={
"swappable": "TEST_SWAP_MODEL",
},
)
# Test the state alteration (it should still be there!)
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crigsw", new_state)
self.assertEqual(new_state.models["test_crigsw", "pony"].name, "Pony")
self.assertEqual(len(new_state.models["test_crigsw", "pony"].fields), 2)
# Test the database alteration
self.assertTableNotExists("test_crigsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crigsw", editor, project_state, new_state)
self.assertTableNotExists("test_crigsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crigsw", editor, new_state, project_state)
self.assertTableNotExists("test_crigsw_pony")
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_delete_ignore_swapped(self):
"""
Tests that the DeleteModel operation ignores swapped models.
"""
operation = migrations.DeleteModel("Pony")
project_state, new_state = self.make_test_state("test_dligsw", operation)
# Test the database alteration
self.assertTableNotExists("test_dligsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dligsw", editor, project_state, new_state)
self.assertTableNotExists("test_dligsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_dligsw", editor, new_state, project_state)
self.assertTableNotExists("test_dligsw_pony")
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_add_field_ignore_swapped(self):
"""
Tests that the AddField operation ignores swapped models.
"""
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=5),
)
project_state, new_state = self.make_test_state("test_adfligsw", operation)
# Test the database alteration
self.assertTableNotExists("test_adfligsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_adfligsw", editor, project_state, new_state)
self.assertTableNotExists("test_adfligsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adfligsw", editor, new_state, project_state)
self.assertTableNotExists("test_adfligsw_pony")
@override_settings(TEST_SWAP_MODEL='migrations.SomeFakeModel')
def test_indexes_ignore_swapped(self):
"""
Add/RemoveIndex operations ignore swapped models.
"""
operation = migrations.AddIndex('Pony', models.Index(fields=['pink'], name='my_name_idx'))
project_state, new_state = self.make_test_state('test_adinigsw', operation)
with connection.schema_editor() as editor:
# No database queries should be run for swapped models
operation.database_forwards('test_adinigsw', editor, project_state, new_state)
operation.database_backwards('test_adinigsw', editor, new_state, project_state)
operation = migrations.RemoveIndex('Pony', models.Index(fields=['pink'], name='my_name_idx'))
project_state, new_state = self.make_test_state("test_rminigsw", operation)
with connection.schema_editor() as editor:
operation.database_forwards('test_rminigsw', editor, project_state, new_state)
operation.database_backwards('test_rminigsw', editor, new_state, project_state)
class TestCreateModel(SimpleTestCase):
def test_references_model_mixin(self):
CreateModel('name', [], bases=(Mixin, models.Model)).references_model('other_model')
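# Illustrative sketch (not part of the test suite): the shape of a
# SeparateDatabaseAndState operation as it would appear in a user's migration
# module; the SQL and model below are assumptions for demonstration.
#
#   migrations.SeparateDatabaseAndState(
#       database_operations=[migrations.RunSQL(
#           "CREATE TABLE ponies (id int);", "DROP TABLE ponies;")],
#       state_operations=[migrations.CreateModel(
#           "Pony", [("id", models.AutoField(primary_key=True))])],
#   )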
| metadata: repo=reinout/django, path=tests/migrations/test_operations.py, license=bsd-3-clause |
DEBUG = True
SECRET_KEY = 'lalalalalalala'
SENTRY_DSN = ""
SQLALCHEMY_ECHO = DEBUG
SQLALCHEMY_DATABASE_URI = "sqlite:///database.db"
CACHE_TYPE = "simple"
CACHE_KEY_PREFIX = "{{cookiecutter.app_name}}::"
DEBUG_TB_ENABLED = DEBUG
PROFILE = False
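# Usage sketch (illustrative, not part of the template): a Flask app factory
# would typically load this dev config with app.config.from_object() or
# from_pyfile(); the import path below is an assumption.
#
#   from flask import Flask
#
#   app = Flask(__name__)
#   app.config.from_object('{{cookiecutter.app_name}}.conf.dev')
#   assert app.config['DEBUG'] is True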
| metadata: repo=makmanalp/flask-chassis, path={{cookiecutter.app_name}}/conf/dev.py, license=mit |
import os
import re
import csv
from optparse import OptionParser


def getFeatureFiles(path):
    files = []
    for dirname, dirnames, filenames in os.walk(path):
        for filename in filenames:
            if filename.endswith('.feature'):
                files.append(os.path.join(dirname, filename))
    return files


def makeTagMap(feature_files, path):
    result = {}
    for feature in feature_files:
        for line in open(feature):
            for tag in re.findall(r'@\w+', line):
                key = tag
                if key[:7] == '@RALLY_':
                    key = key[7:]
                relpath = feature[len(path):]
                # Append to an existing tag's file list instead of
                # overwriting it (the original reset the list here).
                if key not in result:
                    result[key] = [relpath]
                elif relpath not in result[key]:
                    result[key].append(relpath)
    return result


def makeFeatureMap(feature_files, path):
    refeature = re.compile(r'(Feature:.+)')
    result = {}
    for feature in feature_files:
        for line in open(feature):
            tagMatch = refeature.search(line)
            if tagMatch:
                result[feature[len(path):]] = [tagMatch.group(1)]
                break
    return result


def topHtmlTable(out, hdr):
    out.write('<html><head><link href="coffee-with-milk.css" rel="stylesheet" type="text/css"></head>')
    out.write('<h3>Legend:</h3><h4>* - Documentation or process story. No tests required.<br />'
              '** - Feature not implemented<br />*** - No automated tests for story</h4>')
    out.write('<table><thead><tr>')
    for col in hdr:
        out.write('<th>' + col + '</th>')
    out.write('<th> Tests </th>')
    out.write('</tr></thead>')
    out.write('<tbody>')


def rowHtmlTable(out, row):
    out.write('<tr>')
    for col in row:
        out.write('<td>' + col + '</td>')


def rowEndHtmlTable(out):
    out.write('</tr>')


def bottomHtmlTable(out):
    out.write('</tbody></table></html>')


if __name__ == '__main__':
    # Usage and options handling
    usage = "Usage: %prog [options] csvfile"
    parser = OptionParser(usage=usage)
    parser.add_option("-p", "--path", dest="path",
                      help="Specify path to feature files", default=".")
    parser.add_option("-c", "--column", dest="col",
                      help="Specify which column of input contains tag names",
                      type="int", default=3)
    (options, args) = parser.parse_args()
    if len(args) != 1:
        parser.error("a single input CSV file is required")

    # Get all the feature files in path
    files = getFeatureFiles(options.path)
    # Create a map of RALLY tags in feature files to the files
    tags = makeTagMap(files, options.path)
    # Create a map of feature files to feature descriptions
    features = makeFeatureMap(files, options.path)

    # Open the two output files
    tracematrix = open('trace_matrix.html', 'w')
    problems = open('no_tests_matrix.html', 'w')

    # Open the input CSV file
    csvfile = open(args[0], "rU")
    table_rows = csv.reader(csvfile)
    header = next(table_rows)

    # Create top part of the two output files
    topHtmlTable(tracematrix, header)
    topHtmlTable(problems, header)

    for row in table_rows:
        # Determine the set of features that apply to all the stories in this row
        canonical_feature_set = []
        stories = row[options.col].split(' ')
        for s in stories:
            if s in tags:
                for f in tags[s]:
                    if f not in canonical_feature_set:
                        canonical_feature_set.append(f)
        rowHtmlTable(tracematrix, row)
        if len(canonical_feature_set) == 0:
            # What? No tests! That's a problem.
            rowHtmlTable(problems, row)
            rowEndHtmlTable(problems)
        else:
            tracematrix.write('<td>')
            for f in canonical_feature_set:
                feature_desc = ''
                if f in features:
                    feature_desc = features[f][0]
                tracematrix.write(f + '<br/>' + feature_desc + '<br/><br/>')
            tracematrix.write('</td>')
        # Close the row in both branches so the HTML stays well-formed.
        rowEndHtmlTable(tracematrix)

    bottomHtmlTable(tracematrix)
    bottomHtmlTable(problems)
    tracematrix.close()
    problems.close()
    csvfile.close()
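# Example invocation (illustrative; the CSV file name and feature path are
# assumptions): generates trace_matrix.html and no_tests_matrix.html in the
# current directory.
#
#   python trace_matrix.py -p features/ -c 3 stories.csv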
| metadata: repo=inbloom/secure-data-service, path=sli/acceptance-tests/traceability/trace_matrix.py, license=apache-2.0 |
import yaml
from yaml import error as yaml_error

from cloudferry import cfglib
from cloudferry import config
from cloudferry.lib.utils import log


class ConfigMixin(object):
    def __init__(self, app, app_args, cmd_name=None):
        super(ConfigMixin, self).__init__(app, app_args, cmd_name)
        self.config = None

    def get_parser(self, prog_name):
        parser = super(ConfigMixin, self).get_parser(prog_name)
        parser.add_argument('config_path',
                            help='Configuration file')
        return parser

    def run(self, parsed_args):
        self.config = self.init_config(parsed_args.config_path)
        self.configure_logging()
        return super(ConfigMixin, self).run(parsed_args)

    def configure_logging(self, log_config=None, forward_stdout=None,
                          hide_ssl_warnings=None):
        if self.app.interactive_mode:
            forward_stdout = False
        log.configure_logging(log_config, self.app.options.debug,
                              forward_stdout, hide_ssl_warnings)

    def init_config(self, config_path):
        conf = cfglib.init_config(config_path)
        if self.app.options.debug:
            conf.set_override('debug', self.app.options.debug, 'migrate')
        return conf


class YamlConfigMixin(ConfigMixin):
    def configure_logging(self, log_config=None, forward_stdout=None,
                          hide_ssl_warnings=None):
        super(YamlConfigMixin, self).configure_logging(
            log_config=log_config or 'configs/logging_config.yaml',
            forward_stdout=forward_stdout or False,
            hide_ssl_warnings=hide_ssl_warnings or True,
        )

    def init_config(self, config_path):
        try:
            with open(config_path, 'r') as config_file:
                # safe_load avoids executing arbitrary YAML tags from a
                # user-supplied configuration file.
                conf = yaml.safe_load(config_file)
            return config.load(conf)
        except config.ValidationError as ex:
            self.app.parser.error(ex)
        except yaml_error.YAMLError as ex:
            self.app.parser.error(ex)
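# Illustrative sketch (not part of the original module): the mixin signatures
# match cliff-style commands, so a concrete command combining YamlConfigMixin
# with cliff.command.Command might look like the following; the class name and
# output are assumptions.
#
#   from cliff import command
#
#   class ShowConfig(YamlConfigMixin, command.Command):
#       """Print the migration configuration loaded from YAML."""
#
#       def take_action(self, parsed_args):
#           # self.config is populated by ConfigMixin.run() before cliff
#           # dispatches to take_action().
#           self.app.stdout.write('%s\n' % self.config)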
| metadata: repo=SVilgelm/CloudFerry, path=cloudferry/cli/base.py, license=apache-2.0 |
try:
    from django.conf.urls import patterns, include
except ImportError:
    from django.conf.urls.defaults import patterns, include

urlpatterns = patterns(
    "",
    (r"^", include("pinax.boxes.urls")),
)
| metadata: repo=pinax/pinax-boxes, path=pinax/boxes/tests/urls.py, license=mit |
import torch
from .Module import Module


class MV(Module):
    """Module to perform matrix-vector multiplication on two minibatch inputs,
    producing a minibatch.
    """

    def __init__(self, trans=False):
        super(MV, self).__init__()
        self.trans = trans
        self.gradInput = [torch.Tensor(), torch.Tensor()]

    def updateOutput(self, input):
        M, v = input
        assert M.ndimension() == 2 or M.ndimension() == 3

        if M.ndimension() == 2:
            assert v.ndimension() == 1
            if self.trans:
                M = M.transpose(0, 1)
            self.output.resize_(M.size(0))
            torch.mv(M, v, out=self.output)
        else:
            assert v.ndimension() == 2
            if self.trans:
                M = M.transpose(1, 2)

            self.output.resize_(M.size(0), M.size(1), 1)
            torch.bmm(M, v.view(v.size(0), v.size(1), 1), out=self.output).resize_(M.size(0), M.size(1))

        return self.output

    def updateGradInput(self, input, gradOutput):
        M, v = input
        self.gradInput[0].resize_as_(M)
        self.gradInput[1].resize_as_(v)
        gradOutput = gradOutput.contiguous()

        assert gradOutput.ndimension() == 1 or gradOutput.ndimension() == 2

        if gradOutput.ndimension() == 2:
            assert M.ndimension() == 3
            assert v.ndimension() == 2
            bdim = M.size(0)
            odim = M.size(1)
            idim = M.size(2)

            if self.trans:
                torch.bmm(v.view(bdim, odim, 1), gradOutput.view(bdim, 1, idim), out=self.gradInput[0])
                torch.bmm(M, gradOutput.view(bdim, idim, 1), out=self.gradInput[1].view(bdim, odim, 1))
            else:
                torch.bmm(gradOutput.view(bdim, odim, 1), v.view(bdim, 1, idim), out=self.gradInput[0])
                torch.bmm(M.transpose(1, 2), gradOutput.view(bdim, odim, 1), out=self.gradInput[1].view(bdim, idim, 1))
        else:
            assert M.ndimension() == 2
            assert v.ndimension() == 1

            if self.trans:
                torch.ger(v, gradOutput, out=self.gradInput[0])
                # The gradient w.r.t. v is a matrix-vector product, not an
                # elementwise one: output = M^T v, so dL/dv = M @ gradOutput.
                self.gradInput[1] = torch.mv(M, gradOutput)
            else:
                torch.ger(gradOutput, v, out=self.gradInput[0])
                # output = M v, so dL/dv = M^T @ gradOutput.
                self.gradInput[1] = torch.mv(M.t(), gradOutput)

        return self.gradInput
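# A minimal usage sketch (not part of the original module, and assuming the
# legacy nn package exposes MV as torch.legacy.nn.MV): feed a minibatch of
# matrices and vectors through updateOutput and check the result shape.
#
#   import torch
#   from torch.legacy import nn as legacy_nn
#
#   mv = legacy_nn.MV(trans=False)
#   M = torch.randn(4, 3, 5)          # minibatch of 4 matrices, each 3x5
#   v = torch.randn(4, 5)             # minibatch of 4 vectors of length 5
#   out = mv.updateOutput([M, v])     # shape: torch.Size([4, 3])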
| metadata: repo=RPGOne/Skynet, path=pytorch-master/torch/legacy/nn/MV.py, license=bsd-3-clause |
"""
Copyright (c) 2015, Nagoya University
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Autoware nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import wx
import wx.lib.buttons
import wx.lib.agw.customtreectrl as CT
import gettext
import os
import re
import sys
import fcntl
import threading
import Queue
import time
import socket
import struct
import shlex
import signal
import subprocess
import psutil
import pty
import yaml
import datetime
import syslog
import rtmgr
import rospy
import std_msgs.msg
from std_msgs.msg import Bool
from decimal import Decimal
from autoware_msgs.msg import ConfigSsd
from autoware_msgs.msg import ConfigCarDpm
from autoware_msgs.msg import ConfigPedestrianDpm
from autoware_msgs.msg import ConfigNdt
from autoware_msgs.msg import ConfigNdtMapping
from autoware_msgs.msg import ConfigApproximateNdtMapping
from autoware_msgs.msg import ConfigNdtMappingOutput
from autoware_msgs.msg import ConfigICP
from autoware_msgs.msg import ConfigVoxelGridFilter
from autoware_msgs.msg import ConfigRingFilter
from autoware_msgs.msg import ConfigDistanceFilter
from autoware_msgs.msg import ConfigRandomFilter
from autoware_msgs.msg import ConfigRingGroundFilter
from autoware_msgs.msg import ConfigRayGroundFilter
from autoware_msgs.msg import ConfigPointsConcatFilter
from autoware_msgs.msg import ConfigWaypointLoader
from autoware_msgs.msg import ConfigWaypointFollower
from autoware_msgs.msg import ConfigTwistFilter
from autoware_msgs.msg import ConfigVelocitySet
from autoware_msgs.msg import ConfigLatticeVelocitySet
from autoware_msgs.msg import ConfigCarKf
from autoware_msgs.msg import ConfigPedestrianKf
from autoware_msgs.msg import ConfigLaneRule
from autoware_msgs.msg import ConfigLaneSelect
from autoware_msgs.msg import ConfigLaneStop
from autoware_msgs.msg import ConfigCarFusion
from autoware_msgs.msg import ConfigPedestrianFusion
from autoware_msgs.msg import ConfigPlannerSelector
from autoware_msgs.msg import ConfigDecisionMaker
from tablet_socket_msgs.msg import mode_cmd
from tablet_socket_msgs.msg import gear_cmd
from tablet_socket_msgs.msg import Waypoint
from tablet_socket_msgs.msg import route_cmd
from autoware_msgs.msg import ndt_stat
from geometry_msgs.msg import TwistStamped
from geometry_msgs.msg import Vector3
from autoware_msgs.msg import accel_cmd
from autoware_msgs.msg import steer_cmd
from autoware_msgs.msg import brake_cmd
from autoware_msgs.msg import indicator_cmd
from autoware_msgs.msg import lamp_cmd
from autoware_msgs.msg import traffic_light
from autoware_msgs.msg import adjust_xy
from types import MethodType
SCHED_OTHER = 0
SCHED_FIFO = 1
SCHED_RR = 2
PROC_MANAGER_SOCK="/tmp/autoware_proc_manager"
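# SCHED_OTHER/SCHED_FIFO/SCHED_RR mirror the Linux scheduler policy constants
# from sched.h; presumably they are forwarded to the proc_manager helper
# listening on PROC_MANAGER_SOCK when changing a node's scheduling policy.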
class MyFrame(rtmgr.MyFrame):
def __init__(self, *args, **kwds):
rtmgr.MyFrame.__init__(self, *args, **kwds)
self.all_procs = []
self.all_cmd_dics = []
self.load_dic = self.load_yaml('param.yaml', def_ret={})
self.config_dic = {}
self.Bind(wx.EVT_CLOSE, self.OnClose)
self.params = []
self.all_tabs = []
self.all_th_infs = []
self.log_que = Queue.Queue()
self.log_que_stdout = Queue.Queue()
self.log_que_stderr = Queue.Queue()
self.log_que_show = Queue.Queue()
#
# ros
#
rospy.init_node('runtime_manager', anonymous=True)
rospy.Subscriber('to_rtmgr', std_msgs.msg.String, self.RosCb)
self.pub = rospy.Publisher('from_rtmgr', std_msgs.msg.String, queue_size=10)
#
# for Quick Start tab
#
tab = self.tab_qs
self.all_tabs.append(tab)
self.qs_cmd = {}
self.all_cmd_dics.append(self.qs_cmd)
self.qs_dic = self.load_yaml('qs.yaml')
self.add_params(self.qs_dic.get('params', []))
self.setup_buttons(self.qs_dic.get('buttons', {}), self.qs_cmd)
for nm in [ 'map', 'sensing', 'localization', 'detection', 'mission_planning', 'motion_planning' ]:
for key in self.qs_dic.get('exec_time', {}).get(nm, {}).keys():
(topic, msg, attr) = ( key.split('.') + [ None, None, None ] )[:3]
msg = globals().get(msg)
msg = msg if msg else std_msgs.msg.Float32
attr = attr if attr else 'data'
rospy.Subscriber(topic, msg, self.exec_time_callback, callback_args=(key, attr))
#
# for Setup tab
#
tab = self.tab_setup
self.all_tabs.append(tab)
setup_cmd = {}
self.all_cmd_dics.append(setup_cmd)
dic = self.load_yaml('setup.yaml')
self.add_params(dic.get('params', []))
self.setup_buttons(dic.get('buttons', {}), setup_cmd)
#
# for Map tab
#
tab = self.tab_map
self.all_tabs.append(tab)
self.map_cmd = {}
self.all_cmd_dics.append(self.map_cmd)
self.map_dic = self.load_yaml('map.yaml')
self.add_params(self.map_dic.get('params', []))
self.setup_buttons(self.map_dic.get('buttons', {}), self.map_cmd)
self.tc_point_cloud = self.obj_to_varpanel_tc(self.button_point_cloud, 'path_pcd')
self.tc_area_list = self.obj_to_varpanel_tc(self.button_area_lists, 'path_area_list')
self.label_point_cloud_bar.Destroy()
self.label_point_cloud_bar = BarLabel(tab, ' Loading... ')
self.label_point_cloud_bar.Enable(False)
def hook1G(args):
for f in args.get('func')().split(','):
sz = os.path.getsize(f)
if sz > 1024*1024*1024:
wx.MessageBox("Over 1GB\n\n{}\n({:,})".format(f, sz), caption='Warning')
args = { 'func':self.tc_point_cloud.GetValue }
hook_var = { 'hook':hook1G, 'args':args, 'flags':['every_time'] }
obj = self.button_point_cloud
gdic_v = self.obj_to_gdic(obj, {}).get('path_pcd', {})
gdic_v['hook_var'] = hook_var
#
# for Sensing tab
#
tab = self.tab_sensing
self.all_tabs.append(tab)
self.drv_probe_cmd = {}
self.sensing_cmd = {}
self.all_cmd_dics.append(self.sensing_cmd)
dic = self.load_yaml('sensing.yaml')
self.add_params(dic.get('params', []))
self.create_checkboxes(dic, self.panel_sensing, None, self.drv_probe_cmd, self.sensing_cmd, self.OnSensingDriver)
self.setup_buttons(dic.get('buttons', {}), self.sensing_cmd)
#self.timer = wx.Timer(self)
#self.Bind(wx.EVT_TIMER, self.OnProbe, self.timer)
#self.probe_interval = 10*1000
#if self.checkbox_auto_probe.GetValue():
# self.OnProbe(None)
# self.timer.Start(self.probe_interval)
self.dlg_rosbag_record = MyDialogRosbagRecord(self, cmd_dic=self.sensing_cmd)
buttons_color_hdr_setup(self.dlg_rosbag_record)
sense_cmds_dic = dic.get('cmds', {})
#
# for Computing tab
#
tab = self.tab_computing
self.all_tabs.append(tab)
parent = self.tree_ctrl_0.GetParent()
for i in range(2):
self.obj_get('tree_ctrl_' + str(i)).Destroy()
items = self.load_yaml('computing.yaml')
self.add_params(items.get('params', []))
self.sys_gdic = items.get('sys_gui')
self.sys_gdic['update_func'] = self.update_func
self.computing_cmd = {}
self.all_cmd_dics.append(self.computing_cmd)
for i in range(2):
tree_ctrl = self.create_tree(parent, items['subs'][i], None, None, self.computing_cmd)
tree_ctrl.ExpandAll()
tree_ctrl.SetBackgroundColour(wx.NullColour)
setattr(self, 'tree_ctrl_' + str(i), tree_ctrl)
self.Bind(CT.EVT_TREE_ITEM_CHECKED, self.OnTreeChecked)
self.setup_buttons(items.get('buttons', {}), self.computing_cmd)
#
# for Sensing tab (cmds)
#
parent = self.tree_ctrl_sense.GetParent()
self.tree_ctrl_sense.Destroy()
tree_ctrl = self.create_tree(parent, sense_cmds_dic, None, None, self.sensing_cmd)
tree_ctrl.ExpandAll()
tree_ctrl.SetBackgroundColour(wx.NullColour)
self.tree_ctrl_sense = tree_ctrl
#
# for Interface tab
#
tab = self.tab_interface
self.all_tabs.append(tab)
self.interface_cmd = {}
self.all_cmd_dics.append(self.interface_cmd)
self.interface_dic = self.load_yaml('interface.yaml')
self.add_params(self.interface_dic.get('params', []))
self.setup_buttons(self.interface_dic.get('buttons', {}), self.interface_cmd)
self.setup_buttons(self.interface_dic.get('checkboxs', {}), self.interface_cmd)
szr = wx.BoxSizer(wx.VERTICAL)
for cc in self.interface_dic.get('control_check', []):
pdic = {}
prm = self.get_param(cc.get('param'))
for var in prm['vars']:
pdic[ var['name'] ] = var['v']
gdic = self.gdic_get_1st(cc)
panel = ParamPanel(self.panel_interface_cc, frame=self, pdic=pdic, gdic=gdic, prm=prm)
szr.Add(panel, 0, wx.EXPAND)
self.panel_interface_cc.SetSizer(szr)
#
# for Database tab
#
tab = self.tab_database
self.all_tabs.append(tab)
self.data_cmd = {}
self.all_cmd_dics.append(self.data_cmd)
dic = self.load_yaml('data.yaml')
self.add_params(dic.get('params', []))
parent = self.tree_ctrl_data.GetParent()
self.tree_ctrl_data.Destroy()
tree_ctrl = self.create_tree(parent, dic, None, None, self.data_cmd)
tree_ctrl.ExpandAll()
tree_ctrl.SetBackgroundColour(wx.NullColour)
self.tree_ctrl_data = tree_ctrl
#self.setup_config_param_pdic()
if 'buttons' in dic:
self.setup_buttons(dic['buttons'], self.data_cmd)
#
# for Simulation Tab
#
tab = self.tab_simulation
self.all_tabs.append(tab)
self.simulation_cmd = {}
self.all_cmd_dics.append(self.simulation_cmd)
dic = self.load_yaml('simulation.yaml')
self.add_params(dic.get('params', []))
self.setup_buttons(dic.get('buttons'), self.simulation_cmd)
btn = self.button_play_rosbag_play
# setup for rosbag info
gdic = self.obj_to_gdic(btn, {})
gdic_v = dic_getset(gdic, 'file', {})
gdic_v['update_hook'] = self.rosbag_info_hook
tc = self.obj_to_varpanel_tc(btn, 'file')
if tc:
self.rosbag_info_hook( tc.GetValue() )
#vp = self.obj_to_varpanel(btn, 'sim_time')
#self.checkbox_sim_time = vp.obj
#try:
# cmd = ['rosparam', 'get', '/use_sim_time']
# if subprocess.check_output(cmd, stderr=open(os.devnull, 'wb')).strip() == 'true':
# self.checkbox_sim_time.SetValue(True)
#except subprocess.CalledProcessError:
# pass
self.label_rosbag_play_bar.Destroy()
self.label_rosbag_play_bar = BarLabel(tab, ' Playing... ')
self.label_rosbag_play_bar.Enable(False)
#
# for Status tab
#
tab = self.tab_status
self.all_tabs.append(tab)
self.status_cmd = {}
self.all_cmd_dics.append(self.status_cmd)
self.status_dic = self.load_yaml('status.yaml')
self.add_params(self.status_dic.get('params', []))
self.setup_buttons(self.status_dic.get('buttons', {}), self.status_cmd)
font = wx.Font(10, wx.FONTFAMILY_MODERN, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL)
self.label_top_cmd.SetFont(font)
#
# for Topics tab
#
tab = self.tab_topics
self.all_tabs.append(tab)
#
# for State tab
#
tab = self.tab_states
self.all_tabs.append(tab)
self.state_dic = self.load_yaml('state.yaml')
self.mainstate_dic = self.state_dic["mainstate"]
self.substate_dic = self.state_dic["substate"]
#
# for All
#
self.bitmap_logo.Destroy()
bm = scaled_bitmap(wx.Bitmap(rtmgr_src_dir() + 'images/autoware_logo_1.png'), 0.2)
self.bitmap_logo = wx.StaticBitmap(self, wx.ID_ANY, bm)
rtmgr.MyFrame.__do_layout(self)
cond = lambda s : s.startswith('tab_')
self.tab_names = [ self.name_get_cond(tab, cond=cond, def_ret='').replace('tab_', '', 1) for tab in self.all_tabs ]
new_btn_grps = ( lambda btn_names, tab_names=self.tab_names :
[ [ self.obj_get('button_{}_{}'.format(bn, tn)) for tn in tab_names ] for bn in btn_names ] )
self.alias_grps = new_btn_grps( ('rosbag', 'rviz', 'rqt') )
self.alias_grps += new_btn_grps( ('android_tablet', 'oculus_rift', 'vehicle_gateway', 'remote_control', 'auto_pilot'),
('qs', 'interface') )
for grp in self.alias_grps:
wx.CallAfter(self.alias_sync, get_top(grp))
s = get_tooltip_obj(grp[0])
if s:
for obj in grp[1:]:
set_tooltip_str(obj, s)
# Topics tab (need, after layout for sizer)
self.topics_dic = self.load_yaml('topics.yaml')
self.topics_list = []
self.topics_echo_curr_topic = None
self.topics_echo_proc = None
self.topics_echo_thinf = None
self.topics_echo_que = Queue.Queue()
self.topics_echo_sum = 0
thinf = th_start(self.topics_echo_show_th)
self.all_th_infs.append(thinf)
self.refresh_topics_list()
# waypoint
self.route_cmd_waypoint = [ Waypoint(0,0), Waypoint(0,0) ]
rospy.Subscriber('route_cmd', route_cmd, self.route_cmd_callback)
# topic /xxx_stat
self.stat_dic = {}
for k in [ 'gnss', 'pmap', 'vmap', 'lf' ]:
self.stat_dic[k] = False
name = k + '_stat'
rospy.Subscriber(name, std_msgs.msg.Bool, self.stat_callback, callback_args=k)
# top command thread setup
toprc = os.path.expanduser('~/.toprc')
backup = os.path.expanduser('~/.toprc-autoware-backup')
self.toprc_setup(toprc, backup)
cpu_ibls = [ InfoBarLabel(self, 'CPU'+str(i)) for i in range(get_cpu_count())]
sz = sizer_wrap(cpu_ibls, wx.HORIZONTAL, 1, wx.EXPAND, 0)
self.sizer_cpuinfo.Add(sz, 8, wx.ALL | wx.EXPAND, 4)
self.lb_top5 = []
for i in range(5):
lb = wx.StaticText(self, wx.ID_ANY, '')
change_font_point_by_rate(lb, 0.75)
self.lb_top5.append(lb)
line = wx.StaticLine(self, wx.ID_ANY)
ibl = InfoBarLabel(self, 'Memory', bar_orient=wx.HORIZONTAL)
szr = sizer_wrap(self.lb_top5 + [ line, ibl ], flag=wx.EXPAND | wx.FIXED_MINSIZE)
self.sizer_cpuinfo.Add(szr, 2, wx.ALL | wx.EXPAND, 4)
th_arg = { 'setting':self.status_dic.get('top_cmd_setting', {}),
'cpu_ibls':cpu_ibls, 'mem_ibl':ibl,
'toprc':toprc, 'backup':backup }
thinf = th_start(self.top_cmd_th, th_arg)
self.all_th_infs.append(thinf)
# ps command thread
#thinf = th_start(self.ps_cmd_th, { 'interval':5 })
#self.all_th_infs.append(thinf)
# logout thread
interval = self.status_dic.get('gui_update_interval_ms', 100) * 0.001
tc = self.text_ctrl_stdout
thinf = th_start(self.logout_th, { 'que':self.log_que_stdout, 'interval':interval, 'tc':tc } )
self.all_th_infs.append(thinf)
thinf = th_start(self.logout_th, { 'que':self.log_que_stderr, 'interval':interval, 'tc':tc } )
self.all_th_infs.append(thinf)
thinf = th_start(self.logout_th, { 'que':self.log_que, 'interval':interval, 'tc':tc } )
self.all_th_infs.append(thinf)
if interval > 0:
thinf = th_start(self.logshow_th, { 'que':self.log_que_show , 'interval':interval , 'tc':tc })
self.all_th_infs.append(thinf)
else:
self.checkbox_stdout.Enable(False)
tc.Enable(False)
# mkdir
paths = [ os.environ['HOME'] + '/.autoware/data/tf',
os.environ['HOME'] + '/.autoware/data/map/pointcloud_map',
os.environ['HOME'] + '/.autoware/data/map/vector_map' ]
for path in paths:
if not os.path.exists(path):
subprocess.call([ 'mkdir', '-p', path ])
# icon
bm = scaled_bitmap(wx.Bitmap(rtmgr_src_dir() + 'images/autoware_logo_2_white.png'), 0.5)
icon = wx.EmptyIcon()
icon.CopyFromBitmap(bm)
self.SetIcon(icon)
wx.CallAfter( self.boot_booted_cmds )
def __do_layout(self):
pass
def boot_booted_cmds(self):
if not self.load_dic.get('booted_cmds', {}).get('enable', False):
return
names = self.load_dic.get('booted_cmds', {}).get('names', [])
lst = [ ( name, self.cfg_dic( { 'name': name } ).get('obj') ) for name in names ]
lst = [ (name, obj) for (name, obj) in lst if obj ]
if not lst:
return
choices = [ obj.GetLabel() if hasattr(obj, 'GetLabel') else name for (name, obj) in lst ]
dlg = wx.MultiChoiceDialog(self, 'boot command ?', '', choices)
dlg.SetSelections( range( len(names) ) )
if dlg.ShowModal() != wx.ID_OK:
return
for i in dlg.GetSelections():
(_, obj) = lst[i]
post_evt_toggle_obj(self, obj, True)
def OnClose(self, event):
if self.quit_select() != 'quit':
return
# kill_all
for proc in self.all_procs[:]: # copy
(_, obj) = self.proc_to_cmd_dic_obj(proc)
self.launch_kill(False, 'dmy', proc, obj=obj)
shutdown_proc_manager()
shutdown_sh = self.get_autoware_dir() + '/ros/shutdown'
if os.path.exists(shutdown_sh):
os.system(shutdown_sh)
for thinf in self.all_th_infs:
th_end(thinf)
self.Destroy()
def quit_select(self):
def timer_func():
if self.quit_timer:
self.quit_timer = 'timeout'
evt = wx.PyCommandEvent( wx.EVT_CLOSE.typeId, self.GetId() )
wx.PostEvent(self, evt)
if not hasattr(self, 'quit_timer') or not self.quit_timer:
self.quit_timer = threading.Timer(2.0, timer_func)
self.quit_timer.start()
return 'not quit'
if self.quit_timer == 'timeout':
self.save_param_yaml()
return 'quit'
self.quit_timer.cancel()
self.quit_timer = None
lst = [
( 'Save and Quit', [ 'save', 'quit' ] ),
( 'Save to param.yaml', [ 'save' ] ),
( 'Quit without saving', [ 'quit' ] ),
( 'Reload computing.yaml', [ 'reload' ] ),
( self.get_booted_cmds_enable_msg()[1], [ 'toggle_booted_cmds' ] ),
]
choices = [ s for (s, _) in lst ]
dlg = wx.SingleChoiceDialog(self, 'select command', '', choices)
if dlg.ShowModal() != wx.ID_OK:
return 'not quit'
i = dlg.GetSelection() # index of choices
(_, f) = lst[i]
if 'save' in f:
self.save_param_yaml()
if 'reload' in f:
self.reload_computing_yaml()
if 'toggle_booted_cmds' in f:
self.toggle_booted_cmds()
return 'quit' if 'quit' in f else 'not quit'
def save_param_yaml(self):
save_dic = {}
for (name, pdic) in self.load_dic.items():
if pdic and pdic != {}:
prm = self.cfg_dic( {'name':name, 'pdic':pdic} ).get('param', {})
no_saves = prm.get('no_save_vars', [])
pdic = pdic.copy()
for k in pdic.keys():
if k in no_saves:
del pdic[k]
save_dic[name] = pdic
names = []
for proc in self.all_procs:
(_, obj) = self.proc_to_cmd_dic_obj(proc)
name = self.cfg_dic( { 'obj': obj } ).get('name')
names.append(name)
if 'booted_cmds' not in save_dic:
save_dic['booted_cmds'] = {}
save_dic.get('booted_cmds')['names'] = names
if save_dic != {}:
dir = rtmgr_src_dir()
print('saving param.yaml')
f = open(dir + 'param.yaml', 'w')
s = yaml.dump(save_dic, default_flow_style=False)
#print 'save\n', s # for debug
f.write(s)
f.close()
def reload_computing_yaml(self):
parent = self.tree_ctrl_0.GetParent()
sizer = self.tree_ctrl_0.GetContainingSizer()
items = self.load_yaml('computing.yaml')
# backup cmd_dic proc
cmd_dic = self.computing_cmd
to_name = lambda obj: next( ( d.get('name') for d in self.config_dic.values() if d.get('obj') == obj ), None )
procs = [ ( to_name(obj), proc ) for (obj, (cmd, proc)) in cmd_dic.items() if proc ]
# remove old tree ctrl
for i in range(2):
self.obj_get('tree_ctrl_' + str(i)).Destroy()
# remove old params
names = [ prm.get('name') for prm in items.get('params', []) ]
for prm in self.params[:]: # copy
if prm.get('name') in names:
self.params.remove(prm)
self.add_params(items.get('params', []))
# overwrite sys_gdic
old = self.sys_gdic
self.sys_gdic = items.get('sys_gui')
self.sys_gdic['update_func'] = self.update_func
for d in self.config_dic.values():
if d.get('gdic') == old:
d['gdic'] = self.sys_gdic
# listing update names
def subs_names(subs):
f2 = lambda s: subs_names( s.get('subs') ) if 'subs' in s else [ s.get('name') ]
f = lambda lst, s: lst + f2(s)
return reduce(f, subs, [])
names = subs_names( items.get('subs') )
names += items.get('buttons', {}).keys()
# remove old data of name in config_dic
for (k, v) in self.config_dic.items():
if v.get('name') in names:
self.config_dic.pop(k, None)
# rebuild tree ctrl
cmd_dic.clear()
for i in range(2):
tree_ctrl = self.create_tree(parent, items['subs'][i], None, None, self.computing_cmd)
tree_ctrl.ExpandAll()
tree_ctrl.SetBackgroundColour(wx.NullColour)
setattr(self, 'tree_ctrl_' + str(i), tree_ctrl)
sizer.Add(tree_ctrl, 1, wx.EXPAND, 0)
self.setup_buttons(items.get('buttons', {}), self.computing_cmd)
# restore cmd_dic proc
to_obj = lambda name: next( ( d.get('obj') for d in self.config_dic.values() if d.get('name') == name ), None )
for (name, proc) in procs:
obj = to_obj(name)
if obj and obj in cmd_dic:
cmd_dic[ obj ] = ( cmd_dic.get(obj)[0], proc )
set_val(obj, True)
parent.Layout()
def toggle_booted_cmds(self):
(enable, msg) = self.get_booted_cmds_enable_msg()
style = wx.OK | wx.CANCEL | wx.ICON_QUESTION
dlg = wx.MessageDialog(self, msg, '', style)
if dlg.ShowModal() != wx.ID_OK:
return
if 'booted_cmds' not in self.load_dic:
self.load_dic['booted_cmds'] = {}
self.load_dic.get('booted_cmds')['enable'] = not enable
def get_booted_cmds_enable_msg(self):
enable = self.load_dic.get('booted_cmds', {}).get('enable', False)
s = 'Enable' if not enable else 'Disable'
msg = '{} booted commands menu ?'.format(s)
return (enable, msg)
def RosCb(self, data):
print('recv topic msg : ' + data.data)
r = rospy.Rate(10)
rospy.is_shutdown()
r.sleep()
self.pub.publish(data.data)
r.sleep()
def setup_buttons(self, d, run_dic):
for (k,d2) in d.items():
pfs = [ 'button_', 'checkbox_' ]
obj = next( (self.obj_get(pf+k) for pf in pfs if self.obj_get(pf+k)), None)
if not obj:
s = 'button_' + k
obj = StrValObj(s, False)
setattr(self, s, obj)
if not d2 or type(d2) is not dict:
continue
if 'run' in d2:
run_dic[obj] = (d2['run'], None)
set_tooltip(obj, d2)
gdic = self.gdic_get_1st(d2)
if 'param' in d2:
pdic = self.load_dic_pdic_setup(k, d2)
prm = self.get_param(d2.get('param'))
for var in prm.get('vars'):
name = var.get('name')
if name not in pdic and 'v' in var:
pdic[name] = var.get('v')
for (name, v) in pdic.items():
restore = eval( gdic.get(name, {}).get('restore', 'lambda a : None') )
restore(v)
self.add_cfg_info(obj, obj, k, pdic, gdic, False, prm)
pnls = [ gdic.get(var.get('name'), {}).get('panel') for var in prm.get('vars') ]
for pnl in [ gdic.get('panel') ] + pnls:
if pnl:
self.set_param_panel(obj, eval_if_str(self, pnl))
else:
self.add_cfg_info(obj, obj, k, None, gdic, False, None)
def OnGear(self, event):
grp = { self.button_statchk_d : 1,
self.button_statchk_r : 2,
self.button_statchk_b : 3,
self.button_statchk_n : 4,
self.button_statchk_p : 5 }
self.radio_action(event, grp.keys())
v = grp.get(event.GetEventObject())
if v is not None:
pub = rospy.Publisher('gear_cmd', gear_cmd, queue_size=10)
pub.publish(gear_cmd(gear=v))
def OnLamp(self, event):
pub = rospy.Publisher('lamp_cmd', lamp_cmd, queue_size=10)
msg = lamp_cmd()
msg.l = self.button_statchk_lamp_l.GetValue()
msg.r = self.button_statchk_lamp_r.GetValue()
pub.publish(msg)
def OnIndi(self, event):
pub = rospy.Publisher('indicator_cmd', indicator_cmd, queue_size=10)
msg = indicator_cmd()
msg.l = self.button_statchk_indi_l.GetValue()
msg.r = self.button_statchk_indi_r.GetValue()
pub.publish(msg)
def OnAutoPilot(self, event):
obj = event.GetEventObject()
self.alias_sync(obj)
v = obj.GetValue()
pub = rospy.Publisher('mode_cmd', mode_cmd, queue_size=10)
pub.publish(mode_cmd(mode=v))
def radio_action(self, event, grp):
push = event.GetEventObject()
for b in grp:
v = b.GetValue()
act = None
act = True if b is push and not v else act
act = False if b is not push and v else act
if act is not None:
set_val(b, act)
def stat_label_off(self, obj):
qs_nms = [ 'map', 'sensing', 'localization', 'detection', 'mission_planning', 'motion_planning' ]
exec_time = self.qs_dic.get('exec_time', {})
gdic = self.obj_to_gdic(obj, {})
msg = std_msgs.msg.Bool(False)
for k in gdic.get('stat_topic', []):
# exec_time off
if next( (dic for dic in exec_time.values() if k in dic), None):
self.exec_time_callback(std_msgs.msg.Float32(0), (k, 'data'))
else:
self.stat_callback(msg, k)
# Quick Start tab, exec_time off
obj_nm = self.name_get(obj)
nm = next( (nm for nm in qs_nms if 'button_' + nm + '_qs' == obj_nm), None)
for key in exec_time.get(nm, {}):
self.exec_time_callback(std_msgs.msg.Float32(0), (key, 'data'))
def route_cmd_callback(self, data):
self.route_cmd_waypoint = data.point
def stat_callback(self, msg, k):
self.stat_dic[k] = msg.data
if k == 'pmap':
v = self.stat_dic.get(k)
wx.CallAfter(self.label_point_cloud.SetLabel, 'OK' if v else '')
if k in [ 'pmap', 'vmap' ]:
v = self.stat_dic.get('pmap') and self.stat_dic.get('vmap')
wx.CallAfter(self.label_map_qs.SetLabel, 'OK' if v else '')
def exec_time_callback(self, msg, (key, attr)):
msec = int(getattr(msg, attr, 0))
exec_time = self.qs_dic.get('exec_time', {})
(nm, dic) = next( ( (nm, dic) for (nm, dic) in exec_time.items() if key in dic), None)
dic[ key ] = msec
lb = self.obj_get('label_' + nm + '_qs')
if lb:
sum = reduce( lambda a,b:a+(b if b else 0), dic.values(), 0 )
wx.CallAfter(lb.SetLabel, str(sum)+' ms' if sum > 0 else '')
# update Status tab
lb = ''
for nm in [ 'map', 'sensing', 'localization', 'detection', 'mission_planning', 'motion_planning' ]:
dic = exec_time.get(nm, {})
sum = reduce( lambda a,b:a+(b if b else 0), dic.values(), 0 )
if sum > 0:
s = nm + ' : ' + str(sum) + ' ms'
lb += s + '\n'
wx.CallAfter(self.label_node_time.SetLabel, lb)
wx.CallAfter(self.label_node_time.GetParent().FitInside)
#
# Setup tab
#
def OnSetupLocalizer(self, event):
obj = self.button_setup_tf
(pdic, gdic, prm) = self.obj_to_pdic_gdic_prm(obj)
self.update_func(pdic, gdic, prm)
#
# Computing Tab
#
def OnTreeMotion(self, event):
tree = event.GetEventObject()
pt = event.GetPosition()
event.Skip()
(item, flags) = tree.HitTest(pt)
if flags & CT.TREE_HITTEST_ONITEMLABEL == 0:
return
text = item.GetData()
if not text:
return
x = item.GetX()
y = item.GetY()
w = item.GetWidth()
h = item.GetHeight()
(x, y) = tree.CalcScrolledPosition(x, y)
iw = tree.GetItemWindow(item)
w -= iw.GetSize()[0] if iw else 0
if not wx.Rect(x, y, w, h).Contains(pt):
return
(x, y) = tree.ClientToScreen((x, y))
self.tip_info = (tree, text, wx.Rect(x, y, w, h))
if getattr(self, 'tip_timer', None) is None:
self.tip_timer = wx.Timer(self)
self.Bind(wx.EVT_TIMER, self.OnTipTimer, self.tip_timer)
self.tip_timer.Start(200, oneShot=True)
def OnTipTimer(self, event):
if getattr(self, 'tip_info', None):
(tree, text, rect) = self.tip_info
(w, h) = self.GetSize()
wx.TipWindow(tree, text, maxLength=w, rectBound=rect)
def OnTreeChecked(self, event):
self.OnChecked_obj(event.GetItem())
def OnChecked_obj(self, obj):
self.OnLaunchKill_obj(obj)
def OnHyperlinked(self, event):
self.OnHyperlinked_obj(event.GetEventObject())
def OnHyperlinked_obj(self, obj):
(pdic, gdic, prm) = self.obj_to_pdic_gdic_prm(obj)
if pdic is None or prm is None:
return
dic_list_push(gdic, 'dialog_type', 'config')
klass_dlg = globals().get(gdic_dialog_name_get(gdic), MyDialogParam)
dlg = klass_dlg(self, pdic=pdic, gdic=gdic, prm=prm)
show_modal(dlg)
dic_list_pop(gdic, 'dialog_type')
def obj_to_add_args(self, obj, msg_box=True):
(pdic, gdic, prm) = self.obj_to_pdic_gdic_prm(obj)
if pdic is None or prm is None:
return None
if 'need_camera_info' in gdic.get('flags', []) and msg_box:
ids = self.camera_ids()
if ids:
var = self.get_var(prm, 'camera_id', {})
var['choices'] = ids
dic_list_push(gdic, 'dialog_type', 'sel_cam')
klass_dlg = globals().get(gdic_dialog_name_get(gdic), MyDialogParam)
dlg = klass_dlg(self, pdic=pdic, gdic=gdic, prm=prm)
dlg_ret = show_modal(dlg)
dic_list_pop(gdic, 'dialog_type')
if dlg_ret != 0:
return False
else:
pdic['camera_id'] = ''
if 'open_dialog' in gdic.get('flags', []) and msg_box:
dic_list_push(gdic, 'dialog_type', 'open')
klass_dlg = globals().get(gdic_dialog_name_get(gdic), MyDialogParam)
dlg = klass_dlg(self, pdic=pdic, gdic=gdic, prm=prm)
dlg_ret = show_modal(dlg)
dic_list_pop(gdic, 'dialog_type')
if dlg_ret != 0:
return False
self.update_func(pdic, gdic, prm)
s = ''
vars = []
for var in prm.get('vars'):
cmd_param = var.get('cmd_param')
if cmd_param:
vars.append(var)
for var in vars[:]: # copy
cmd_param = var.get('cmd_param')
if cmd_param.get('tail'):
vars.remove(var)
vars.append(var)
for var in vars[:]: # copy
name = var.get('name')
flags = gdic.get(name, {}).get('flags', [])
if 'hide' in flags or 'disable' in flags:
vars.remove(var)
for var in vars:
cmd_param = var.get('cmd_param')
name = var.get('name')
v = pdic.get(name)
if (v is None or v == '') and 'default' in cmd_param:
v = cmd_param.get('default')
if dic_eval_if_str(self, cmd_param, 'must') and (v is None or v == ''):
print 'cmd_param', name, 'is required'
if msg_box:
wx.MessageBox('cmd_param ' + name + ' is required')
return False
if dic_eval_if_str(self, cmd_param, 'only_enable') and not v:
continue
if dic_eval_if_str(self, cmd_param, 'only_disable') and v:
continue
name = cmd_param.get('var_name', name)
unpack = cmd_param.get('unpack')
if unpack is not None:
v = ' '.join( v.split(unpack) )
add = ''
dash = cmd_param.get('dash')
if dash is not None:
add += dash + name
delim = cmd_param.get('delim')
if delim is not None:
str_v = str(v)
if var.get('kind') is None:
str_v = adjust_num_str(str_v)
if var.get('kind') == 'path':
str_v = path_expand_cmd(str_v)
str_v = os.path.expandvars(os.path.expanduser(str_v))
relpath_from = var.get('relpath_from')
if relpath_from:
relpath_from = path_expand_cmd(relpath_from)
relpath_from = os.path.expandvars(os.path.expanduser(relpath_from))
str_v = os.path.relpath(str_v, relpath_from)
add += delim + str_v
if add != '':
s += add + ' '
return s.strip(' ').split(' ') if s != '' else None
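	# Look up the (pdic, gdic, prm) triple registered via add_cfg_info()
	# for a GUI object; sys=True selects the entry bound to the 'sys' param
	# instead of the app param.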
def obj_to_pdic_gdic_prm(self, obj, sys=False):
info = self.config_dic.get(obj)
if info is None:
sys_prm = self.get_param('sys')
prm_chk = lambda prm : prm is sys_prm if sys else prm is not sys_prm
info = next( ( v for v in self.config_dic.values() if v.get('obj') is obj and prm_chk(v.get('param')) ), None)
if info is None:
return (None, None, None)
pdic = info.get('pdic')
prm = info.get('param')
gdic = info.get('gdic')
return (pdic, gdic, prm)
def obj_to_gdic(self, obj, def_ret=None):
(_, gdic, _) = self.obj_to_pdic_gdic_prm(obj) if obj else (None, None, None)
return gdic if gdic else def_ret
def cfg_obj_dic(self, arg_dic, sys=False, def_ret=(None,{})):
sys_prm = self.get_param('sys')
prm_chk = {
True : (lambda prm : prm is sys_prm),
False : (lambda prm : prm is not sys_prm),
None : (lambda prm : True) }.get(sys)
arg_dic_chk = lambda dic: all( [ dic.get(k) == v for (k,v) in arg_dic.items() ] )
return next( ( (cfg_obj, dic) for (cfg_obj, dic) in self.config_dic.items() \
if arg_dic_chk(dic) and prm_chk(dic.get('param')) ), def_ret)
def cfg_dic(self, arg_dic, sys=False, def_ret={}):
(_, dic) = self.cfg_obj_dic(arg_dic, sys=sys, def_ret=(None, def_ret))
return dic
def cfg_prm_to_obj(self, arg_dic, sys=False):
return self.cfg_dic(arg_dic, sys=sys).get('obj')
def name_to_pdic_gdic_prm(self, name, sys=False):
d = self.cfg_dic( {'name':name}, sys=sys )
return ( d.get('pdic'), d.get('gdic'), d.get('param') )
def update_func(self, pdic, gdic, prm):
for var in prm.get('vars', []):
name = var.get('name')
gdic_v = gdic.get(name, {})
func = gdic_v.get('func')
if func is None and name in pdic:
continue
v = var.get('v')
if func is not None:
v = eval(func) if type(func) is str else func()
pdic[ name ] = v
hook = gdic_v.get('update_hook')
if hook:
hook(v)
hook_var = gdic_v.get('hook_var', {})
every_time = 'every_time' in hook_var.get('flags', [])
if var == gdic.get('update_func_arg_var') or every_time:
hook = hook_var.get('hook')
if hook:
hook(hook_var.get('args', {}))
if 'pub' in prm:
self.publish_param_topic(pdic, prm)
self.rosparam_set(pdic, prm)
self.update_depend_enable(pdic, gdic, prm)
d = self.cfg_dic( {'pdic':pdic, 'gdic':gdic, 'param':prm}, sys=True )
self.update_proc_cpu(d.get('obj'), d.get('pdic'), d.get('param'))
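	# Apply the process-related settings ('cpu_chks' affinity list, 'nice',
	# and optionally a real-time scheduling policy/priority) to a launched
	# process and all of its children.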
def update_proc_cpu(self, obj, pdic=None, prm=None):
if obj is None or not obj.GetValue():
return
(_, _, proc) = self.obj_to_cmd_dic_cmd_proc(obj)
if proc is None:
return
if pdic is None or prm is None:
(pdic, _, prm) = self.obj_to_pdic_gdic_prm(obj, sys=True)
cpu_chks = self.param_value_get(pdic, prm, 'cpu_chks')
cpu_chks = cpu_chks if cpu_chks else [ True for i in range(get_cpu_count()) ]
cpus = [ i for i in range(get_cpu_count()) if cpu_chks[i] ]
nice = self.param_value_get(pdic, prm, 'nice', 0)
d = { 'OTHER':SCHED_OTHER, 'FIFO':SCHED_FIFO, 'RR':SCHED_RR }
policy = SCHED_OTHER
priority = 0
if self.param_value_get(pdic, prm, 'real_time', False):
policy = d.get(self.param_value_get(pdic, prm, 'policy', 'FIFO'), SCHED_FIFO)
priority = self.param_value_get(pdic, prm, 'prio', 0)
procs = [ proc ] + get_proc_children(proc, r=True)
for proc in procs:
print 'pid={}'.format(proc.pid)
if get_proc_nice(proc) != nice:
print 'nice {} -> {}'.format(get_proc_nice(proc), nice)
if set_process_nice(proc, nice) is False:
print 'Err set_process_nice()'
if get_proc_cpu_affinity(proc) != cpus:
print 'cpus {} -> {}'.format(get_proc_cpu_affinity(proc), cpus)
if set_process_cpu_affinity(proc, cpus) is False:
print 'Err set_process_cpu_affinity()'
policy_str = next( (k for (k,v) in d.items() if v == policy), '?')
print 'sched policy={} prio={}'.format(policy_str, priority)
if set_scheduling_policy(proc, policy, priority) is False:
print 'Err scheduling_policy()'
def param_value_get(self, pdic, prm, name, def_ret=None):
def_ret = self.param_default_value_get(prm, name, def_ret)
return pdic.get(name, def_ret) if pdic else def_ret
def param_default_value_get(self, prm, name, def_ret=None):
return next( (var.get('v') for var in prm.get('vars') if var.get('name') == name ), def_ret) \
if prm else def_ret
def update_depend_enable(self, pdic, gdic, prm):
for var in prm.get('vars', []):
name = var.get('name')
gdic_v = gdic.get(name, {})
depend = gdic_v.get('depend')
if depend is None:
continue
vp = gdic_v.get('var')
if vp is None:
continue
v = pdic.get(depend)
if v is None:
continue
depend_bool = eval( gdic_v.get('depend_bool', 'lambda v : bool(v)') )
v = depend_bool(v)
enables_set(vp, 'depend', v)
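	# Publish the current pdic values as a ROS message: each var name is a
	# dotted field path into the message class named by prm['msg'], with an
	# optional header stamp and a sign flip for /twist_cmd angular.z.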
def publish_param_topic(self, pdic, prm):
pub = prm['pub']
klass_msg = globals()[ prm['msg'] ]
msg = klass_msg()
for (name, v) in pdic.items():
if prm.get('topic') == '/twist_cmd' and name == 'twist.angular.z':
v = -v
(obj, attr) = msg_path_to_obj_attr(msg, name)
if obj and attr in obj.__slots__:
type_str = obj._slot_types[ obj.__slots__.index(attr) ]
setattr(obj, attr, str_to_rosval(v, type_str, v))
if 'stamp' in prm.get('flags', []):
(obj, attr) = msg_path_to_obj_attr(msg, 'header.stamp')
setattr(obj, attr, rospy.get_rostime())
pub.publish(msg)
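	# Sync vars carrying a 'rosparam' key to the ROS parameter server via
	# the rosparam CLI; unchanged values are skipped, and an empty value
	# deletes an existing parameter.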
def rosparam_set(self, pdic, prm):
rosparams = None
for var in prm.get('vars', []):
name = var['name']
if 'rosparam' not in var or name not in pdic:
continue
rosparam = var['rosparam']
v = pdic.get(name)
v = str(v)
cvdic = { 'True':'true', 'False':'false' }
if v in cvdic:
v = cvdic.get(v)
if rosparams is None:
cmd = [ 'rosparam', 'list' ]
rosparams = subprocess.check_output(cmd).strip().split('\n')
nm = rosparam
nm = ('/' if len(nm) > 0 and nm[0] != '/' else '') + nm
exist = nm in rosparams
if exist:
cmd = [ 'rosparam', 'get', rosparam ]
ov = subprocess.check_output(cmd).strip()
if ov == v:
continue
elif v == '':
continue
cmd = [ 'rosparam', 'set', rosparam, v ] if v != '' else [ 'rosparam', 'delete', rosparam ]
print(cmd)
subprocess.call(cmd)
#
# Sensing Tab
#
def OnSensingDriver(self, event):
self.OnChecked_obj(event.GetEventObject())
def OnRosbagRecord(self, event):
self.dlg_rosbag_record.show()
obj = event.GetEventObject()
set_val(obj, False)
def create_checkboxes(self, dic, panel, sizer, probe_dic, run_dic, bind_handler):
if 'name' not in dic:
return
obj = None
bdr_flg = wx.ALL
if 'subs' in dic:
lst = []
for d in dic['subs']:
self.create_checkboxes(d, panel, lst, probe_dic, run_dic, bind_handler)
if dic['name']:
obj = static_box_sizer(panel, dic.get('name'))
set_tooltip(obj.GetStaticBox(), dic)
else:
obj = wx.BoxSizer(wx.VERTICAL)
for (o, flg) in lst:
obj.Add(o, 0, wx.EXPAND | flg, 4)
else:
obj = wx.CheckBox(panel, wx.ID_ANY, dic['name'])
set_tooltip(obj, dic)
self.Bind(wx.EVT_CHECKBOX, bind_handler, obj)
bdr_flg = wx.LEFT | wx.RIGHT
if 'probe' in dic:
probe_dic[obj] = (dic['probe'], None)
if 'run' in dic:
run_dic[obj] = (dic['run'], None)
if 'param' in dic:
obj = self.add_config_link(dic, panel, obj)
else:
gdic = self.gdic_get_1st(dic)
self.add_cfg_info(obj, obj, dic.get('name'), None, gdic, False, None)
if sizer is not None:
sizer.append((obj, bdr_flg))
else:
panel.SetSizer(obj)
def add_config_link(self, dic, panel, obj):
cfg_obj = wx.HyperlinkCtrl(panel, wx.ID_ANY, '[config]', '')
fix_link_color(cfg_obj)
self.Bind(wx.EVT_HYPERLINK, self.OnConfig, cfg_obj)
add_objs = (obj, wx.StaticText(panel, wx.ID_ANY, ' '), cfg_obj)
hszr = sizer_wrap(add_objs, wx.HORIZONTAL)
name = dic['name']
pdic = self.load_dic_pdic_setup(name, dic)
gdic = self.gdic_get_1st(dic)
prm = self.get_param(dic.get('param'))
self.add_cfg_info(cfg_obj, obj, name, pdic, gdic, True, prm)
return hszr
def camera_ids(self):
if self.button_synchronization.GetValue():
return []
cmd = "rostopic list | sed -n 's|/image_raw||p' | sed 's/^$/\//'"
return subprocess.check_output(cmd, shell=True).strip().split()
def cam_id_to_obj(self, cam_id, v):
cam_id_obj = self.cfg_prm_to_obj( {'name':cam_id} )
if cam_id_obj is None:
cam_id_obj = StrValObj(cam_id, v)
cam_id_obj.SetValue(v)
return cam_id_obj
def camera_id_hook(self, args):
new_id = args.get('pdic', {}).get('camera_id', '')
ids = args.get('ids', [])
if new_id not in ids:
return
idx = ids.index(new_id)
pp = args.get('param_panel')
if pp:
pp.detach_func()
dlg = args.get('dlg')
if dlg:
dlg.EndModal(idx + 100)
def OnCalibrationPublisher(self, event):
obj = event.GetEventObject()
(_, gdic_org, prm) = self.obj_to_pdic_gdic_prm(obj)
if obj.GetValue():
gdic_org['ids'] = self.camera_ids()
ids = gdic_org.get('ids', [])
if ids == []:
self.OnLaunchKill(event)
return
#
# setup
#
(cmd_dic, cmd, _) = self.obj_to_cmd_dic_cmd_proc(obj)
flags = gdic_org.get('flags', [])[:] # copy
if 'open_dialog' in flags:
flags.remove('open_dialog')
pdic_baks = {}
for cam_id in ids:
(pdic_a, gdic_a, _) = self.name_to_pdic_gdic_prm(cam_id)
pdic = pdic_a if pdic_a else self.load_dic_pdic_setup(cam_id, {})
pdic_baks[cam_id] = pdic.copy()
gdic = gdic_a if gdic_a else gdic_org.copy()
gdic['flags'] = flags
cam_id_obj = self.cam_id_to_obj(cam_id, obj.GetValue())
if not hasattr(cam_id_obj, 'enables_proxy'):
cam_id_obj.enables_proxy = (obj, cam_id_obj.s)
if not pdic_a or not gdic_a:
self.add_cfg_info(cam_id_obj, cam_id_obj, cam_id, pdic, gdic, False, prm)
			if cam_id_obj not in cmd_dic:
cmd_dic[ cam_id_obj ] = (cmd, None)
var = self.get_var(prm, 'camera_id', {})
var['choices'] = ids
#
# Dialog
#
cam_id = ids[0]
while obj.GetValue():
(pdic, gdic, _) = self.name_to_pdic_gdic_prm(cam_id)
pdic['camera_id'] = cam_id
dic_list_push(gdic, 'dialog_type', 'open2')
klass_dlg = globals().get(gdic_dialog_name_get(gdic), MyDialogParam)
dlg = klass_dlg(self, pdic=pdic, gdic=gdic, prm=prm)
gdic_v = dic_getset(gdic, 'camera_id', {})
args = { 'pdic':pdic, 'ids':ids, 'param_panel':gdic.get('param_panel'), 'dlg':dlg }
gdic_v['hook_var'] = { 'hook':self.camera_id_hook, 'args':args }
dlg_ret = show_modal(dlg)
dic_list_pop(gdic, 'dialog_type')
pdic['camera_id'] = cam_id # restore
if dlg_ret == 0: # OK
break
idx = dlg_ret - 100
if idx < 0 or len(ids) <= idx: # Cancel
for cam_id in ids:
(pdic, _, _) = self.name_to_pdic_gdic_prm(cam_id)
pdic.update(pdic_baks.get(cam_id))
set_val(obj, False)
return
# Menu changed
cam_id = ids[idx]
#
# Launch / Kill
#
for cam_id in ids:
cam_id_obj = self.cfg_prm_to_obj( {'name':cam_id} )
(pdic, _, _) = self.obj_to_pdic_gdic_prm(cam_id_obj)
pdic['solo_camera'] = False
#print '@', cam_id, cam_id_obj.GetValue()
self.OnLaunchKill_obj(cam_id_obj)
#
# Simulation Tab
#
def rosbag_info_hook(self, v):
if not v:
return
th_start(self.rosbag_info_hook_th, {'v':v} )
def rosbag_info_hook_th(self, ev, v): # thread
err = subprocess.STDOUT
s = subprocess.check_output([ 'rosbag', 'info', v ], stderr=err).strip()
wx.CallAfter(self.label_rosbag_info.SetLabel, s)
wx.CallAfter(self.label_rosbag_info.GetParent().FitInside)
#
# Data Tab
#
#
	# Status tab
#
def info_col(self, v, v_yellow, v_red, col_normal, col_red):
if v < v_yellow:
return col_normal
if v < v_red:
(nr,ng,nb) = col_normal
(rr,rg,rb) = col_red
return ( (nr+rr)/2, (ng+rg)/2, (nb+rb)/2 )
return col_red
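	# Parse /proc/meminfo and return (total, used) in kB, where Buffers and
	# Cached are counted as free memory.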
def mem_kb_info(self):
lines = subprocess.check_output('cat /proc/meminfo', shell=True).strip().split(os.linesep)
		cvt = lambda (k, v): ( k.replace(':', ''), int(v) )
		d = dict( map( lambda line: cvt( filter( lambda w: w != 'kB', line.split() ) ), lines ) )
total = d.get('MemTotal')
free = d.get('MemFree') + d.get('Buffers') + d.get('Cached')
return (total, total - free)
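	# Generate a toprc by driving 'top' through a pty: '1' (per-CPU view),
	# 'c' (full command lines), 'W' (write config), 'q' (quit).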
def toprc_create(self):
(child_pid, fd) = pty.fork()
if child_pid == 0: # child
os.execvp('top', ['top'])
else: #parent
sec = 0.2
for s in ['1', 'c', 'W', 'q']:
time.sleep(sec)
os.write(fd, s)
def toprc_setup(self, toprc, backup):
if os.path.exists(toprc):
os.rename(toprc, backup)
self.toprc_create()
def toprc_restore(self, toprc, backup):
os.remove(toprc)
if os.path.exists(backup):
os.rename(backup, toprc)
# top command thread
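	# Polls 'top -b' every `interval` seconds, parses per-CPU load and
	# memory usage into the info bars, fills the top-5 process labels, and
	# starts/stops the blinking alert thread when thresholds are crossed.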
def top_cmd_th(self, ev, setting, cpu_ibls, mem_ibl, toprc, backup):
interval = setting.get('interval', 3)
alert_level = setting.get('alert_level', {})
rate_per_cpu = alert_level.get('rate_per_cpu', 80)
rate_per_cpu_yellow = alert_level.get('rate_per_cpu_yellow', 80)
rate_cpu = alert_level.get('rate_cpu', 80)
rate_mem = alert_level.get('rate_mem', 80)
rate_mem_yellow = alert_level.get('rate_mem_yellow', 80)
for ibl in cpu_ibls:
ibl.lmt_bar_prg = rate_per_cpu
mem_ibl.lmt_bar_prg = rate_mem
alerted = False
cpu_n = get_cpu_count()
while not ev.wait(interval):
s = subprocess.check_output(['sh', '-c', 'env COLUMNS=512 top -b -n 2 -d 0.1']).strip()
i = s.rfind('\ntop -') + 1
s = s[i:]
wx.CallAfter(self.label_top_cmd.SetLabel, s)
wx.CallAfter(self.label_top_cmd.GetParent().FitInside)
k = '%Cpu'
fv_sum = 0
i = 0
for t in s.split('\n'):
if t[:len(k)] != k:
continue
lst = t[1:].split()
v = lst[1] if lst[1] != ':' else lst[2]
if v[0] == ':':
v = v[1:]
fv = str_to_float(v)
col = self.info_col(fv, rate_per_cpu_yellow, rate_per_cpu, (64,64,64), (200,0,0))
if i < cpu_n:
ibl = cpu_ibls[i]
wx.CallAfter(ibl.lb_set, v+'%', col)
wx.CallAfter(ibl.bar_set, int(fv))
fv_sum += fv
i += 1
k = 'KiB Mem:'
(total, used) = self.mem_kb_info()
rate = 100 * used / total
for u in [ 'KB', 'MB', 'GB', 'TB' ]:
if total <= 10 * 1024 or used <= 10:
break
total /= 1024
used /= 1024
col = self.info_col(rate, rate_mem_yellow, rate_mem, (64,64,64), (200,0,0))
tx = str(used) + u + '/' + str(total) + u + '(' + str(rate) + '%)'
wx.CallAfter(mem_ibl.lb_set, tx, col)
wx.CallAfter(mem_ibl.bar_set, rate)
is_alert = (fv_sum >= rate_cpu * cpu_n) or rate >= rate_mem
# --> for test
if os.path.exists('/tmp/alert_test_on'):
is_alert = True
if os.path.exists('/tmp/alert_test_off'):
is_alert = False
# <-- for test
if is_alert and not alerted:
thinf = th_start(self.alert_th, {'bgcol':(200,50,50)})
alerted = True
if not is_alert and alerted:
th_end(thinf)
alerted = False
# top5
i = s.find('\n\n') + 2
lst = s[i:].split('\n')
hd = lst[0]
top5 = lst[1:1+5]
i = hd.rfind('COMMAND')
cmds = [ line[i:].split(' ')[0] for line in top5 ]
i = hd.find('%CPU')
loads = [ line[i-1:].strip().split(' ')[0] for line in top5 ]
for (lb, cmd, load) in zip(self.lb_top5, cmds, loads):
col = self.info_col(str_to_float(load), rate_per_cpu_yellow, rate_per_cpu, (64,64,64), (200,0,0))
wx.CallAfter(lb.SetForegroundColour, col)
wx.CallAfter(lb.SetLabel, cmd + ' (' + load + ' %CPU)')
self.toprc_restore(toprc, backup)
def alert_th(self, bgcol, ev):
wx.CallAfter(self.RequestUserAttention)
c = bgcol
o = wx.NullColour
while not ev.wait(0.5):
for col in [ c, o, c, o, c, o ]:
wx.CallAfter(self.set_bg_all_tabs, col)
time.sleep(0.05)
def log_th(self, file, que, ev):
while not ev.wait(0):
s = file.readline()
if not s:
break
que.put(s)
def logout_th(self, que, interval, tc, ev):
if que == self.log_que_stdout or que == self.log_que_stderr:
while not ev.wait(0):
try:
s = que.get(timeout=1)
except Queue.Empty:
continue
self.log_que.put(s)
if interval <= 0:
continue
ckbox = self.checkbox_stdout if que == self.log_que_stdout else self.checkbox_stderr
if ckbox.GetValue():
self.log_que_show.put( cut_esc(s) )
else: # == self.log_que
f = None
path = self.status_dic.get('log_path')
is_syslog = (path == 'syslog')
if is_syslog:
ident = sys.argv[0].split('/')[-1]
syslog.openlog(ident, syslog.LOG_PID | syslog.LOG_CONS)
elif path:
path = os.path.expandvars(os.path.expanduser(path))
f = open(path, 'a') if path else None
while not ev.wait(0):
try:
s = que.get(timeout=1)
except Queue.Empty:
continue
print s.strip()
sys.stdout.flush()
s = cut_esc(s)
if is_syslog:
syslog.syslog(s)
elif f:
f.write(s)
f.flush()
if is_syslog:
syslog.closelog()
if f:
f.close()
def logshow_th(self, que, interval, tc, ev):
while not ev.wait(interval):
try:
s = que.get(timeout=1)
except Queue.Empty:
continue
wx.CallAfter(append_tc_limit, tc, s)
# que clear
if self.checkbox_stdout.GetValue() is False and \
self.checkbox_stderr.GetValue() is False and \
que.qsize() > 0:
que_clear(que)
wx.CallAfter(tc.Clear)
#
# for Topics tab
#
def OnRefreshTopics(self, event):
self.refresh_topics_list()
def refresh_topics_list(self):
lst = subprocess.check_output([ 'rostopic', 'list' ]).strip().split('\n')
panel = self.panel_topics_list
szr = self.sizer_topics_list
for obj in self.topics_list:
szr.Remove(obj)
obj.Destroy()
self.topics_list = []
for topic in lst:
obj = wx.HyperlinkCtrl(panel, wx.ID_ANY, topic, '')
self.Bind(wx.EVT_HYPERLINK, self.OnTopicLink, obj)
szr.Add(obj, 0, wx.LEFT, 4)
fix_link_color(obj)
self.topics_list.append(obj)
szr.Layout()
panel.SetVirtualSize(szr.GetMinSize())
# info clear
lb = self.label_topics_info
lb.SetLabel('')
# echo clear
self.topics_proc_th_end()
# wait que clear
while self.topics_echo_que.qsize() > 0:
time.sleep(0.1)
tc = self.text_ctrl_topics_echo
tc.Enable(False)
wx.CallAfter(tc.Clear)
wx.CallAfter(tc.Enable, True)
self.topics_echo_sum = 0
self.topic_echo_curr_topic = None
def OnEcho(self, event):
if self.checkbox_topics_echo.GetValue() and self.topic_echo_curr_topic:
self.topics_proc_th_start(self.topic_echo_curr_topic)
else:
self.topics_proc_th_end()
def OnTopicLink(self, event):
obj = event.GetEventObject()
topic = obj.GetLabel()
self.topic_echo_curr_topic = topic
# info
info = subprocess.check_output([ 'rostopic', 'info', topic ]).strip()
lb = self.label_topics_info
lb.SetLabel(info)
lb.GetParent().FitInside()
# echo
self.topics_proc_th_end()
if self.checkbox_topics_echo.GetValue():
self.topics_proc_th_start(topic)
def topics_proc_th_start(self, topic):
out = subprocess.PIPE
err = subprocess.STDOUT
self.topics_echo_proc = psutil.Popen([ 'rostopic', 'echo', topic ], stdout=out, stderr=err)
self.topics_echo_thinf = th_start(self.topics_echo_th)
def topics_proc_th_end(self):
thinf = self.topics_echo_thinf
if thinf:
th_end(thinf)
self.topics_echo_thinf = None
proc = self.topics_echo_proc
if proc:
terminate_children(proc)
terminate(proc)
#proc.wait()
self.topics_echo_proc = None
def topics_echo_th(self, ev):
if not self.topics_echo_proc:
return
file = self.topics_echo_proc.stdout
fl = fcntl.fcntl(file.fileno(), fcntl.F_GETFL)
fcntl.fcntl(file.fileno(), fcntl.F_SETFL, fl | os.O_NONBLOCK)
while not ev.wait(0):
try:
s = file.read(1)
except:
continue
if not s:
break
if self.checkbox_topics_echo.GetValue():
self.topics_echo_que.put(s)
que_clear(self.topics_echo_que)
def topics_echo_show_th(self, ev):
que = self.topics_echo_que
interval = self.topics_dic.get('gui_update_interval_ms', 100) * 0.001
chars_limit = self.topics_dic.get('gui_chars_limit', 10000)
tc = self.text_ctrl_topics_echo
while not ev.wait(interval):
qsz = que.qsize()
if qsz <= 0:
continue
if qsz > chars_limit:
over = qsz - chars_limit
for i in range(over):
try:
que.get(timeout=1)
except Queue.Empty:
break
qsz = chars_limit
arr = []
for i in range(qsz):
try:
s = que.get(timeout=1)
except Queue.Empty:
s = ''
arr.append(s)
s = ''.join(arr)
self.topics_echo_sum += len(s)
rm_chars = 0
if self.topics_echo_sum > chars_limit:
rm_chars = self.topics_echo_sum - chars_limit
self.topics_echo_sum = chars_limit
if self.checkbox_topics_echo.GetValue():
wx.CallAfter(append_tc_limit, tc, s, rm_chars)
#
# State Tabs
#
def getStateId(self, s_text):
		if s_text in self.mainstate_dic:
			return self.mainstate_dic[s_text]
		if s_text in self.substate_dic:
			return self.substate_dic[s_text]
		return -99
def OnState(self, event):
pub = rospy.Publisher('state_cmd', std_msgs.msg.Int32, queue_size=10)
msg = std_msgs.msg.Int32()
clicked_event = event.GetEventObject()
msg.data = self.getStateId(clicked_event.GetLabel())
pub.publish(msg)
#
# Common Utils
#
def set_param_panel(self, obj, parent):
(pdic, gdic, prm) = self.obj_to_pdic_gdic_prm(obj)
panel = ParamPanel(parent, frame=self, pdic=pdic, gdic=gdic, prm=prm)
sizer_wrap((panel,), wx.VERTICAL, 0, wx.EXPAND, 0, parent)
k = 'ext_toggle_enables'
gdic[ k ] = gdic.get(k, []) + [ panel ]
def obj_to_varpanel(self, obj, var_name):
gdic = self.obj_to_gdic(obj, {})
return gdic.get(var_name, {}).get('var')
def obj_to_varpanel_tc(self, obj, var_name):
vp = self.obj_to_varpanel(obj, var_name)
return vp.tc if vp and vp.tc else None
def OnConfig(self, event):
self.OnHyperlinked_obj(event.GetEventObject())
def add_params(self, params):
for prm in params:
if 'topic' in prm and 'msg' in prm:
klass_msg = globals()[ prm['msg'] ]
prm['pub'] = rospy.Publisher(prm['topic'], klass_msg, latch=True, queue_size=10)
self.params += params
def gdic_get_1st(self, dic):
gdic = dic.get('gui', {})
gdic['update_func'] = self.update_func
return gdic
def add_cfg_info(self, cfg_obj, obj, name, pdic, gdic, run_disable, prm):
self.config_dic[ cfg_obj ] = { 'obj':obj , 'name':name , 'pdic':pdic , 'gdic':gdic,
'run_disable':run_disable , 'param':prm }
def get_param(self, prm_name):
return next( (prm for prm in self.params if prm['name'] == prm_name), None)
def get_var(self, prm, var_name, def_ret=None):
return next( (var for var in prm.get('vars') if var.get('name') == var_name), def_ret)
def obj_to_cmd_dic(self, obj):
return next( (cmd_dic for cmd_dic in self.all_cmd_dics if obj in cmd_dic), None)
def obj_to_cmd_dic_cmd_proc(self, obj):
cmd_dic = self.obj_to_cmd_dic(obj)
if cmd_dic is None:
return (None, None, None)
(cmd, proc) = cmd_dic.get(obj, (None, None))
return (cmd_dic, cmd, proc)
def OnLaunchKill(self, event):
self.OnLaunchKill_obj(event.GetEventObject())
def OnLaunchKill_obj(self, obj):
self.alias_sync(obj)
obj = self.alias_grp_top_obj(obj)
v = obj.GetValue()
add_args = self.obj_to_add_args(obj, msg_box=v) # no open dialog at kill
if add_args is False:
set_val(obj, not v)
return
(cmd_dic, _, proc_bak) = self.obj_to_cmd_dic_cmd_proc(obj)
self.launch_kill_proc(obj, cmd_dic, add_args=add_args)
(_, _, proc) = self.obj_to_cmd_dic_cmd_proc(obj)
if proc != proc_bak:
self.toggle_enable_obj(obj)
if proc:
self.update_proc_cpu(obj)
def OnRosbagPlay(self, event):
obj = event.GetEventObject()
play = self.button_play_rosbag_play
stop = self.button_stop_rosbag_play
pause = self.button_pause_rosbag_play
(_, _, prm) = self.obj_to_pdic_gdic_prm(play)
var = self.get_var(prm, 'sim_time', {})
if obj == play:
var['v'] = True
self.OnLaunchKill_obj(play)
button_color_change(play)
set_val(stop, False)
set_val(pause, False)
elif obj == stop:
set_val(stop, True)
set_val(play, False)
set_val(pause, False)
var['v'] = False
self.OnLaunchKill_obj(play)
button_color_change(stop)
elif obj == pause:
(_, _, proc) = self.obj_to_cmd_dic_cmd_proc(play)
if proc:
proc.stdin.write(' ')
def OnFtrace(self, event):
obj = event.GetEventObject()
cmd = 'rosrun runtime_manager ftrace.py'
v = obj.GetValue()
self.ftrace_proc_ = self.launch_kill(v, cmd,
None if v else self.ftrace_proc_, obj=obj)
def stdout_file_search(self, file, k):
s = ''
while True:
c = file.read(1)
if not c:
return None
if c != '\r' and c != '\n':
s += c
continue
s = s.strip()
if k in s:
break
s = ''
i = s.find(k) + len(k)
return s[i:]
# thread
def point_cloud_progress_bar(self, file, ev):
obj = self.button_point_cloud
(pdic, _, _) = self.obj_to_pdic_gdic_prm(obj)
n = len(pdic.get('path_pcd', '').split(','))
if n == 0:
return
i = 0
while not ev.wait(0):
s = self.stdout_file_search(file, 'load ')
if not s:
break
err_key = 'failed '
if s[:len(err_key)] != err_key:
i += 1
else:
i -= 1
print s
wx.CallAfter(self.label_point_cloud_bar.set, 100 * i / n)
wx.CallAfter(self.label_point_cloud_bar.clear)
# thread
def rosbag_play_progress_bar(self, file, ev):
while not ev.wait(0):
s = self.stdout_file_search(file, 'Duration:')
if not s:
break
lst = s.split()
pos = str_to_float(lst[0])
# lst[1] is '/'
total = str_to_float(lst[2])
if total == 0:
continue
prg = int(100 * pos / total + 0.5)
pos = str(int(pos))
total = str(int(total))
wx.CallAfter(self.label_rosbag_play_bar.set, prg)
wx.CallAfter(self.label_rosbag_play_pos.SetLabel, pos)
wx.CallAfter(self.label_rosbag_play_total.SetLabel, total)
wx.CallAfter(self.label_rosbag_play_bar.clear)
wx.CallAfter(self.label_rosbag_play_pos.SetLabel, '')
wx.CallAfter(self.label_rosbag_play_total.SetLabel, '')
def alias_sync(self, obj, v=None):
en = None
if getattr(obj, 'IsEnabled', None):
(key, en) = enables_get_last(obj)
if not key:
en = obj.IsEnabled()
grp = self.alias_grp_get(obj)
if getattr(obj, 'GetValue', None):
v = obj.GetValue()
for o in grp:
if o is obj:
continue
if en is not None and o.IsEnabled() != en and not self.is_toggle_button(o):
if key:
enable_set(o, key, en)
else:
o.Enable(en)
if v is not None and getattr(o, 'SetValue', None):
set_val(o, v)
if getattr(o, 'SetInsertionPointEnd', None):
o.SetInsertionPointEnd()
def alias_grp_top_obj(self, obj):
return get_top(self.alias_grp_get(obj), obj)
def alias_grp_get(self, obj):
return next( (grp for grp in self.alias_grps if obj in grp), [])
def create_tree(self, parent, items, tree, item, cmd_dic):
name = items.get('name', '')
if tree is None:
style = wx.TR_HAS_BUTTONS | wx.TR_NO_LINES | wx.TR_HIDE_ROOT | wx.TR_DEFAULT_STYLE | wx.SUNKEN_BORDER
tree = CT.CustomTreeCtrl(parent, wx.ID_ANY, agwStyle=style)
			# prevent unwanted scrolling when an item is checked
tree.AcceptsFocus = MethodType(lambda self: False, tree, CT.CustomTreeCtrl)
item = tree.AddRoot(name, data=tree)
tree.Bind(wx.EVT_MOTION, self.OnTreeMotion)
else:
ct_type = 1 if 'cmd' in items else 0 # 1:checkbox type
item = tree.AppendItem(item, name, ct_type=ct_type)
if 'desc' in items:
item.SetData(items.get('desc'))
if 'cmd' in items:
cmd_dic[item] = (items['cmd'], None)
pdic = self.load_dic_pdic_setup(name, items)
pnl = wx.Panel(tree, wx.ID_ANY)
add_objs = []
self.new_link(item, name, pdic, self.sys_gdic, pnl, 'sys', 'sys', add_objs)
gdic = self.gdic_get_1st(items)
if 'param' in items:
self.new_link(item, name, pdic, gdic, pnl, 'app', items.get('param'), add_objs)
else:
self.add_cfg_info(item, item, name, None, gdic, False, None)
szr = sizer_wrap(add_objs, wx.HORIZONTAL, flag=wx.ALIGN_CENTER_VERTICAL, parent=pnl)
szr.Fit(pnl)
tree.SetItemWindow(item, pnl)
for sub in items.get('subs', []):
self.create_tree(parent, sub, tree, item, cmd_dic)
return tree
def new_link(self, item, name, pdic, gdic, pnl, link_str, prm_name, add_objs):
lkc = None
if 'no_link' not in gdic.get('flags', []):
lkc = wx.HyperlinkCtrl(pnl, wx.ID_ANY, link_str, "")
if hasattr(lkc, 'SetCanFocus'):
lkc.SetCanFocus(False)
fix_link_color(lkc)
self.Bind(wx.EVT_HYPERLINK, self.OnHyperlinked, lkc)
if len(add_objs) > 0:
add_objs += [ wx.StaticText(pnl, wx.ID_ANY, ' ') ]
add_objs += [ wx.StaticText(pnl, wx.ID_ANY, '['), lkc, wx.StaticText(pnl, wx.ID_ANY, ']') ]
prm = self.get_param(prm_name)
self.add_cfg_info(lkc if lkc else item, item, name, pdic, gdic, False, prm)
def load_dic_pdic_setup(self, name, dic):
name = dic.get('share_val', dic.get('name', name))
pdic = self.load_dic.get(name, {})
self.load_dic[ name ] = pdic
return pdic
def launch_kill_proc(self, obj, cmd_dic, add_args=None):
if obj not in cmd_dic:
set_val(obj, False)
print('not implemented.')
return
v = obj.GetValue()
(cmd, proc) = cmd_dic[obj]
if not cmd:
set_val(obj, False)
proc = self.launch_kill(v, cmd, proc, add_args, obj=obj)
(cfg_obj, dic) = self.cfg_obj_dic( {'obj':obj} )
if cfg_obj and dic.get('run_disable'):
cfg_obj.Enable(not v)
cmd_dic[obj] = (cmd, proc)
if not v:
self.stat_label_off(obj)
def proc_to_cmd_dic_obj(self, proc):
for cmd_dic in self.all_cmd_dics:
obj = next( (obj for (obj, v) in cmd_dic.items() if proc in v), None)
if obj:
return (cmd_dic, obj)
return (None, None)
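	# Start or stop a child process. v=True spawns it with psutil.Popen
	# (through a shell when the command starts with 'do_shell_exec') and
	# attaches log threads to stdout/stderr; v=False terminates it (and its
	# children when the 'kill_children' flag is set), using SIGINT unless
	# the 'SIGTERM' flag is present.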
def launch_kill(self, v, cmd, proc, add_args=None, sigint=None, obj=None, kill_children=None):
msg = None
msg = 'already launched.' if v and proc else msg
msg = 'already terminated.' if not v and proc is None else msg
msg = 'cmd not implemented.' if not cmd else msg
if msg is not None:
print(msg)
return proc
if v:
args = shlex.split(cmd)
if add_args:
args += add_args
print(args) # for debug
f = self.obj_to_gdic(obj, {}).get('stdout_func')
f = eval_if_str(self, f)
f = f if f else self.log_th
out = subprocess.PIPE if f else None
err = subprocess.STDOUT if f else None
if f == self.log_th:
err = subprocess.PIPE
shell = ( len(args) > 0 and args[0] == 'do_shell_exec' )
if shell:
args = ' '.join( args[1:] )
proc = psutil.Popen(args, stdin=subprocess.PIPE, stdout=out, stderr=err, shell=shell)
self.all_procs.append(proc)
if f == self.log_th:
thinf = th_start(f, {'file':proc.stdout, 'que':self.log_que_stdout})
self.all_th_infs.append(thinf)
thinf = th_start(f, {'file':proc.stderr, 'que':self.log_que_stderr})
self.all_th_infs.append(thinf)
elif f:
thinf = th_start(f, {'file':proc.stdout})
self.all_th_infs.append(thinf)
else:
flags = self.obj_to_gdic(obj, {}).get('flags', [])
if sigint is None:
sigint = 'SIGTERM' not in flags
if kill_children is None:
kill_children = 'kill_children' in flags
if kill_children:
terminate_children(proc, sigint)
terminate(proc, sigint)
enables_set(obj, 'proc_wait', False)
th_start( proc_wait_thread, {'proc': proc, 'obj': obj} )
if proc in self.all_procs:
self.all_procs.remove(proc)
proc = None
return proc
def roslaunch_to_nodes(self, cmd):
try:
s = subprocess.check_output(cmd).strip()
return s.split('\n') if s != '' else []
except subprocess.CalledProcessError:
return []
def set_bg_all_tabs(self, col=wx.NullColour):
add_pnls = [
self,
self.tree_ctrl_0,
self.tree_ctrl_1,
self.tree_ctrl_data ]
for tab in self.all_tabs + add_pnls:
tab.SetBackgroundColour(col)
def get_autoware_dir(self):
dir = rtmgr_src_dir() + '../../../../../../'
return os.path.abspath(dir)
def load_yaml(self, filename, def_ret=None):
return load_yaml(filename, def_ret)
def toggle_enable_obj(self, obj):
objs = []
pfs = [ 'button_play_', 'button_stop_', 'button_pause_',
'button_ref_', 'text_ctrl_' ]
key = self.obj_key_get(obj, pfs)
if key:
objs += self.key_objs_get(pfs, key)
gdic = self.obj_to_gdic(obj, {})
objs += [ eval_if_str(self, e) for e in gdic.get('ext_toggle_enables', []) ]
self.toggle_enables(objs)
def toggle_enables(self, objs):
for obj in objs:
if getattr(obj, 'IsEnabled', None):
en = enables_get(obj, 'toggle', obj.IsEnabled())
enables_set(obj, 'toggle', not en)
self.alias_sync(obj)
def is_toggle_button(self, obj):
return self.name_get(obj).split('_')[0] == 'button' and getattr(obj, 'GetValue', None)
def obj_name_split(self, obj, pfs):
name = self.name_get(obj)
if name is None:
return (None, None)
		return next( ( ( name[:len(pf)], name[len(pf):] ) for pf in pfs if name[:len(pf)] == pf ), (None, None))
def obj_key_get(self, obj, pfs):
name = self.name_get(obj)
if name is None:
return None
return next( (name[len(pf):] for pf in pfs if name[:len(pf)] == pf), None)
def key_objs_get(self, pfs, key):
return [ self.obj_get(pf + key) for pf in pfs if self.obj_get(pf + key) ]
def name_get(self, obj):
return next( (nm for nm in dir(self) if getattr(self, nm) is obj), None)
def name_get_cond(self, obj, cond=(lambda s : True), def_ret=None):
return next( (nm for nm in dir(self) if cond(nm) and getattr(self, nm) is obj), def_ret)
def val_get(self, name):
obj = self.obj_get(name)
if obj is None:
return None
return obj.GetValue() if getattr(obj, 'GetValue', None) else None
def obj_get(self, name):
return getattr(self, name, None)
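# Decide whether a var should be shown for the current dialog type:
# a '<type>_dialog_only' list hides the var from all other dialog types,
# and a '<type>_dialog_allow' list hides any var it does not include.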
def gdic_dialog_type_chk(gdic, name):
dlg_type = dic_list_get(gdic, 'dialog_type', 'config')
tail = '_dialog_only'
lst = [ (k, k[:-len(tail)]) for k in gdic.keys() if k[-len(tail):] == tail ]
only_chk = next( (False for (k,type) in lst if type != dlg_type and name in gdic.get(k, [])), True)
tail = '_dialog_allow'
lst = [ (k, k[:-len(tail)]) for k in gdic.keys() if k[-len(tail):] == tail ]
allow_chk = next( (False for (k,type) in lst if type == dlg_type and name not in gdic.get(k, [])), True)
return only_chk and allow_chk
def gdic_dialog_name_get(gdic):
dlg_type = dic_list_get(gdic, 'dialog_type', 'config')
return gdic.get(dlg_type + '_dialog', gdic.get('dialog', 'MyDialogParam') )
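# Panel that builds a VarPanel widget for every entry in prm['vars'],
# grouping message fields under a 'topic : ...' box and rosparam vars
# under their own boxes; update() pushes the current values through the
# gdic 'update_func' (MyFrame.update_func).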
class ParamPanel(wx.Panel):
def __init__(self, *args, **kwds):
self.frame = kwds.pop('frame')
self.pdic = kwds.pop('pdic')
self.gdic = kwds.pop('gdic')
self.prm = kwds.pop('prm')
wx.Panel.__init__(self, *args, **kwds)
self.gdic['param_panel'] = self
obj = self.frame.cfg_prm_to_obj( {'pdic':self.pdic, 'gdic':self.gdic, 'param':self.prm} )
(_, _, proc) = self.frame.obj_to_cmd_dic_cmd_proc(obj)
hszr = None
self.vps = []
self.tmp_msg = None
szr = wx.BoxSizer(wx.VERTICAL)
topic_szrs = (None, None)
vars = self.prm.get('vars')
if self.gdic.get('show_order'):
var_lst = lambda name, vars : [ var for var in vars if var.get('name') == name ]
vars = reduce( lambda lst, name : lst + var_lst(name, vars), self.gdic.get('show_order'), [] )
for var in vars:
name = var.get('name')
if not gdic_dialog_type_chk(self.gdic, name):
continue
gdic_v = self.get_gdic_v_and_chk_enable(name)
if gdic_v is None:
continue
bak_stk_push(gdic_v, 'func')
if gdic_v.get('func'):
continue
v = self.pdic.get(name, var.get('v'))
vp = VarPanel(self, var=var, v=v, update=self.update)
vp.setup_tooltip()
self.vps.append(vp)
gdic_v['var'] = vp
gdic_v['func'] = vp.get_v
prop = gdic_v.get('prop', 0)
border = gdic_v.get('border', 0)
flag = wx_flag_get(gdic_v.get('flags', []))
do_category = 'no_category' not in gdic_v.get('flags', [])
if do_category and self.in_msg(var):
bak = (szr, hszr)
(szr, hszr) = topic_szrs
if szr is None:
szr = static_box_sizer(self, 'topic : ' + self.prm.get('topic'))
bak[0].Add(szr, 0, wx.EXPAND | wx.ALL, 4)
targ_szr = szr
if vp.is_nl():
				hszr = None
flag |= wx.EXPAND
else:
if hszr is None:
hszr = wx.BoxSizer(wx.HORIZONTAL)
szr.Add(hszr, 0, wx.EXPAND)
flag |= wx.ALIGN_CENTER_VERTICAL
targ_szr = hszr
if do_category and 'rosparam' in var:
rp_szr = static_box_sizer(self, 'rosparam : ' + var.get('rosparam'))
targ_szr.Add(rp_szr, 0, wx.EXPAND | wx.ALL, 4)
targ_szr = rp_szr
user_category = gdic_v.get('user_category')
if user_category is not None and hszr:
user_szr = static_box_sizer(self, user_category, orient=wx.HORIZONTAL)
(flgs, bdr) = gdic_v.get('user_category_add', [ [], 0 ])
targ_szr.Add(user_szr, 0, wx_flag_get(flgs), bdr)
targ_szr = hszr = user_szr
targ_szr.Add(vp, prop, flag, border)
if 'nl' in gdic_v.get('flags', []):
hszr = None
if do_category and self.in_msg(var):
topic_szrs = (szr, hszr)
(szr, hszr) = bak
if 'hline' in gdic_v.get('flags', []) and hszr is None:
szr.Add(wx.StaticLine(self, wx.ID_ANY), 0, wx.EXPAND | wx.TOP | wx.BOTTOM, 4)
if not self.in_msg(var) and var.get('rosparam'):
k = 'ext_toggle_enables'
self.gdic[ k ] = self.gdic.get(k, []) + [ vp ]
enables_set(vp, 'toggle', proc is None)
if 'disable' in gdic_v.get('flags', []):
vp.Enable(False)
if 'hide' in gdic_v.get('flags', []):
vp.Hide()
self.SetSizer(szr)
if 'no_init_update' not in self.prm.get('flags', []):
self.update()
def get_gdic_v_and_chk_enable(self, var_name):
gdic_v = dic_getset(self.gdic, var_name, {})
if 'panel' in gdic_v and dic_eval_if_str(self.frame, gdic_v, 'panel') != self.GetParent():
return None
return gdic_v
def update(self, var=None):
update_func = self.gdic.get('update_func')
if update_func:
self.gdic['update_func_arg_var'] = var
update_func(self.pdic, self.gdic, self.prm)
def detach_func(self):
for var in self.prm.get('vars'):
name = var.get('name')
if not gdic_dialog_type_chk(self.gdic, name):
continue
gdic_v = self.get_gdic_v_and_chk_enable(name)
if gdic_v is None:
continue
if 'func' in gdic_v:
bak_stk_pop(gdic_v, 'func')
vp = gdic_v.get('var')
lst_remove_once(self.gdic.get('ext_toggle_enables', []), vp)
def in_msg(self, var):
if 'topic' not in self.prm or 'msg' not in self.prm:
return False
if self.tmp_msg is None:
klass_msg = globals().get( self.prm.get('msg') )
if klass_msg is None:
return False
self.tmp_msg = klass_msg()
(obj, attr) = msg_path_to_obj_attr(self.tmp_msg, var.get('name'))
return obj and attr in obj.__slots__
class VarPanel(wx.Panel):
def __init__(self, *args, **kwds):
self.var = kwds.pop('var')
v = kwds.pop('v')
self.update = kwds.pop('update')
wx.Panel.__init__(self, *args, **kwds)
self.min = self.var.get('min')
self.max = self.var.get('max')
self.has_slider = self.min is not None and self.max is not None
self.lb = None
label = self.var.get('label', '')
self.kind = self.var.get('kind')
if self.kind == 'radio_box':
choices = self.var.get('choices', [])
style = wx.RA_SPECIFY_COLS if self.var.get('choices_style') == 'h' else wx.RA_SPECIFY_ROWS
self.obj = wx.RadioBox(self, wx.ID_ANY, label, choices=choices, majorDimension=0, style=style)
self.choices_sel_set(v)
self.Bind(wx.EVT_RADIOBOX, self.OnUpdate, self.obj)
return
if self.kind == 'menu':
choices = self.var.get('choices', [])
self.obj = wx.Choice(self, wx.ID_ANY, choices=choices)
self.choices_sel_set(v)
self.Bind(wx.EVT_CHOICE, self.OnUpdate, self.obj)
if label:
self.lb = wx.StaticText(self, wx.ID_ANY, label)
flag = wx.LEFT | wx.ALIGN_CENTER_VERTICAL
sizer_wrap((self.lb, self.obj), wx.HORIZONTAL, 0, flag, 4, self)
return
if self.kind == 'checkbox':
self.obj = wx.CheckBox(self, wx.ID_ANY, label)
self.obj.SetValue(v)
self.Bind(wx.EVT_CHECKBOX, self.OnUpdate, self.obj)
return
if self.kind == 'checkboxes':
item_n = dic_eval_if_str(self, self.var, 'item_n', 1)
self.obj = Checkboxes(self, item_n, label)
self.obj.set(v)
for box in self.obj.boxes:
self.obj.Bind(wx.EVT_CHECKBOX, self.OnUpdate, box)
return
if self.kind == 'toggle_button':
self.obj = wx.ToggleButton(self, wx.ID_ANY, label)
set_val(self.obj, v)
self.Bind(wx.EVT_TOGGLEBUTTON, self.OnUpdate, self.obj)
button_color_hdr_setup(self.obj)
return
if self.kind == 'hide':
self.Hide()
return
szr = wx.BoxSizer(wx.HORIZONTAL)
self.lb = wx.StaticText(self, wx.ID_ANY, label)
flag = wx.LEFT | wx.ALIGN_CENTER_VERTICAL
szr.Add(self.lb, 0, flag, 4)
if self.kind == 'path':
v = str(v)
v = path_expand_cmd(v)
v = os.path.expandvars(os.path.expanduser(v))
style = wx.TE_PROCESS_ENTER + wx_flag_get( self.var.get('str_flags', []) )
self.tc = wx.TextCtrl(self, wx.ID_ANY, str(v), style=style)
self.Bind(wx.EVT_TEXT_ENTER, self.OnUpdate, self.tc)
if self.kind in ('num', None):
if self.has_slider:
self.w = self.max - self.min
vlst = [ v, self.min, self.max, self.var['v'] ]
				self.is_float = any( type(v_) is not int for v_ in vlst )
self.int_max = 1000 if self.is_float else self.max
self.int_min = 0 if self.is_float else self.min
self.slider = wx.Slider(self, wx.ID_ANY, self.get_int_v(), self.int_min, self.int_max)
self.Bind(wx.EVT_COMMAND_SCROLL, self.OnScroll, self.slider)
self.slider.SetMinSize((82, 27))
szr.Add(self.slider, 1, wx.LEFT | wx.RIGHT | wx.ALIGN_CENTER_VERTICAL, 4)
else:
self.is_float = type(self.var['v']) is not int
self.tc.SetMinSize((40,27))
flag = wx.ALIGN_CENTER_VERTICAL
prop = 1 if self.kind == 'path' or self.kind == 'str' else 0
szr.Add(self.tc, prop, flag, 4)
if self.kind == 'path':
self.ref = wx.Button(self, wx.ID_ANY, 'Ref')
self.Bind(wx.EVT_BUTTON, self.OnRef, self.ref)
button_color_hdr_setup(self.ref)
self.ref.SetMinSize((40,29))
szr.Add(self.ref, 0, flag, 4)
if self.has_slider or self.kind == 'num':
vszr = wx.BoxSizer(wx.VERTICAL)
vszr.Add( self.create_bmbtn("images/inc.png", self.OnIncBtn) )
vszr.Add( self.create_bmbtn("images/dec.png", self.OnDecBtn) )
szr.Add(vszr, 0, wx.ALIGN_CENTER_VERTICAL)
self.SetSizer(szr)
def setup_tooltip(self):
if get_tooltips(self.var):
set_tooltips(self.obj, self.var)
if get_tooltip(self.var):
obj = self.lb if self.lb else (self if self.kind == 'radio_box' else self.obj)
set_tooltip(obj, self.var)
def create_bmbtn(self, filename, hdr):
dir = rtmgr_src_dir()
bm = wx.Bitmap(dir + filename, wx.BITMAP_TYPE_ANY)
style = wx.BORDER_NONE | wx.BU_EXACTFIT
obj = wx.lib.buttons.GenBitmapButton(self, wx.ID_ANY, bm, style=style)
self.Bind(wx.EVT_BUTTON, hdr, obj)
return obj
def get_v(self):
if self.kind in [ 'radio_box', 'menu' ]:
return self.choices_sel_get()
if self.kind in [ 'checkbox', 'toggle_button' ]:
return self.obj.GetValue()
if self.kind == 'checkboxes':
return self.obj.get()
if self.kind == 'hide':
return self.var.get('v')
if self.kind in [ 'path', 'str' ]:
return str(self.tc.GetValue())
if not self.has_slider and self.tc.GetValue() == '':
return ''
return self.get_tc_v()
def get_tc_v(self):
s = self.tc.GetValue()
v = str_to_float(s) if self.is_float else int(s)
if self.has_slider:
v = self.min if v < self.min else v
v = self.max if v > self.max else v
self.tc.SetValue(adjust_num_str(str(v)))
return v
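	# For float vars the slider runs on an integer scale 0..int_max (1000);
	# this maps the current value linearly from [min, max] onto that scale.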
def get_int_v(self):
v = self.get_tc_v()
if self.is_float:
v = int( self.int_max * (v - self.min) / self.w if self.w != 0 else 0 )
return v
def OnScroll(self, event):
iv = self.slider.GetValue()
s = str(iv)
if self.is_float:
v = self.min + float(self.w) * iv / self.int_max
s = str(Decimal(v).quantize(Decimal(str(self.get_step()))))
self.tc.SetValue(s)
self.update(self.var)
def OnIncBtn(self, event):
step = self.get_step()
self.add_v(step)
def OnDecBtn(self, event):
step = self.get_step()
self.add_v(-step)
def get_step(self):
step = self.var.get('step')
return step if step else 0.01 if self.is_float else 1
def add_v(self, step):
		ov = self.get_v()
		if ov == '':
			return
		self.tc.SetValue(str(ov + step))
v = self.get_v()
if v != ov:
if self.has_slider:
self.slider.SetValue(self.get_int_v())
self.update(self.var)
def OnUpdate(self, event):
if self.has_slider:
self.slider.SetValue(self.get_int_v())
self.update(self.var)
def OnRef(self, event):
if file_dialog(self, self.tc, self.var) == wx.ID_OK:
self.update(self.var)
def choices_sel_get(self):
return self.obj.GetStringSelection() if self.var.get('choices_type') == 'str' else self.obj.GetSelection()
def choices_sel_set(self, v):
if self.var.get('choices_type') == 'str':
self.obj.SetStringSelection(v)
else:
self.obj.SetSelection(v)
def is_nl(self):
return self.has_slider or self.kind in [ 'path' ]
class MyDialogParam(rtmgr.MyDialogParam):
def __init__(self, *args, **kwds):
pdic = kwds.pop('pdic')
self.pdic_bak = pdic.copy()
gdic = kwds.pop('gdic')
prm = kwds.pop('prm')
rtmgr.MyDialogParam.__init__(self, *args, **kwds)
set_size_gdic(self, gdic)
self.Bind(wx.EVT_CLOSE, self.OnClose)
ok_lb_key = 'open_dialog_ok_label'
if dic_list_get(gdic, 'dialog_type', 'config') == 'open' and ok_lb_key in gdic:
self.button_1.SetLabel( gdic.get(ok_lb_key) )
parent = self.panel_v
frame = self.GetParent()
self.panel = ParamPanel(parent, frame=frame, pdic=pdic, gdic=gdic, prm=prm)
szr = sizer_wrap((self.panel,), wx.VERTICAL, 1, wx.EXPAND, 0, parent)
self.SetTitle(prm.get('name', ''))
(w,h) = self.GetSize()
(w2,_) = szr.GetMinSize()
w2 += 20
if w2 > w:
self.SetSize((w2,h))
def OnOk(self, event):
self.panel.update()
self.panel.detach_func()
self.EndModal(0)
def OnCancel(self, event):
self.panel.pdic.update(self.pdic_bak) # restore
self.panel.detach_func()
self.panel.update()
self.EndModal(-1)
def OnClose(self, event):
self.OnCancel(event)
class MyDialogDpm(rtmgr.MyDialogDpm):
def __init__(self, *args, **kwds):
pdic = kwds.pop('pdic')
self.pdic_bak = pdic.copy()
gdic = kwds.pop('gdic')
prm = kwds.pop('prm')
rtmgr.MyDialogDpm.__init__(self, *args, **kwds)
set_size_gdic(self, gdic)
self.Bind(wx.EVT_CLOSE, self.OnClose)
parent = self.panel_v
frame = self.GetParent()
self.frame = frame
self.panel = ParamPanel(parent, frame=frame, pdic=pdic, gdic=gdic, prm=prm)
szr = sizer_wrap((self.panel,), wx.VERTICAL, 1, wx.EXPAND, 0, parent)
self.SetTitle(prm.get('name', ''))
(w,h) = self.GetSize()
(w2,_) = szr.GetMinSize()
w2 += 20
if w2 > w:
self.SetSize((w2,h))
fix_link_color(self.hyperlink_car)
fix_link_color(self.hyperlink_pedestrian)
def OnOk(self, event):
self.panel.update()
self.panel.detach_func()
self.EndModal(0)
def OnLink(self, event):
obj = event.GetEventObject()
dic = { self.hyperlink_car : self.frame.button_car_dpm,
self.hyperlink_pedestrian : self.frame.button_pedestrian_dpm }
obj = dic.get(obj)
if obj:
self.frame.OnHyperlinked_obj(obj)
def OnCancel(self, event):
self.panel.pdic.update(self.pdic_bak) # restore
self.panel.detach_func()
self.panel.update()
self.EndModal(-1)
def OnClose(self, event):
self.OnCancel(event)
class MyDialogCarPedestrian(rtmgr.MyDialogCarPedestrian):
def __init__(self, *args, **kwds):
pdic = kwds.pop('pdic')
self.gdic = kwds.pop('gdic')
prm = kwds.pop('prm')
rtmgr.MyDialogCarPedestrian.__init__(self, *args, **kwds)
set_size_gdic(self)
self.Bind(wx.EVT_CLOSE, self.OnClose)
frame = self.GetParent()
self.frame = frame
self.SetTitle(prm.get('name', ''))
fix_link_color(self.hyperlink_car)
fix_link_color(self.hyperlink_pedestrian)
def OnLink(self, event):
obj = event.GetEventObject()
car_ped = { self.hyperlink_car : 'car', self.hyperlink_pedestrian : 'pedestrian' }.get(obj, 'car')
obj_key = self.gdic.get('car_pedestrian_obj_key', {}).get(car_ped)
obj = getattr(self.frame, 'button_' + obj_key, None) if obj_key else None
if obj:
self.frame.OnHyperlinked_obj(obj)
self.EndModal(0)
def OnClose(self, event):
self.EndModal(-1)
class MyDialogLaneStop(rtmgr.MyDialogLaneStop):
def __init__(self, *args, **kwds):
self.pdic = kwds.pop('pdic')
self.gdic = kwds.pop('gdic')
self.prm = kwds.pop('prm')
rtmgr.MyDialogLaneStop.__init__(self, *args, **kwds)
set_size_gdic(self)
self.frame = self.GetParent()
name = 'lane_stop'
var = next( ( var for var in self.prm.get('vars', []) if var.get('name') == name ), {} )
v = self.pdic.get( name, var.get('v', False) )
set_val(self.checkbox_lane_stop, v)
def update(self):
update_func = self.gdic.get('update_func')
if update_func:
update_func(self.pdic, self.gdic, self.prm)
def OnTrafficRedLight(self, event):
self.pdic['traffic_light'] = 0
self.update()
def OnTrafficGreenLight(self, event):
self.pdic['traffic_light'] = 1
self.update()
def OnTrafficLightRecognition(self, event):
pub = rospy.Publisher('/config/lane_stop', ConfigLaneStop, latch=True, queue_size=10)
msg = ConfigLaneStop()
v = event.GetEventObject().GetValue()
self.pdic['lane_stop'] = v
msg.manual_detection = not v
pub.publish(msg)
def OnOk(self, event):
self.EndModal(0)
def OnCancel(self, event):
self.EndModal(-1)
class MyDialogNdtMapping(rtmgr.MyDialogNdtMapping):
def __init__(self, *args, **kwds):
self.pdic = kwds.pop('pdic')
self.pdic_bak = self.pdic.copy()
self.gdic = kwds.pop('gdic')
self.prm = kwds.pop('prm')
rtmgr.MyDialogNdtMapping.__init__(self, *args, **kwds)
set_size_gdic(self)
parent = self.panel_v
frame = self.GetParent()
self.panel = ParamPanel(parent, frame=frame, pdic=self.pdic, gdic=self.gdic, prm=self.prm)
sizer_wrap((self.panel,), wx.VERTICAL, 1, wx.EXPAND, 0, parent)
self.update_filename()
self.klass_msg = ConfigNdtMappingOutput
self.pub = rospy.Publisher('/config/ndt_mapping_output', self.klass_msg, queue_size=10)
def update_filename(self):
tc = self.text_ctrl_path
path = tc.GetValue()
(dn, fn) = os.path.split(path)
now = datetime.datetime.now()
fn = 'autoware-%02d%02d%02d.pcd' % (
now.year % 100, now.month, now.day)
path = os.path.join(dn, fn)
set_path(tc, path)
def OnRef(self, event):
tc = self.text_ctrl_path
file_dialog(self, tc, { 'path_type' : 'save' } )
def OnRadio(self, event):
v = self.radio_btn_filter_resolution.GetValue()
tc = self.text_ctrl_filter_resolution
tc.Enable(v)
def OnPcdOutput(self, event):
tc = self.text_ctrl_filter_resolution
v = tc.GetValue() if self.radio_btn_filter_resolution.GetValue() else '0.0'
msg = self.klass_msg()
msg.filename = self.text_ctrl_path.GetValue()
msg.filter_res = str_to_float(v)
self.pub.publish(msg)
def OnOk(self, event):
self.panel.detach_func()
self.EndModal(0)
class MyDialogWaypointLoader(rtmgr.MyDialogWaypointLoader):
def __init__(self, *args, **kwds):
self.pdic = kwds.pop('pdic')
self.pdic_bak = self.pdic.copy()
self.gdic = kwds.pop('gdic')
self.prm = kwds.pop('prm')
rtmgr.MyDialogWaypointLoader.__init__(self, *args, **kwds)
set_size_gdic(self)
parent = self.panel_v
frame = self.GetParent()
self.panel = ParamPanel(parent, frame=frame, pdic=self.pdic, gdic=self.gdic, prm=self.prm)
sizer_wrap((self.panel,), wx.VERTICAL, 1, wx.EXPAND, 0, parent)
self.klass_msg = Bool
self.pub = rospy.Publisher('/config/waypoint_loader_output', self.klass_msg, queue_size=10)
def OnCsvOutput(self, event):
msg = self.klass_msg()
msg.data = True
self.pub.publish(msg)
def OnOk(self, event):
self.panel.detach_func()
self.EndModal(0)
class InfoBarLabel(wx.BoxSizer):
def __init__(self, parent, btm_txt=None, lmt_bar_prg=90, bar_orient=wx.VERTICAL):
wx.BoxSizer.__init__(self, orient=wx.VERTICAL)
self.lb = wx.StaticText(parent, wx.ID_ANY, '')
self.bar = BarLabel(parent, hv=bar_orient, show_lb=False)
bt = wx.StaticText(parent, wx.ID_ANY, btm_txt) if btm_txt else None
self.Add(self.lb, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
if bar_orient == wx.VERTICAL:
sz = self.bar.GetSize()
sz.SetWidth(20)
self.bar.SetMinSize(sz)
self.Add(self.bar, 1, wx.ALIGN_CENTER_HORIZONTAL, 0)
if bt:
self.Add(bt, 0, wx.ALIGN_CENTER_HORIZONTAL, 0)
else:
szr = wx.BoxSizer(wx.HORIZONTAL)
if bt:
szr.Add(bt, 0, 0, 0)
szr.Add(self.bar, 1, 0, 0)
self.Add(szr, 1, wx.EXPAND, 0)
self.lmt_bar_prg = lmt_bar_prg
def lb_set(self, txt, col):
self.lb.SetForegroundColour(col)
		self.lb.SetLabel(txt)
self.Layout()
def bar_set(self, prg):
(col1, col2) = (wx.Colour(0,0,250), wx.Colour(0,0,128))
if prg >= self.lmt_bar_prg:
(col1, col2) = (wx.Colour(250,0,0), wx.Colour(128,0,0))
self.bar.set_col(col1, col2)
self.bar.set(prg)
class Checkboxes(wx.Panel):
def __init__(self, parent, item_n, lb):
wx.Panel.__init__(self, parent, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize)
self.boxes = [ wx.CheckBox(self, wx.ID_ANY, lb + str(i)) for i in range(item_n) ]
vsz = wx.BoxSizer(wx.VERTICAL)
for j in range((item_n + 7) / 8):
hsz = wx.BoxSizer(wx.HORIZONTAL)
for i in range(8):
idx = j * 8 + i
if idx < len(self.boxes):
hsz.Add(self.boxes[idx], 0, wx.LEFT, 8)
vsz.Add(hsz)
self.SetSizer(vsz)
vsz.Fit(self)
def set(self, vs):
vs = vs if vs else [ True for box in self.boxes ]
for (box, v) in zip(self.boxes, vs):
box.SetValue(v)
def get(self):
return [ box.GetValue() for box in self.boxes ]
class BarLabel(wx.Panel):
def __init__(self, parent, txt='', pos=wx.DefaultPosition, size=wx.DefaultSize, style=0, hv=wx.HORIZONTAL, show_lb=True):
wx.Panel.__init__(self, parent, wx.ID_ANY, pos, size)
self.lb = wx.StaticText(self, wx.ID_ANY, '', style=style)
self.txt = txt
self.hv = hv
self.dir = wx.SOUTH if hv == wx.HORIZONTAL else wx.EAST
self.show_lb = show_lb
self.prg = -1
self.dflt_col1 = wx.Colour(250,250,250)
self.dflt_col2 = wx.Colour(128,128,128)
self.col1 = self.dflt_col1
self.col2 = self.dflt_col2
self.Bind(wx.EVT_PAINT, self.OnPaint)
def set(self, prg):
self.prg = prg
if self.show_lb:
self.lb.SetLabel(self.txt + str(prg) + '%' if prg >= 0 else '')
self.Refresh()
def set_col(self, col1, col2):
self.col1 = col1 if col1 != wx.NullColour else self.dflt_col1
self.col2 = col2 if col2 != wx.NullColour else self.dflt_col2
def clear(self):
self.set(-1)
def OnPaint(self, event):
dc = wx.PaintDC(self)
(w,h) = self.GetSize()
if self.IsEnabled():
p = (w if self.hv == wx.HORIZONTAL else h) * self.prg / 100
rect = wx.Rect(0, 0, p, h) if self.hv == wx.HORIZONTAL else wx.Rect(0, h-p, w, p)
dc.GradientFillLinear(rect, self.col1, self.col2, self.dir)
rect = wx.Rect(p, 0, w-p, h) if self.hv == wx.HORIZONTAL else wx.Rect(0, 0, w, h-p)
dc.GradientFillLinear(rect, wx.Colour(200,200,200), wx.Colour(220,220,220), self.dir)
else:
rect = wx.Rect(0, 0, w, h)
dc.GradientFillLinear(rect, wx.Colour(250,250,250), wx.Colour(250,250,250), self.dir)
class ColorLabel(wx.Panel):
def __init__(self, parent, lst=[], pos=wx.DefaultPosition, size=wx.DefaultSize, style=0):
wx.Panel.__init__(self, parent, wx.ID_ANY, pos, size)
self.lst = lst
self.Bind(wx.EVT_PAINT, self.OnPaint)
def set(self, lst):
self.lst = lst
self.Refresh()
def OnPaint(self, event):
dc = wx.PaintDC(self)
dc.Clear()
#change_font_point_by_rate(dc, 0.75)
(x,y) = (0,0)
(_, h, _, _) = dc.GetFullTextExtent(' ')
for v in self.lst:
if type(v) is tuple and len(v) == 2:
(x,y) = v
elif type(v) is tuple and len(v) == 3:
dc.SetTextForeground(v)
elif v == '\n':
(x,y) = (0,y+h)
elif type(v) is str:
dc.DrawText(v, x, y)
(w, _, _, _) = dc.GetFullTextExtent(v)
x += w
class StrValObj:
def __init__(self, s, v):
self.s = s
self.v = v
def GetValue(self):
return self.v
def SetValue(self, v):
self.v = v
class MyApp(wx.App):
def OnInit(self):
wx.InitAllImageHandlers()
frame_1 = MyFrame(None, wx.ID_ANY, "")
self.SetTopWindow(frame_1)
buttons_color_hdr_setup(frame_1)
frame_1.Show()
return 1
class MyDialogRosbagRecord(rtmgr.MyDialogRosbagRecord):
def __init__(self, *args, **kwds):
self.cmd_dic = kwds.pop('cmd_dic')
rtmgr.MyDialogRosbagRecord.__init__(self, *args, **kwds)
self.cbs = []
self.refresh()
self.parent = self.GetParent()
self.cmd_dic[ self.button_start ] = ('rosbag record', None)
self.toggles = [ self.button_start, self.button_stop ]
def OnRef(self, event):
tc = self.text_ctrl
file_dialog(self, tc, { 'path_type' : 'save' } )
def OnStart(self, event):
key_obj = self.button_start
path = self.text_ctrl.GetValue()
if path == '':
print('path=""')
return
topic_opt = []
if self.cbs[0].GetValue(): # 'All'
topic_opt = [ '-a' ]
else:
for obj in self.cbs:
if obj.GetValue():
topic_opt += [ obj.GetLabel() ]
if topic_opt == []:
print('topic=[]')
return
args = topic_opt + [ '-O', path ]
split_arg = [ '--split' ] if self.checkbox_split.GetValue() else []
size_arg = self.size_arg_get()
if split_arg and not size_arg:
wx.MessageBox('size is required, with split')
return
args += split_arg + size_arg
(cmd, proc) = self.cmd_dic[ key_obj ]
proc = self.parent.launch_kill(True, cmd, proc, add_args=args, obj=key_obj, kill_children=True)
self.cmd_dic[ key_obj ] = (cmd, proc)
self.parent.toggle_enables(self.toggles)
def OnStop(self, event):
key_obj = self.button_start
(cmd, proc) = self.cmd_dic[ key_obj ]
proc = self.parent.launch_kill(False, cmd, proc, sigint=True, obj=key_obj, kill_children=True)
self.cmd_dic[ key_obj ] = (cmd, proc)
self.parent.toggle_enables(self.toggles)
self.Hide()
def OnRefresh(self, event):
self.refresh()
def refresh(self):
		lst = [ 'All' ] + subprocess.check_output([ 'rostopic', 'list' ]).strip().split('\n')
panel = self.panel_1
szr = self.sizer_topic
for obj in self.cbs:
szr.Remove(obj)
obj.Destroy()
self.cbs = []
for topic in lst:
obj = wx.CheckBox(panel, wx.ID_ANY, topic)
bdr = 4 if topic == 'All' else 4 * 4
szr.Add(obj, 0, wx.LEFT, bdr)
self.cbs.append(obj)
szr.Layout()
panel.SetVirtualSize(szr.GetMinSize())
def show(self):
self.Show()
self.update_filename()
def update_filename(self):
tc = self.text_ctrl
path = tc.GetValue()
(dn, fn) = os.path.split(path)
now = datetime.datetime.now()
fn = 'autoware-%04d%02d%02d%02d%02d%02d' % (
now.year, now.month, now.day, now.hour, now.minute, now.second)
path = os.path.join(dn, fn)
set_path(tc, path)
def size_arg_get(self):
tc = self.text_ctrl_size
s = tc.GetValue()
mb = 0
try:
mb = str_to_float(s)
except ValueError:
mb = 0
if mb <= 0:
tc.SetValue('')
return [ '--size=' + str(int(mb * 1024 * 1024)) ] if mb > 0 else []
def set_size_gdic(dlg, gdic={}):
(w, h) = dlg.GetSize()
if not gdic:
gdic = getattr(dlg, 'gdic', {})
nw = gdic.get('dialog_width', w)
nh = gdic.get('dialog_height', h)
if (w, h) != (nw, nh):
dlg.SetSize((nw, nh))
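# Open a directory/file dialog seeded from the text control's value.
# path_inf_dic['path_type'] selects the dialog: 'dir', 'save', 'multi',
# or a plain open dialog; for 'dir' with a 'filenames' list the chosen
# directory is expanded to a comma-joined list of file paths.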
def file_dialog(parent, tc, path_inf_dic={}):
path = tc.GetValue()
path = get_top(path.split(','), path)
(dn, fn) = os.path.split(path)
path_type = path_inf_dic.get('path_type')
if path_type == 'dir':
fns = path_inf_dic.get('filenames')
if type(fns) is str and fns[-5:] == '.yaml':
fns = load_yaml(fns)
if type(fns) is not list:
fns = None
path_inf_dic['filenames'] = fns
dlg = wx.DirDialog(parent, defaultPath=path)
else:
st_dic = { 'save' : wx.FD_SAVE, 'multi' : wx.FD_MULTIPLE }
dlg = wx.FileDialog(parent, defaultDir=dn, defaultFile=fn,
style=st_dic.get(path_type, wx.FD_DEFAULT_STYLE))
ret = show_modal(dlg)
if ret == wx.ID_OK:
path = ','.join(dlg.GetPaths()) if path_type == 'multi' else dlg.GetPath()
if path_type == 'dir' and fns:
path = ','.join([ path + '/' + fn for fn in fns ])
set_path(tc, path)
dlg.Destroy()
return ret
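# Create and post the event that toggling 'obj' would normally emit, after setting its value.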
def post_evt_toggle_obj(win, obj, v):
evt_id = {
CT.GenericTreeItem : CT.wxEVT_TREE_ITEM_CHECKED,
wx.CheckBox : wx.EVT_CHECKBOX.typeId,
wx.ToggleButton : wx.EVT_TOGGLEBUTTON.typeId,
wx.Button : wx.EVT_BUTTON.typeId,
}.get( type(obj) )
if evt_id == CT.wxEVT_TREE_ITEM_CHECKED:
evt = CT.TreeEvent( evt_id, win.GetId() )
evt.SetItem(obj)
else:
evt = wx.PyCommandEvent( evt_id, obj.GetId() )
evt.SetEventObject(obj)
set_val(obj, v)
wx.PostEvent(win, evt)
def button_color_change(btn, v=None):
if v is None and type(btn) is wx.ToggleButton:
v = btn.GetValue()
key = ( v , btn.IsEnabled() )
dic = { (True,True):('#F9F9F8','#8B8BB9'), (True,False):('#F9F9F8','#E0E0F0') }
(fcol, bcol) = dic.get(key, (wx.NullColour, wx.NullColour))
btn.SetForegroundColour(fcol)
btn.SetBackgroundColour(bcol)
def OnButtonColorHdr(event):
btn = event.GetEventObject()
dic = { wx.EVT_TOGGLEBUTTON.typeId : None,
wx.EVT_LEFT_DOWN.typeId : True,
wx.EVT_LEFT_UP.typeId : False }
v = dic.get(event.GetEventType(), '?')
if v != '?':
button_color_change(btn, v)
event.Skip()
btn_null_bgcol = None
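# True if btn still has the platform default background colour (cached in btn_null_bgcol on first call).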
def is_btn_null_bgcol(btn):
global btn_null_bgcol
bak = btn.GetBackgroundColour()
if btn_null_bgcol is None:
btn.SetBackgroundColour(wx.NullColour)
btn_null_bgcol = btn.GetBackgroundColour()
if bak != btn_null_bgcol:
btn.SetBackgroundColour(bak)
return bak == btn_null_bgcol
def button_color_hdr_setup(btn):
hdr = OnButtonColorHdr
if type(btn) is wx.ToggleButton:
btn.Bind(wx.EVT_TOGGLEBUTTON, hdr)
elif type(btn) is wx.Button and is_btn_null_bgcol(btn):
btn.Bind(wx.EVT_LEFT_DOWN, hdr)
btn.Bind(wx.EVT_LEFT_UP, hdr)
def buttons_color_hdr_setup(frm_obj):
key = 'button_'
btns = [ getattr(frm_obj, nm) for nm in dir(frm_obj) if nm[:len(key)] == key ]
for btn in btns:
button_color_hdr_setup(btn)
def show_modal(dlg):
buttons_color_hdr_setup(dlg)
return dlg.ShowModal()
def load_yaml(filename, def_ret=None):
dir = rtmgr_src_dir()
path = dir + filename
if not os.path.isfile(path):
return def_ret
print('loading ' + filename)
f = open(dir + filename, 'r')
d = yaml.load(f)
f.close()
return d
def terminate_children(proc, sigint=False):
for child in get_proc_children(proc):
terminate_children(child, sigint)
terminate(child, sigint)
def terminate(proc, sigint=False):
if sigint:
proc.send_signal(signal.SIGINT)
else:
proc.terminate()
def proc_wait_thread(ev, proc, obj):
proc.wait()
wx.CallAfter(enables_set, obj, 'proc_wait', True)
th_end((None, ev))
def th_start(target, kwargs=None):
    if kwargs is None:
        kwargs = {}
ev = threading.Event()
kwargs['ev'] = ev
th = threading.Thread(target=target, kwargs=kwargs)
th.daemon = True
th.start()
return (th, ev)
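# Finish a (thread, event) pair; when called from the worker thread itself (th is None), defer the join to a Timer.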
def th_end((th, ev)):
if not th:
th = threading.current_thread()
threading.Timer( 1.0, th_end, ((th, ev),) ).start()
return
ev.set()
th.join()
def que_clear(que):
with que.mutex:
que.queue.clear()
def append_tc_limit(tc, s, rm_chars=0):
if rm_chars > 0:
tc.Remove(0, rm_chars)
tc.AppendText(s)
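# Strip ANSI escape sequences (ESC ... 'm') from terminal output.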
def cut_esc(s):
while True:
i = s.find(chr(27))
if i < 0:
break
j = s.find('m', i)
if j < 0:
break
s = s[:i] + s[j+1:]
return s
def change_font_point_by_rate(obj, rate=1.0):
font = obj.GetFont()
pt = font.GetPointSize()
pt = int(pt * rate)
font.SetPointSize(pt)
obj.SetFont(font)
def fix_link_color(obj):
t = type(obj)
if t is CT.GenericTreeItem or t is CT.CustomTreeCtrl:
obj.SetHyperTextVisitedColour(obj.GetHyperTextNewColour())
elif t is wx.HyperlinkCtrl:
obj.SetVisitedColour(obj.GetNormalColour())
def get_tooltip(dic):
return dic.get('desc')
def get_tooltips(dic):
return dic.get('descs', [])
def set_tooltip(obj, dic):
set_tooltip_str(obj, get_tooltip(dic))
def set_tooltip_str(obj, s):
if s and getattr(obj, 'SetToolTipString', None):
obj.SetToolTipString(s)
def set_tooltips(obj, dic):
lst = get_tooltips(dic)
if lst and getattr(obj, 'SetItemToolTip', None):
for (ix, s) in enumerate(lst):
obj.SetItemToolTip(ix, s)
def get_tooltip_obj(obj):
if getattr(obj, 'GetToolTip', None):
t = obj.GetToolTip()
return t.GetTip() if t else None
return None
def scaled_bitmap(bm, scale):
(w, h) = bm.GetSize()
img = wx.ImageFromBitmap(bm)
img = img.Scale(w * scale, h * scale, wx.IMAGE_QUALITY_HIGH)
return wx.BitmapFromImage(img)
def sizer_wrap(add_objs, orient=wx.VERTICAL, prop=0, flag=0, border=0, parent=None):
szr = wx.BoxSizer(orient)
for obj in add_objs:
szr.Add(obj, prop, flag, border)
if parent:
parent.SetSizer(szr)
return szr
def static_box_sizer(parent, s, orient=wx.VERTICAL):
sb = wx.StaticBox(parent, wx.ID_ANY, s)
sb.Lower()
return wx.StaticBoxSizer(sb, orient)
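# Combine named flags into a wx style bitmask (the values are distinct bits, so '+' acts as OR).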
def wx_flag_get(flags):
dic = { 'top' : wx.TOP, 'bottom' : wx.BOTTOM, 'left' : wx.LEFT, 'right' : wx.RIGHT,
'all' : wx.ALL, 'expand' : wx.EXPAND, 'fixed_minsize' : wx.FIXED_MINSIZE,
'center_v' : wx.ALIGN_CENTER_VERTICAL, 'center_h' : wx.ALIGN_CENTER_HORIZONTAL,
'passwd' : wx.TE_PASSWORD }
lst = [ dic.get(f) for f in flags if f in dic ]
return reduce(lambda a,b : a+b, [0] + lst)
def msg_path_to_obj_attr(msg, path):
lst = path.split('.')
obj = msg
for attr in lst[:-1]:
obj = getattr(obj, attr, None)
return (obj, lst[-1])
def str_to_rosval(s, type_str, def_ret=None):
cvt_dic = {
'int8':int , 'int16':int , 'int32':int ,
'uint8':int , 'uint16':int , 'uint32':int ,
'int64':long , 'uint64':long,
'float32':float, 'float64':float,
}
t = cvt_dic.get(type_str)
s = s.replace(',','.') if t is float and type(s) is str else s
return t(s) if t else def_ret
def str_to_float(s):
return float( s.replace(',','.') )
def set_path(tc, v):
tc.SetValue(v)
tc.SetInsertionPointEnd()
def set_val(obj, v):
func = getattr(obj, 'SetValue', getattr(obj, 'Check', None))
if func:
func(v)
obj_refresh(obj)
if type(obj) is wx.ToggleButton:
button_color_change(obj)
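# Record a per-key enable request on obj; the widget is enabled only while every recorded key is True.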
def enables_set(obj, k, en):
if hasattr(obj, 'enables_proxy'):
(obj, k) = obj.enables_proxy
d = attr_getset(obj, 'enabLes', {})
d[k] = en
d['last_key'] = k
if hasattr(obj, 'Enable'):
obj.Enable( all( d.values() ) )
obj_refresh(obj)
if isinstance(obj, wx.HyperlinkCtrl):
if not hasattr(obj, 'coLor'):
obj.coLor = { True:obj.GetNormalColour(), False:'#808080' }
c = obj.coLor.get(obj.IsEnabled())
obj.SetNormalColour(c)
obj.SetVisitedColour(c)
def enables_get(obj, k, def_ret=None):
return attr_getset(obj, 'enabLes', {}).get(k, def_ret)
def enables_get_last(obj):
k = enables_get(obj, 'last_key')
return (k, enables_get(obj, k))
def obj_refresh(obj):
if type(obj) is CT.GenericTreeItem:
while obj.GetParent():
obj = obj.GetParent()
tree = obj.GetData()
tree.Refresh()
# dic_list util (push, pop, get)
def dic_list_push(dic, key, v):
dic_getset(dic, key, []).append(v)
def dic_list_pop(dic, key):
dic.get(key, [None]).pop()
def dic_list_get(dic, key, def_ret=None):
return dic.get(key, [def_ret])[-1]
def bak_stk_push(dic, key):
if key in dic:
k = key + '_bak_str'
dic_getset(dic, k, []).append( dic.get(key) )
def bak_stk_pop(dic, key):
k = key + '_bak_str'
stk = dic.get(k, [])
if len(stk) > 0:
dic[key] = stk.pop()
else:
del dic[key]
def bak_stk_set(dic, key, v):
    bak_stk_push(dic, key)
dic[key] = v
def attr_getset(obj, name, def_ret):
if not hasattr(obj, name):
setattr(obj, name, def_ret)
return getattr(obj, name)
def dic_getset(dic, key, def_ret):
if key not in dic:
dic[key] = def_ret
return dic.get(key)
def lst_append_once(lst, v):
exist = v in lst
if not exist:
lst.append(v)
return exist
def lst_remove_once(lst, v):
exist = v in lst
if exist:
lst.remove(v)
return exist
def get_top(lst, def_ret=None):
return lst[0] if len(lst) > 0 else def_ret
def adjust_num_str(s):
if '.' in s:
while s[-1] == '0':
s = s[:-1]
if s[-1] == '.':
s = s[:-1]
return s
def rtmgr_src_dir():
return os.path.abspath(os.path.dirname(__file__)) + "/"
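# Expand a leading '$(cmd ...)' path component to the command's output, like shell command substitution.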
def path_expand_cmd(path):
lst = path.split('/')
s = lst[0]
if s[:2] == '$(' and s[-1] == ')':
cmd = s[2:-1].split(' ')
lst[0] = subprocess.check_output(cmd).strip()
path = '/'.join(lst)
return path
def eval_if_str(self, v):
return eval(v) if type(v) is str else v
def dic_eval_if_str(self, dic, key, def_ret=None):
return eval_if_str( self, dic.get(key, def_ret) )
def prn_dict(dic):
for (k,v) in dic.items():
print (k, ':', v)
def send_to_proc_manager(order):
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
try:
sock.connect(PROC_MANAGER_SOCK)
except socket.error:
        print('Failed to connect to {}'.format(PROC_MANAGER_SOCK))
return -1
sock.send(yaml.dump(order))
ret = sock.recv(1024)
sock.close()
return int(ret) == 0
def set_process_nice(proc, value):
order = {
"name": "nice",
"pid": proc.pid,
"nice": value
}
return send_to_proc_manager(order)
def set_process_cpu_affinity(proc, cpus):
order = {
"name": "cpu_affinity",
"pid": proc.pid,
"cpus": cpus,
}
return send_to_proc_manager(order)
def shutdown_proc_manager():
order = {
"name": "shutdown",
}
return send_to_proc_manager(order)
def set_scheduling_policy(proc, policy, priority):
order = {
"name": "scheduling_policy",
"pid": proc.pid,
"policy": policy,
"priority": priority,
}
return send_to_proc_manager(order)
# psutil 3.x to 1.x backward compatibility
def get_cpu_count():
try:
return psutil.NUM_CPUS
except AttributeError:
return psutil.cpu_count()
def get_proc_children(proc, r=False):
try:
return proc.get_children(recursive=r)
except AttributeError:
return proc.children(recursive=r)
def get_proc_nice(proc):
try:
return proc.get_nice()
except AttributeError:
return proc.nice()
def get_proc_cpu_affinity(proc):
try:
return proc.get_cpu_affinity()
except AttributeError:
return proc.cpu_affinity()
if __name__ == "__main__":
gettext.install("app")
app = MyApp(0)
app.MainLoop()
# EOF
| {
"content_hash": "59f7b5dfbbe4f74cfccc94b451187667",
"timestamp": "",
"source": "github",
"line_count": 3446,
"max_line_length": 122,
"avg_line_length": 29.3476494486361,
"alnum_prop": 0.6394217458371237,
"repo_name": "suzlab/Autoware",
"id": "31d55c940f5ec064b4f1732cc004b4c2a3d32d75",
"size": "101154",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ros/src/util/packages/runtime_manager/scripts/runtime_manager_dialog.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1500382"
},
{
"name": "C++",
"bytes": "7522176"
},
{
"name": "CMake",
"bytes": "292891"
},
{
"name": "CSS",
"bytes": "22550"
},
{
"name": "Cuda",
"bytes": "281867"
},
{
"name": "GDB",
"bytes": "23"
},
{
"name": "HTML",
"bytes": "42329"
},
{
"name": "Java",
"bytes": "539698"
},
{
"name": "JavaScript",
"bytes": "215453"
},
{
"name": "Makefile",
"bytes": "19653"
},
{
"name": "Matlab",
"bytes": "20217"
},
{
"name": "Prolog",
"bytes": "1723"
},
{
"name": "Python",
"bytes": "840645"
},
{
"name": "QMake",
"bytes": "12635"
},
{
"name": "Shell",
"bytes": "24900"
}
],
"symlink_target": ""
} |
import math
from base import Layout
from .. import utils, manager
ROWCOL = 1 # do rows at a time left to right top down
COLROW = 2 # do cols top to bottom, left to right
GOLDEN_RATIO = 1.618
class GridInfo(object):
"""
Calculates sizes for grids
>>> gi = GridInfo(.5, 5, 600, 480)
    >>> gi.calc(5, 600, 480)
    (1, 5, 1)
    >>> gi.get_sizes(600, 480)
    [(0, 0, 120, 480), (120, 0, 120, 480), (240, 0, 120, 480), (360, 0, 120, 480), (480, 0, 120, 480)]
    >>> gi = GridInfo(6, 5, 600, 480)
    >>> gi.get_sizes(600, 480)
    [(0, 0, 600, 96), (0, 96, 600, 96), (0, 192, 600, 96), (0, 288, 600, 96), (0, 384, 600, 96)]
    >>> gi = GridInfo(1, 5, 600, 480)
    >>> gi.get_sizes(600, 480)
    [(0, 0, 200, 240), (200, 0, 200, 240), (400, 0, 200, 240), (0, 240, 300, 240), (300, 240, 300, 240)]
    >>> foo = GridInfo(1.6, 7, 400, 370)
    >>> sizes = foo.get_sizes(500, 580)
"""
def __init__(self, ratio, num_windows, width, height):
self.ratio = ratio
self.num_windows = num_windows
self.width = width
self.height = height
self.num_rows = 0
self.num_cols = 0
def calc(self, num_windows, width, height):
"""
returns (rows, cols, orientation) tuple given input
"""
best_ratio = None
best_rows_cols_orientation = None
for rows, cols, orientation in self._possible_grids(num_windows):
sample_width = float(width)/cols
sample_height = float(height)/rows
sample_ratio = sample_width / sample_height
diff = abs(sample_ratio - self.ratio)
if best_ratio is None or diff < best_ratio:
best_ratio = diff
best_rows_cols_orientation = rows, cols, orientation
return best_rows_cols_orientation
def _possible_grids(self, num_windows):
"""
iterates over possible grids given a number of windows
"""
if num_windows < 2:
end = 2
else:
end = num_windows/2 + 1
for rows in range(1, end):
cols = int(math.ceil(float(num_windows) / rows))
yield rows, cols, ROWCOL
if rows != cols:
# also want the reverse test
yield cols, rows, COLROW
    def get_sizes_advanced(self, total_width, total_height, xoffset=0, yoffset=0):
"""
after every row/column recalculate remaining area
"""
results = []
width = total_width
height = total_height
while len(results) < self.num_windows:
remaining = self.num_windows - len(results)
orien, sizes = self._get_row_or_col( remaining, width, height, xoffset, yoffset)
results.extend(sizes)
if orien == ROWCOL:
# adjust height/yoffset
height -= sizes[-1][-1]
yoffset += sizes[-1][-1]
else:
width -= sizes[-1][-2]
xoffset += sizes[-1][-2]
return results
def _get_row_or_col(self, num_windows, width, height, xoffset, yoffset):
"""
process one row (or col) at a time
"""
rows, cols, orientation = self.calc(num_windows, width, height)
results = []
if orientation == ROWCOL:
x = 0
y = 0
for i, col in enumerate(range(cols)):
w_width = width/cols
w_height = height/rows
if i == cols -1:
w_width = width - x
results.append((x + xoffset, y + yoffset, w_width, w_height))
x += w_width
elif orientation == COLROW:
x = 0
y = 0
for i, col in enumerate(range(rows)):
w_width = width/cols
w_height = height/rows
if i == rows -1:
w_height = height - y
results.append((x + xoffset, y + yoffset, w_width, w_height))
y += w_height
return orientation, results
def get_sizes(self, total_width, total_height, xoffset=0, yoffset=0):
width = 0
height = 0
results = []
rows, cols, orientation = self.calc(self.num_windows, total_width, total_height)
if orientation == ROWCOL:
y = 0
for i, row in enumerate(range(rows)):
x = 0
width = total_width/cols
for j, col in enumerate(range(cols)):
height = total_height/rows
if i == rows - 1 and j == 0:
# last row
remaining = self.num_windows - len(results)
width = total_width/remaining
elif j == cols - 1 or len(results) + 1 == self.num_windows:
# since we are dealing with integers,
# make last column (or item) take up remaining space
width = total_width - x
results.append((x + xoffset, y + yoffset,
width,
height))
if len(results) == self.num_windows:
return results
x += width
y += height
else:
x = 0
for i, col in enumerate(range(cols)):
y = 0
height = total_height/rows
for j, row in enumerate(range(rows)):
width = total_width/cols
# down first
if i == cols - 1 and j == 0:
remaining = self.num_windows - len(results)
height = total_height/remaining
elif j == rows -1 or len(results) + 1 == self.num_windows:
height = total_height - y
results.append((x + xoffset, #i * width + xoffset,
                                y + yoffset, #j * height + yoffset,
width,
height))
if len(results) == self.num_windows:
return results
y += height
x += width
return results
class RatioTile(Layout):
"""
Tries to tile all windows in the width/height ratio passed in
"""
name="ratiotile"
defaults = manager.Defaults(
("border_focus", "#0000ff", "Border colour for the focused window."),
("border_normal", "#000000", "Border colour for un-focused winows."),
("border_width", 1, "Border width.")
)
def __init__(self, ratio=GOLDEN_RATIO, ratio_increment=0.1, fancy=False, **config):
Layout.__init__(self, **config)
self.windows = []
self.ratio_increment = ratio_increment
self.ratio = ratio
self.focused = None
self.dirty = True # need to recalculate
self.layout_info = []
self.last_size = None
self.fancy = fancy
def clone(self, group):
c = Layout.clone(self, group)
c.windows = []
return c
def focus(self, c):
self.focused = c
def blur(self):
self.focused = None
def add(self, w):
self.dirty = True
self.windows.insert(0, w)
def remove(self, w):
self.dirty = True
if self.focused is w:
self.focused = None
self.windows.remove(w)
if self.windows: # and w is self.focused:
self.focused = self.windows[0]
return self.focused
def configure(self, win, screen):
# force recalc
if self.last_size and not self.dirty:
if screen.width != self.last_size[0] or screen.height != self.last_size[1]:
self.dirty = True
if self.dirty:
gi = GridInfo(self.ratio, len(self.windows),
screen.width,
screen.height)
self.last_size = screen.width, screen.height
if self.fancy:
method = gi.get_sizes_advanced
else:
method = gi.get_sizes
self.layout_info = method(screen.width,
screen.height,
screen.x,
screen.y)
self.dirty = False
try:
idx = self.windows.index(win)
except ValueError, e:
win.hide()
return
x, y, w, h = self.layout_info[idx]
if win is self.focused:
bc = self.group.qtile.colorPixel(self.border_focus)
else:
bc = self.group.qtile.colorPixel(self.border_normal)
win.place(x, y, w-self.border_width*2, h-self.border_width*2,
self.border_width, bc)
win.unhide()
def info(self):
return { 'windows': [x.name for x in self.windows],
'ratio' :self.ratio,
'focused' : self.focused.name if self.focused else None,
'layout_info' : self.layout_info
}
def up(self):
if self.windows:
utils.shuffleUp(self.windows)
self.group.layoutAll()
def down(self):
if self.windows:
utils.shuffleDown(self.windows)
self.group.layoutAll()
def focus_first(self):
if self.windows:
return self.windows[0]
def focus_next(self, win):
idx = self.windows.index(win)
if len(self.windows) > idx+1:
return self.windows[idx+1]
def focus_last(self):
if self.windows:
return self.windows[-1]
def focus_prev(self, win):
idx = self.windows.index(win)
if idx > 0:
return self.windows[idx-1]
def getNextClient(self):
nextindex = self.windows.index(self.focused) + 1
if nextindex >= len(self.windows):
nextindex = 0
return self.windows[nextindex]
def getPreviousClient(self):
previndex = self.windows.index(self.focused) - 1
if previndex < 0:
            previndex = len(self.windows) - 1
return self.windows[previndex]
def next(self):
n = self.getPreviousClient()
self.group.focus(n, True)
def previous(self):
n = self.getNextClient()
self.group.focus(n, True)
def shuffle(self, function):
if self.windows:
function(self.windows)
self.group.layoutAll()
def cmd_down(self):
self.down()
def cmd_up(self):
self.up()
def cmd_next(self):
self.next()
def cmd_previous(self):
self.previous()
def cmd_decrease_ratio(self):
new_ratio = self.ratio - self.ratio_increment
if new_ratio < 0:
return
self.ratio = new_ratio
self.group.layoutAll()
def cmd_increase_ratio(self):
self.ratio += self.ratio_increment
self.group.layoutAll()
def cmd_info(self):
return self.info()
if __name__ == '__main__':
import doctest
doctest.testmod()
| {
"content_hash": "634eeddb49ed15c3f4eefd37a7bf0b05",
"timestamp": "",
"source": "github",
"line_count": 347,
"max_line_length": 104,
"avg_line_length": 32.21325648414986,
"alnum_prop": 0.4990159241366971,
"repo_name": "andrelaszlo/qtile",
"id": "1d234bd926cd3350d0e82f20aac582d1df1ae2a3",
"size": "11178",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "libqtile/layout/ratiotile.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
import json
import flask
from google.appengine.api import urlfetch
import github
import auth
import config
import model
import util
import task
from main import app
@app.route('/<username>/<path:repo>')
@app.route('/<username>')
def gh_account(username, repo=None):
username_ = username.lower()
account_db = model.Account.get_by_id(username_)
if not account_db:
g = github.Github(config.CONFIG_DB.github_username, config.CONFIG_DB.github_password)
try:
account = g.get_user(username_)
except github.GithubException as error:
return flask.abort(error.status)
account_db = model.Account.get_or_insert(
account.login,
avatar_url=account.avatar_url.split('?')[0],
email=account.email or '',
followers=account.followers,
joined=account.created_at,
name=account.name or account.login,
organization=account.type == 'Organization',
public_repos=account.public_repos,
username=account.login,
)
if account_db.username != username or repo:
return flask.redirect(flask.url_for('gh_account', username=account_db.username))
task.queue_account(account_db)
repo_dbs, repo_cursor = account_db.get_repo_dbs()
return flask.render_template(
'account/view.html',
html_class='gh-view',
title='%s%s' % ('#%d - ' % account_db.rank if account_db.rank else '', account_db.name),
description='https://github.com/' + account_db.username,
image_url=account_db.avatar_url,
canonical_url=flask.url_for('gh_account', username=username, _external=True),
account_db=account_db,
repo_dbs=repo_dbs,
next_url=util.generate_next_url(repo_cursor),
username=account_db.username,
)
###############################################################################
# Cron Stuff
###############################################################################
@app.route('/admin/cron/repo/')
@auth.cron_required
def gh_admin_top():
stars = util.param('stars', int) or 10000
page = util.param('page', int) or 1
per_page = util.param('per_page', int) or 100
    result = urlfetch.fetch(
        'https://api.github.com/search/repositories'
        '?q=stars:>=%s&sort=stars&order=asc&page=%d&per_page=%d'
        % (stars, page, per_page))
if result.status_code == 200:
repos = json.loads(result.content)
else:
flask.abort(result.status_code)
for repo in repos['items']:
account = repo['owner']
account_db = model.Account.get_or_insert(
account['login'],
avatar_url=account['avatar_url'].split('?')[0],
email=account['email'] if 'email' in account else '',
name=account['login'],
followers=account['followers'] if 'followers' in account else 0,
organization=account['type'] == 'Organization',
username=account['login'],
)
return 'OK %d of %d' % (len(repos['items']), repos['total_count'])
@app.route('/admin/cron/sync/')
@auth.cron_required
def admin_cron():
account_dbs, account_cursor = model.Account.get_dbs(
order=util.param('order') or 'synced',
status=util.param('status'),
)
for account_db in account_dbs:
task.queue_account(account_db)
return 'OK'
@app.route('/admin/cron/repo/cleanup/')
@auth.cron_required
def admin_repo_cleanup():
task.queue_repo_cleanup(util.param('days', int) or 5)
return 'OK'
@app.route('/admin/cron/account/cleanup/')
@auth.cron_required
def admin_account_cleanup():
task.queue_account_cleanup(util.param('stars', int) or 9999)
return 'OK'
@app.route('/admin/cron/rank/')
@auth.cron_required
def admin_rank():
task.rank_accounts()
return 'OK'
| {
"content_hash": "bce1fb7deb8e14d007ee94f9cd7227f7",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 151,
"avg_line_length": 29.379032258064516,
"alnum_prop": 0.6390337633818282,
"repo_name": "lipis/github-stats",
"id": "3a9ffd94ee9608ed5f0b646bccb70bb1ba9795b1",
"size": "3660",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main/control/gh.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6164"
},
{
"name": "CoffeeScript",
"bytes": "9554"
},
{
"name": "HTML",
"bytes": "81896"
},
{
"name": "JavaScript",
"bytes": "9136"
},
{
"name": "Python",
"bytes": "129623"
},
{
"name": "Shell",
"bytes": "1192"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
import locale
import os
import string
from django.conf import settings
from django.utils.encoding import smart_text
from django.utils.translation import get_language, ugettext_lazy as _
try:
from django.utils.module_loading import import_string
except ImportError:
from django.utils.module_loading import import_by_path as import_string
def get_admin(name):
return import_string(getattr(settings,
'CMSPLUGIN_ARTICLES_{}_ADMIN'.format(name.upper()),
'cmsplugin_articles.admins.{}Admin'.format(name),
))
def get_form(name):
return import_string(getattr(settings,
'CMSPLUGIN_ARTICLES_{}_FORM'.format(name.upper()),
'cmsplugin_articles.forms.{}Form'.format(name),
))
def get_html_field():
return import_string(getattr(settings,
'CMSPLUGIN_ARTICLES_HTML_FIELD',
'djangocms_text_ckeditor.fields.HTMLField',
))
def get_menu(name):
return import_string(getattr(settings,
'CMSPLUGIN_ARTICLES_{}_MENU'.format(name.upper()),
'cmsplugin_articles.cms_menus.{}Menu'.format(name),
))
def get_model(name):
return import_string(getattr(settings,
'CMSPLUGIN_ARTICLES_{}_MODEL'.format(name.upper()),
'cmsplugin_articles.models.{}'.format(name),
))
def get_plugin(name):
return import_string(getattr(settings,
'CMSPLUGIN_ARTICLES_{}_PLUGIN'.format(name.upper()),
'cmsplugin_articles.plugins.{}Plugin'.format(name),
))
def get_toolbar(name):
return import_string(getattr(settings,
'CMSPLUGIN_ARTICLES_{}_TOOLBAR'.format(name.upper()),
'cmsplugin_articles.cms_toolbars.{}Toolbar'.format(name),
))
def get_view(name):
view = import_string(getattr(
settings,
'CMSPLUGIN_ARTICLES_{}_VIEW'.format(name.upper()),
'cmsplugin_articles.views.{}'.format(name),
))
    return view.as_view() if hasattr(view, 'as_view') else view
# this is used to get all these names translated
WEEKDAYS = {
0:_('Monday'), 1:_('Tuesday'), 2:_('Wednesday'), 3:_('Thursday'), 4:_('Friday'),
5:_('Saturday'), 6:_('Sunday')
}
WEEKDAYS_ABBR = {
0:_('Mon'), 1:_('Tue'), 2:_('Wed'), 3:_('Thu'), 4:_('Fri'),
5:_('Sat'), 6:_('Sun')
}
MONTHS = {
1:_('January'), 2:_('February'), 3:_('March'), 4:_('April'), 5:_('May'), 6:_('June'),
7:_('July'), 8:_('August'), 9:_('September'), 10:_('October'), 11:_('November'),
12:_('December')
}
MONTHS_ABBR = {
1:_('jan'), 2:_('feb'), 3:_('mar'), 4:_('apr'), 5:_('may'), 6:_('jun'),
7:_('jul'), 8:_('aug'), 9:_('sep'), 10:_('oct'), 11:_('nov'), 12:_('dec')
}
| {
"content_hash": "e65904f5dfb81c1b6369274cc683c013",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 125,
"avg_line_length": 31.28735632183908,
"alnum_prop": 0.6204996326230713,
"repo_name": "misli/cmsplugin-articles",
"id": "1b310a24a6b266f9ecf1d27e4e2e1d368fb5f271",
"size": "2722",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cmsplugin_articles/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "1723"
},
{
"name": "Python",
"bytes": "19924"
}
],
"symlink_target": ""
} |
from django import forms
from django.contrib.auth.forms import AuthenticationForm, PasswordChangeForm
from django.utils.translation import ugettext_lazy as _
class AdminAuthenticationForm(AuthenticationForm):
"""
A custom authentication form used in the admin app.
"""
error_messages = {
'invalid_login': _(
"Please enter the correct %(username)s and password for a staff "
"account. Note that both fields may be case-sensitive."
),
}
required_css_class = 'required'
def confirm_login_allowed(self, user):
if not user.is_active or not user.is_staff:
raise forms.ValidationError(
self.error_messages['invalid_login'],
code='invalid_login',
params={'username': self.username_field.verbose_name}
)
class AdminPasswordChangeForm(PasswordChangeForm):
required_css_class = 'required'
| {
"content_hash": "7e97671b50a52e4b6a1d1741416593ad",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 77,
"avg_line_length": 33.392857142857146,
"alnum_prop": 0.6524064171122995,
"repo_name": "auready/django",
"id": "7c3d196012d182b07c765d0b81f352c14c50327a",
"size": "935",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "django/contrib/admin/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "53169"
},
{
"name": "HTML",
"bytes": "173634"
},
{
"name": "JavaScript",
"bytes": "448151"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "12200962"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
} |
import pygame
import widget.button
import screens.sound as sound
class ExitScreen:
def __init__(self, canvas, prev=None):
self.canvas = canvas
self.image = pygame.image.load('resources/screens/' + canvas.language + '/termination.jpg')
        self.close_button = widget.button.Button((340, 350), (84, 80), self.close_app)
self.return_button = widget.button.Button((599, 351), (87, 76), self.return_to_prev)
self.prev = prev
pygame.mixer.music.pause()
# Updates this 'termination' screen.
def update(self):
pass
# Handles an event.
def on_event(self, event):
        self.close_button.on_event(event)
self.return_button.on_event(event)
# Reacts to the user confirming to close the application
def close_app(self, x, y, cursor):
sound.Plopperdeplop.tune(self, 'click')
pygame.time.wait(100)
self.canvas.quitGame()
# Reacts to the user confirming to return to the previous screen
def return_to_prev(self, x, y, cursor):
sound.Plopperdeplop.tune(self, 'click')
self.canvas.set_screen(self.prev)
pygame.mixer.music.unpause()
# Draws the components of this 'termination' screen.
def draw(self, surface):
surface.blit(self.image, (0, 0))
| {
"content_hash": "9b8afb0bbdfaddccbc3cad298ae2ffd4",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 99,
"avg_line_length": 34.78378378378378,
"alnum_prop": 0.6449106449106449,
"repo_name": "sinoz/boat-wars",
"id": "60f7ecd09815b32df25b39ff925c25f303221fdf",
"size": "1287",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/screens/termination.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "88691"
},
{
"name": "SQLPL",
"bytes": "648"
}
],
"symlink_target": ""
} |
"""blogular URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^api/', include('api.urls', namespace='api')),
url(r'^rest-auth/', include('rest_auth.urls')),
url(r'^rest-auth/registration/', include('rest_auth.registration.urls')),
url(r'^', include('blog.urls', namespace='blog')),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| {
"content_hash": "45dd7e64afb27e1f760d0a2ea8412ca7",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 77,
"avg_line_length": 42.2962962962963,
"alnum_prop": 0.6987740805604203,
"repo_name": "amaozhao/blogular",
"id": "bb2375203d8036428e2ab66107ad99ea3eae84fd",
"size": "1142",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "blogular/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "630768"
},
{
"name": "HTML",
"bytes": "152862"
},
{
"name": "JavaScript",
"bytes": "855396"
},
{
"name": "Python",
"bytes": "4112262"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages
from factory_alchemist import __version__
setup(name='Factory-Alchemist',
version=__version__,
packages=find_packages(exclude=['*test*']),
url='https://github.com/eduardo-matos/Factory-Alchemist',
author='Eduardo Matos',
keywords='sql sqlalchemy modelmommy orm')
| {
"content_hash": "a60698307371afdb9b32e66ec7c65188",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 63,
"avg_line_length": 33.9,
"alnum_prop": 0.6991150442477876,
"repo_name": "eduardo-matos/Factory-Alchemist",
"id": "93501e74f8e83b46aa5ec030e554775852e2c1e5",
"size": "339",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10609"
}
],
"symlink_target": ""
} |
from collections import OrderedDict
import copy
import itertools
import regex as re
import random
def reverse_enumerate(iterable):
'''
Enumerate through an iterable in reverse, reporting the index consistent
with the original iterable.
'''
return itertools.izip(reversed(xrange(len(iterable))), reversed(iterable))
def generate_repeat_units():
'''
Given canonical bases, generate a set of all possible repeat units up to
a length of four.
'''
bases = ['A', 'C', 'G', 'T']
repeat_units = set(copy.copy(bases))
for i in range(3):
for unit in copy.copy(repeat_units):
for base in bases:
repeat_units.add(unit + base)
temp_repeat_units = set()
for repeat_unit in repeat_units:
keep = True
n = len(repeat_unit)
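        # Drop homopolymer units longer than one base (e.g. 'AA', 'AAA' are covered by 'A').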
if n > 1:
if repeat_unit[0] * (n - 1) == repeat_unit[1:n]:
keep = False
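        # Drop 4-mers that are a doubled dinucleotide (e.g. 'ACAC' is covered by 'AC').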
if n == 4:
if repeat_unit[0:2] == repeat_unit[2:4]:
keep = False
if keep:
temp_repeat_units.add(repeat_unit)
repeat_units = temp_repeat_units
return repeat_units
def check_repeats(repeat_1, repeat_2):
'''
Check to see if repeat_1 is a possible permutation of repeat_2.
e.g. check_repeats('AGCT', 'GCTA') is True, check_repeats('AGCT', 'ATGC')
is False.
'''
if repeat_1 == repeat_2:
return True
elif len(repeat_1) == len(repeat_2):
for i in range(1, len(repeat_1)):
shuffled_repeat = repeat_1[i:] + repeat_1[:i]
if shuffled_repeat == repeat_2:
return True
return False
def create_repeat_file(fasta_file, output_file):
'''
For a given FASTA file, enumerate all repeats to an output file.
'''
repeat_units = generate_repeat_units()
sequences = OrderedDict()
seq_name = None
seq = ''
groups = dict()
for repeat_unit in repeat_units:
groups[repeat_unit] = dict()
for other_repeat_unit in repeat_units:
groups[repeat_unit][other_repeat_unit] = \
check_repeats(repeat_unit, other_repeat_unit)
with open(fasta_file) as f:
for line in f:
            if line.startswith('>'):  # New FASTA entry
                if seq_name is not None:  # flush the previous record, if any
                    sequences[seq_name] = seq
seq = ''
seq_name = re.split('>|\s+',line.strip())[1]
sequences[seq_name] = ''
else:
seq += line.strip()
sequences[seq_name] = seq
with open(output_file, 'w') as OUT:
for sequence_name, sequence in sequences.items():
matches = []
for repeat_unit in repeat_units:
repeat_length = len(repeat_unit)
fragments = []
for i in range(1, len(repeat_unit)):
fragments.append(repeat_unit[:-i])
search_pattern = '({}){{2,}}({}){{0,1}}'.format(
repeat_unit,
'|'.join(fragments),
)
last_start = None
for match in re.finditer(search_pattern, sequence,
overlapped=True):
keep = True
if last_start:
if match.start() - repeat_length == last_start:
keep = False
if keep:
matches.append({
'sequence': match.group(0),
'repeat_unit': repeat_unit,
'start': match.start(),
'end': match.end(),
})
last_start = match.start()
sort = sorted(matches, key=lambda x: (x['start'], -x['end']))
kept_matches = []
i = len(sort) - 1
while i >= 0:
keep = True
j = i - 1
while j >= 0:
if (
sort[i]['start'] >= sort[j]['start'] and
sort[i]['end'] <= sort[j]['end'] and
groups[sort[i]['repeat_unit']][sort[j]['repeat_unit']]
):
keep = False
break
if sort[i]['start'] > sort[j]['end']:
break
j = j - 1
if keep:
kept_matches.append(sort[i])
i = i - 1
for match in sorted(kept_matches, key=lambda x: x['start']):
OUT.write('\t'.join((
sequence_name,
match['sequence'],
str(len(match['repeat_unit'])),
match['repeat_unit'],
str(match['start']),
str(match['end']),
)) + '\n')
def extract_repeat_file_sample(repeat_file, sample_file, total):
'''
Extract a random sample of repeat loci from a genome-wide list
'''
with open(repeat_file, 'r', 1) as f:
for i, l in enumerate(f):
pass
i += 1
    keep = dict(zip(random.sample(range(0, i), total), itertools.repeat(0)))
with open(repeat_file, 'r', 1) as f, open(sample_file, 'w') as OUT:
for x, line in enumerate(f):
if x in keep:
OUT.write(line)
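# Example usage (hypothetical file names):
#   create_repeat_file('genome.fa', 'genome.repeats.txt')
#   extract_repeat_file_sample('genome.repeats.txt', 'repeats.sample.txt', 1000)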
| {
"content_hash": "cbe230dd779e5b9f3f0094e5b05df48f",
"timestamp": "",
"source": "github",
"line_count": 202,
"max_line_length": 78,
"avg_line_length": 27.836633663366335,
"alnum_prop": 0.45296105281878,
"repo_name": "NIEHS/muver",
"id": "4bd336bb32d987a63ea82b2b27122b6ce7159945",
"size": "5623",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "muver/repeats.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "153097"
}
],
"symlink_target": ""
} |
from distutils.core import setup
import os, sys
bbdir = os.path.join(sys.prefix, 'share', 'bitbake')
docdir = os.path.join(sys.prefix, 'share', 'doc')
# bbdir = os.path.join('bitbake')
# docdir = os.path.join('doc')
def clean_doc(type):
origpath = os.path.abspath(os.curdir)
os.chdir(os.path.join(origpath, 'doc', 'manual'))
make = os.environ.get('MAKE') or 'make'
os.system('%s clean-%s' % (make, type))
def generate_doc(type):
origpath = os.path.abspath(os.curdir)
os.chdir(os.path.join(origpath, 'doc', 'manual'))
make = os.environ.get('MAKE') or 'make'
ret = os.system('%s %s' % (make, type))
if ret != 0:
print "ERROR: Unable to generate html documentation."
sys.exit(ret)
os.chdir(origpath)
if 'bdist' in sys.argv[1:]:
generate_doc('html')
sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), 'lib'))
import bb
import glob
setup(name='bitbake',
version=bb.__version__,
license='GPLv2',
url='http://developer.berlios.de/projects/bitbake/',
description='BitBake build tool',
      long_description='BitBake is a simple tool for the execution of tasks. It is derived from Portage, which is the package management system used by the Gentoo Linux distribution. It is most commonly used to build packages, as it can easily use its rudimentary inheritance to abstract common operations, such as fetching sources, unpacking them, patching them, compiling them, and so on. It is the basis of the OpenEmbedded project, which is being used for OpenZaurus, Familiar, and a number of other Linux distributions.',
author='Chris Larson',
author_email='[email protected]',
packages=['bb', 'bb.fetch', 'bb.parse', 'bb.parse.parse_py'],
package_dir={'bb': os.path.join('lib', 'bb')},
scripts=[os.path.join('bin', 'bitbake'),
os.path.join('bin', 'bbimage')],
data_files=[(os.path.join(bbdir, 'conf'), [os.path.join('conf', 'bitbake.conf')]),
(os.path.join(bbdir, 'classes'), [os.path.join('classes', 'base.bbclass')]),
(os.path.join(docdir, 'bitbake-%s' % bb.__version__, 'html'), glob.glob(os.path.join('doc', 'manual', 'html', '*.html'))),
(os.path.join(docdir, 'bitbake-%s' % bb.__version__, 'pdf'), glob.glob(os.path.join('doc', 'manual', 'pdf', '*.pdf'))),],
)
if 'bdist' in sys.argv[1:]:
clean_doc('html')
| {
"content_hash": "832b36798c3513cb015ce752ccdc3d7d",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 527,
"avg_line_length": 48.22,
"alnum_prop": 0.6366652841144753,
"repo_name": "KDAB/OpenEmbedded-Archos",
"id": "0433af32b3fdf4446dccc3f73bb26fc4f27a3bcf",
"size": "3203",
"binary": false,
"copies": "5",
"ref": "refs/heads/archos-gen8",
"path": "bitbake/setup.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "29567"
},
{
"name": "C",
"bytes": "3073190"
},
{
"name": "C++",
"bytes": "353639"
},
{
"name": "D",
"bytes": "7847"
},
{
"name": "JavaScript",
"bytes": "94944"
},
{
"name": "Objective-C",
"bytes": "262655"
},
{
"name": "PHP",
"bytes": "9144"
},
{
"name": "Perl",
"bytes": "99970"
},
{
"name": "Prolog",
"bytes": "13844"
},
{
"name": "Python",
"bytes": "705495"
},
{
"name": "Racket",
"bytes": "1369"
},
{
"name": "Shell",
"bytes": "490073"
},
{
"name": "VimL",
"bytes": "4556"
}
],
"symlink_target": ""
} |
az.plot_posterior(data, var_names=['mu', '^the'], filter_vars="regex", rope=(-1, 1))
| {
"content_hash": "6983e4d3f3657a82e2609e41d0ba44c0",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 84,
"avg_line_length": 85,
"alnum_prop": 0.6235294117647059,
"repo_name": "mcmcplotlib/mcmcplotlib",
"id": "4a58d4209f7202869e3de52975128fbefd2fa93a",
"size": "85",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "api/generated/arviz-plot_posterior-3.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "91689"
}
],
"symlink_target": ""
} |
import tornado.httpserver
import tornado.gen
import tornado.ioloop
import tornado.web
import os
_CHUNK_OF_128_BYTES = '000000000000000,111111111111111,222222222222222,333333333333333,444444444444444,' \
'555555555555555,666666666666666,777777777777777\n'
class FakeCsvHandler(tornado.web.RequestHandler):
_COLUMN_HEADERS = 'COL_0,COL_1,COL_2,COL_3,COL_4,COL_5,COL_6,COL_7\n'
_ONE_KILOBYTE_CHUNK = ''.join(_CHUNK_OF_128_BYTES for _ in range(8))
_HUNDRED_KILOBYTE_CHUNK = ''.join(_CHUNK_OF_128_BYTES for _ in range(800))
@tornado.gen.coroutine
def get(self, str_kilobyte_count):
kilobyte_count = int(str_kilobyte_count)
kilobytes_to_hundred = kilobyte_count % 100
self._set_file_headers(kilobyte_count)
self.write(self._COLUMN_HEADERS)
for _ in range(int(kilobyte_count // 100)):
self.write(self._HUNDRED_KILOBYTE_CHUNK)
yield self.flush()
for _ in range(kilobytes_to_hundred):
self.write(self._ONE_KILOBYTE_CHUNK)
yield self.flush()
def _set_file_headers(self, size_in_kilobytes):
num_bytes = size_in_kilobytes * 1024 + len(self._COLUMN_HEADERS)
self.set_header('Content-Length', num_bytes)
self.set_header('Content-type', 'text/csv')
self.set_header('Content-disposition', 'attachment;filename={}_kilobytes.csv'.format(size_in_kilobytes))
def make_app():
return tornado.web.Application([
(r"/fake-csv/([0-9]+)", FakeCsvHandler),
])
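# Example: with the server running on the default port,
#   curl http://localhost:9090/fake-csv/5
# streams a ~5 KB CSV (the header line plus five 1 KB chunks of fixed 128-byte rows).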
if __name__ == "__main__":
server = tornado.httpserver.HTTPServer(make_app())
server.bind(int(os.getenv('PORT', '9090')))
server.start(0)
tornado.ioloop.IOLoop.current().start()
| {
"content_hash": "f31e5f97b8e42894d166b79ef84e387c",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 112,
"avg_line_length": 35.183673469387756,
"alnum_prop": 0.6571925754060325,
"repo_name": "butla/fake_csv_server",
"id": "5c6fb9b69a2f4f67c0e90bdced8a7e20c817fbf7",
"size": "1724",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fake_csv_server/app.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1822"
}
],
"symlink_target": ""
} |
import os
import shutil
import wx
from .. import context
from ..action import ActionInfo
from ..pluginapi import Plugin
from ..publish import PUBLISHER, RideExecuteSpecXmlImport
from .xmlreaders import get_name_from_xml
class SpecImporterPlugin(Plugin):
HEADER = 'Import Library Spec XML'
def enable(self):
self.register_action(ActionInfo('Tools', self.HEADER,
self.execute_spec_import, position=83))
PUBLISHER.subscribe(self._ps_on_execute_spec_import, RideExecuteSpecXmlImport)
def disable(self):
self.unsubscribe_all()
self.unregister_actions()
def _ps_on_execute_spec_import(self, message):
self.execute_spec_import()
def execute_spec_import(self):
path = self._get_path_to_library_spec()
if self._is_valid_path(path):
self._store_spec(path)
self._execute_namespace_update()
def _is_valid_path(self, path):
return path and os.path.isfile(path)
def _execute_namespace_update(self):
self.model.update_namespace()
def _get_path_to_library_spec(self):
wildcard = ('Library Spec XML|*.xml|All Files|*.*')
dlg = wx.FileDialog(self.frame,
message='Import Library Spec XML',
wildcard=wildcard,
defaultDir=self.model.default_dir) # DEBUG
# , style=wx.OPEN)
if dlg.ShowModal() == wx.ID_OK:
path = dlg.GetPath()
else:
path = None
dlg.Destroy()
return path
def _store_spec(self, path):
name = get_name_from_xml(path)
if name:
shutil.copy(path, os.path.join(context.LIBRARY_XML_DIRECTORY, name+'.xml'))
wx.MessageBox('Library "%s" imported\nfrom "%s"\nThis may require RIDE restart.' % (name, path), 'Info', wx.OK | wx.ICON_INFORMATION)
else:
wx.MessageBox('Could not import library from file "%s"' % path, 'Import failed', wx.OK | wx.ICON_ERROR)
| {
"content_hash": "40da73573455b98027760eec966e9123",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 145,
"avg_line_length": 34.11666666666667,
"alnum_prop": 0.6013678553981436,
"repo_name": "robotframework/RIDE",
"id": "f1b9688f1cc126b6de1f083f63f3f1dc76fd38ab",
"size": "2691",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/robotide/spec/specimporter.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "31131"
},
{
"name": "HTML",
"bytes": "96342"
},
{
"name": "JavaScript",
"bytes": "42656"
},
{
"name": "Python",
"bytes": "3703410"
},
{
"name": "RobotFramework",
"bytes": "378004"
},
{
"name": "Shell",
"bytes": "1873"
}
],
"symlink_target": ""
} |
class WordDictionary(object):
def __init__(self):
"""
initialize your data structure here.
"""
self.trie = {}
def addWord(self, word):
"""
Adds a word into the data structure.
:type word: str
:rtype: void
"""
trie=self.trie
for c in word:
if c not in trie:
trie[c] = {}
trie = trie[c]
trie['#'] = '#'
def search(self, word, trie = None):
"""
Returns if the word is in the data structure. A word could
contain the dot character '.' to represent any one letter.
:type word: str
:rtype: bool
"""
if not trie:
trie = self.trie
if not word:
if '#' in trie:
return True
else:
return False
c = word[0]
if c in trie:
return self.search(word[1:],trie[c])
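        # Wildcard: try every child branch (skipping the '#' terminator) for the rest of the word.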
elif c == '.':
for cc in trie:
if cc != '#' and self.search(word[1:],trie[cc]):
return True
return False
# Your WordDictionary object will be instantiated and called as such:
# wordDictionary = WordDictionary()
# wordDictionary.addWord("word")
# wordDictionary.search("pattern") | {
"content_hash": "d961399534fa541e31cf8033dda7e44c",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 69,
"avg_line_length": 27.80851063829787,
"alnum_prop": 0.486610558530987,
"repo_name": "saai/codingbitch",
"id": "f7a9b688f1a9ab7a54f71599354f3c9ca156d3ba",
"size": "1307",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "backtracking/wordDictionaryUsingTrie.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "533"
},
{
"name": "Python",
"bytes": "160609"
}
],
"symlink_target": ""
} |
""" Personality formatter """
from aquilon.aqdb.model import Personality
from aquilon.worker.formats.formatters import ObjectFormatter
from aquilon.worker.formats.list import ListFormatter
from aquilon.worker.templates.base import TEMPLATE_EXTENSION
class ThresholdedPersonality(object):
def __init__(self, dbpersonality, thresholds):
self.dbpersonality = dbpersonality
if not thresholds:
thresholds = {}
self.threshold = thresholds.get('threshold')
self.maintenance_threshold = thresholds.get('maintenance_threshold')
class PersonalityList(list):
"""Holds instances of ThresholdedPersonality."""
class PersonalityListFormatter(ListFormatter):
protocol = "aqdsystems_pb2"
def format_proto(self, tpl, skeleton=None):
if not skeleton:
skeleton = self.loaded_protocols[self.protocol].PersonalityList()
for personality in tpl:
self.redirect_proto(personality, skeleton.personalities.add())
return skeleton
class PersonalityFormatter(ObjectFormatter):
protocol = "aqdsystems_pb2"
def format_raw(self, personality, indent=""):
# Transparently handle Personality and ThresholdedPersonality
has_threshold = False
if hasattr(personality, "dbpersonality"):
threshold = personality.threshold
maintenance_threshold = personality.maintenance_threshold
has_threshold = True
personality = personality.dbpersonality
description = "Host"
if personality.is_cluster:
description = "Cluster"
details = [indent + "{0} Personality: {1.name} Archetype: {1.archetype.name}"
.format(description, personality)]
details.append(indent + " Environment: {0.name}"
.format(personality.host_environment))
details.append(indent + " Owned by {0:c}: {0.grn}"
.format(personality.owner_grn))
for grn_rec in sorted(personality._grns, key=lambda x: x.target):
details.append(indent + " Used by {0.grn:c}: {0.grn.grn} "
"[{0.target}]".format(grn_rec))
if personality.config_override:
details.append(indent + " Config override: enabled")
details.append(indent + " Template: {0.archetype.name}/personality/{0.name}/config{1}"
.format(personality, TEMPLATE_EXTENSION))
if has_threshold:
details.append(indent + " Threshold: {0}".format(threshold))
details.append(indent + " Maintenance Threshold: {0}"
.format(maintenance_threshold))
if personality.cluster_required:
details.append(indent + " Requires clustered hosts")
for service in personality.services:
details.append(indent + " Required Service: {0.name}"
.format(service))
features = personality.features[:]
features.sort(key=lambda x: (x.feature.feature_type,
x.feature.post_personality,
x.feature.name))
for link in features:
if link.feature.post_personality:
flagstr = " [post_personality]"
elif link.feature.post_personality_allowed:
flagstr = " [pre_personality]"
else:
flagstr = ""
details.append(indent + " {0:c}: {0.name}{1}"
.format(link.feature, flagstr))
if link.model:
details.append(indent + " {0:c}: {0.name} {1:c}: {1.name}"
.format(link.model.vendor, link.model))
if link.interface_name:
details.append(indent + " Interface: {0.interface_name}"
.format(link))
if personality.comments:
details.append(indent + " Comments: {0.comments}"
.format(personality))
for cltype, info in personality.cluster_infos.items():
details.append(indent + " Extra settings for %s clusters:" % cltype)
if cltype == "esx":
details.append(indent + " VM host capacity function: %s" %
info.vmhost_capacity_function)
details.append(indent + " VM host overcommit factor: %s" %
info.vmhost_overcommit_memory)
return "\n".join(details)
def format_proto(self, personality, skeleton=None):
container = skeleton
if not container:
container = self.loaded_protocols[self.protocol].PersonalityList()
skeleton = container.personalities.add()
# Transparently handle Personality and ThresholdedPersonality
threshold = None
if hasattr(personality, "dbpersonality"):
threshold = personality.threshold
personality = personality.dbpersonality
self.add_personality_data(skeleton, personality)
if threshold is not None:
skeleton.threshold = threshold
features = personality.features[:]
features.sort(key=lambda x: (x.feature.feature_type,
x.feature.post_personality,
x.feature.name))
for link in features:
self.add_featurelink_msg(skeleton.features.add(), link)
for service in personality.services:
rsvc_msg = skeleton.required_services.add()
rsvc_msg.service = service.name
if personality.comments:
skeleton.comments = personality.comments
skeleton.config_override = personality.config_override
skeleton.cluster_required = personality.cluster_required
return container
ObjectFormatter.handlers[Personality] = PersonalityFormatter()
ObjectFormatter.handlers[ThresholdedPersonality] = PersonalityFormatter()
ObjectFormatter.handlers[PersonalityList] = PersonalityListFormatter()
class SimplePersonalityList(list):
"""Holds a list of personalities for which a list will be formatted
in a simple (name-only) manner."""
class SimplePersonalityListFormatter(PersonalityListFormatter):
protocol = "aqdsystems_pb2"
def format_raw(self, result, indent=""):
return str("\n".join([indent + "{0.archetype.name}/{0.name}".format(obj) for obj in result]))
def csv_fields(self, obj):
return (obj.archetype.name, obj.name,)
def format_proto(self, tpl, skeleton=None):
if not skeleton:
skeleton = self.loaded_protocols[self.protocol].PersonalityList()
for personality in tpl:
container = skeleton.personalities.add()
container.name = str(personality)
container.archetype.name = str(personality.archetype.name)
container.host_environment = str(personality.host_environment)
container.owner_eonid = personality.owner_eon_id
return skeleton
ObjectFormatter.handlers[SimplePersonalityList] = SimplePersonalityListFormatter()
| {
"content_hash": "1d27fac30fc1e3f26c732fd490ee4784",
"timestamp": "",
"source": "github",
"line_count": 174,
"max_line_length": 101,
"avg_line_length": 41.06896551724138,
"alnum_prop": 0.6137699412258606,
"repo_name": "stdweird/aquilon",
"id": "95b59501c5c81f924a6444e0685742eea9ac74e2",
"size": "7849",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/python2.6/aquilon/worker/formats/personality.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "DIGITAL Command Language",
"bytes": "3791"
},
{
"name": "Makefile",
"bytes": "5024"
},
{
"name": "Mako",
"bytes": "3996"
},
{
"name": "PLSQL",
"bytes": "69088"
},
{
"name": "Perl",
"bytes": "5030"
},
{
"name": "Python",
"bytes": "4257490"
},
{
"name": "SQLPL",
"bytes": "869"
},
{
"name": "Shell",
"bytes": "22083"
}
],
"symlink_target": ""
} |
"""
This file is part of the TheLMA (THe Laboratory Management Application) project.
See LICENSE.txt for licensing, CONTRIBUTORS.txt for contributor information.
Sample molecule mapper.
"""
from sqlalchemy.orm import relationship
from everest.repositories.rdb.utils import mapper
from thelma.entities.sample import Molecule
from thelma.entities.sample import Sample
from thelma.entities.sample import SampleMolecule
__docformat__ = 'reStructuredText en'
__all__ = ['create_mapper']
def create_mapper(sample_molecule_tbl):
"Mapper factory."
m = mapper(SampleMolecule, sample_molecule_tbl,
id_attribute='molecule_id',
properties=dict(
sample=relationship(Sample, uselist=False,
back_populates='sample_molecules'),
molecule=relationship(Molecule, uselist=False,
back_populates='sample_molecules',
# lazy='joined'
),
),
)
return m
| {
"content_hash": "37fd02c6ea2464be339e1a3703f67ad7",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 80,
"avg_line_length": 32.0625,
"alnum_prop": 0.6393762183235867,
"repo_name": "helixyte/TheLMA",
"id": "739f9dc92a299f791ae34ac060485864aba3889f",
"size": "1026",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "thelma/repositories/rdb/mappers/samplemolecule.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "3126"
},
{
"name": "Python",
"bytes": "3329729"
},
{
"name": "Shell",
"bytes": "3071"
}
],
"symlink_target": ""
} |
'''Substitution Matrix data structures and constants'''
from .substitution_matrix import SubstitutionMatrix
from .dna import DNA
from .dna_simple import DNA_SIMPLE
from .blosum62 import BLOSUM62
| {
"content_hash": "b840bc1b4865ff49680cf0b4b216fe0e",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 55,
"avg_line_length": 39,
"alnum_prop": 0.8205128205128205,
"repo_name": "klavinslab/coral",
"id": "a0552e97a0f4ed79eba8664cd9a0b1138c076070",
"size": "195",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "coral/analysis/_sequencing/substitution_matrices/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "424580"
},
{
"name": "Shell",
"bytes": "1681"
}
],
"symlink_target": ""
} |
from oauthlib.oauth1.rfc5849.utils import *
from tests.unittest import TestCase
class UtilsTests(TestCase):
sample_params_list = [
("notoauth", "shouldnotbehere"),
("oauth_consumer_key", "9djdj82h48djs9d2"),
("oauth_token", "kkk9d7dh3k39sjv7"),
("notoautheither", "shouldnotbehere")
]
sample_params_dict = {
"notoauth": "shouldnotbehere",
"oauth_consumer_key": "9djdj82h48djs9d2",
"oauth_token": "kkk9d7dh3k39sjv7",
"notoautheither": "shouldnotbehere"
}
sample_params_unicode_list = [
("notoauth", "shouldnotbehere"),
("oauth_consumer_key", "9djdj82h48djs9d2"),
("oauth_token", "kkk9d7dh3k39sjv7"),
("notoautheither", "shouldnotbehere")
]
sample_params_unicode_dict = {
"notoauth": "shouldnotbehere",
"oauth_consumer_key": "9djdj82h48djs9d2",
"oauth_token": "kkk9d7dh3k39sjv7",
"notoautheither": "shouldnotbehere"
}
authorization_header = """OAuth realm="Example",
oauth_consumer_key="9djdj82h48djs9d2",
oauth_token="kkk9d7dh3k39sjv7",
oauth_signature_method="HMAC-SHA1",
oauth_timestamp="137131201",
oauth_nonce="7d8f3e4a",
oauth_signature="djosJKDKJSD8743243%2Fjdk33klY%3D" """.strip()
bad_authorization_headers = (
"OAuth",
"OAuth oauth_nonce=",
"Negotiate b2F1dGhsaWI=",
"OA",
)
def test_filter_params(self):
# The following is an isolated test function used to test the filter_params decorator.
@filter_params
def special_test_function(params, realm=None):
""" I am a special test function """
return 'OAuth ' + ','.join(['='.join([k, v]) for k, v in params])
# check that the docstring got through
self.assertEqual(special_test_function.__doc__, " I am a special test function ")
# Check that the decorator filtering works as per design.
# Any param that does not start with 'oauth'
# should not be present in the filtered params
filtered_params = special_test_function(self.sample_params_list)
self.assertNotIn("notoauth", filtered_params)
self.assertIn("oauth_consumer_key", filtered_params)
self.assertIn("oauth_token", filtered_params)
self.assertNotIn("notoautheither", filtered_params)
def test_filter_oauth_params(self):
        # try with list
self.assertEqual(len(self.sample_params_list), 4)
# Any param that does not start with 'oauth'
# should not be present in the filtered params
filtered_params = filter_oauth_params(self.sample_params_list)
self.assertEqual(len(filtered_params), 2)
self.assertTrue(filtered_params[0][0].startswith('oauth'))
self.assertTrue(filtered_params[1][0].startswith('oauth'))
        # try with dict
self.assertEqual(len(self.sample_params_dict), 4)
# Any param that does not start with 'oauth'
# should not be present in the filtered params
filtered_params = filter_oauth_params(self.sample_params_dict)
self.assertEqual(len(filtered_params), 2)
self.assertTrue(filtered_params[0][0].startswith('oauth'))
self.assertTrue(filtered_params[1][0].startswith('oauth'))
def test_escape(self):
self.assertRaises(ValueError, escape, b"I am a string type. Not a unicode type.")
self.assertEqual(escape("I am a unicode type."), "I%20am%20a%20unicode%20type.")
self.assertIsInstance(escape("I am a unicode type."), str)
def test_unescape(self):
self.assertRaises(ValueError, unescape, b"I am a string type. Not a unicode type.")
self.assertEqual(unescape("I%20am%20a%20unicode%20type."), 'I am a unicode type.')
self.assertIsInstance(unescape("I%20am%20a%20unicode%20type."), str)
def test_parse_authorization_header(self):
# make us some headers
authorization_headers = parse_authorization_header(self.authorization_header)
# is it a list?
self.assertIsInstance(authorization_headers, list)
# are the internal items tuples?
for header in authorization_headers:
self.assertIsInstance(header, tuple)
# are the internal components of each tuple unicode?
for k, v in authorization_headers:
self.assertIsInstance(k, str)
self.assertIsInstance(v, str)
# let's check the parsed headers created
correct_headers = [
("oauth_nonce", "7d8f3e4a"),
("oauth_timestamp", "137131201"),
("oauth_consumer_key", "9djdj82h48djs9d2"),
('oauth_signature', 'djosJKDKJSD8743243%2Fjdk33klY%3D'),
('oauth_signature_method', 'HMAC-SHA1'),
('oauth_token', 'kkk9d7dh3k39sjv7'),
('realm', 'Example')]
self.assertEqual(sorted(authorization_headers), sorted(correct_headers))
# Check against malformed headers.
for header in self.bad_authorization_headers:
self.assertRaises(ValueError, parse_authorization_header, header)
| {
"content_hash": "ba40ae485c1b6ccd8e41705828e5cff8",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 94,
"avg_line_length": 38.802919708029194,
"alnum_prop": 0.626410835214447,
"repo_name": "idan/oauthlib",
"id": "013c71a910da91b623c5addfa2f3e61a4a6293c0",
"size": "5340",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/oauth1/rfc5849/test_utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1617"
},
{
"name": "Python",
"bytes": "661763"
}
],
"symlink_target": ""
} |
from subprocess import Popen, PIPE
less = Popen("less", stdin=PIPE)
less.communicate(bytes("Text I want to send to less", encoding='utf-8'))
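# Note (not part of the original snippet): less inherits the parent's
# stdout/stderr, so this pages correctly only when run from an interactive
# terminal; communicate() also closes stdin, so less sees end-of-input.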
| {
"content_hash": "68145399a9803b39048d0adca460e066",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 72,
"avg_line_length": 28.6,
"alnum_prop": 0.7272727272727273,
"repo_name": "boarpig/tidbits",
"id": "eb772be7970091fc2fdf8636208c41982da7f8cf",
"size": "162",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pipe_to_less.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10137"
},
{
"name": "Shell",
"bytes": "303"
}
],
"symlink_target": ""
} |
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
import os
from django.core.exceptions import ImproperlyConfigured
from unipath import Path
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
BASE_DIR = Path(__file__).ancestor(4)
SRC_DIR = Path(__file__).ancestor(3)
def get_config(setting):
try:
return os.environ[setting]
except KeyError:
error_msg = "Set the {0} environnement variable".format(setting)
raise ImproperlyConfigured(error_msg)
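# Illustrative only (hypothetical values): the assignments below read these
# variables from the container environment, e.g.
#   export MAJOR_RELEASE=1 MINOR_RELEASE=2 BUILD_NUMBER=34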
VERSION = get_config('MAJOR_RELEASE') + '.' + get_config('MINOR_RELEASE') + '.' + get_config('BUILD_NUMBER')
ENVIRONMENT_ID = get_config('RANCHER_ENVIRONMENT_ID')
DOMAIN = get_config('AMM_MYSQL_DOMAIN')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = get_config('DJANGO_DEBUG') == 'True'
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = get_config("SECRET_KEY")
ALLOWED_HOSTS = [
get_config('DJANGO_HOST'),
'127.0.0.1',
]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# Third party libraries
'rest_framework',
'rest_framework_swagger',
# Custom django app
'api.apps.ApiConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'config.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'config.wsgi.application'
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = SRC_DIR.child('static')
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": get_config('CACHE_REDIS_LOCATION'),
"OPTIONS": {
"CLIENT_CLASS": get_config('CACHE_REDIS_CLIENT_CLASS'),
"SERIALIZER": "django_redis.serializers.json.JSONSerializer",
}
}
}
REST_FRAMEWORK = {
'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend',),
'EXCEPTION_HANDLER': 'api.api_exception_handler.api_exception_handler',
}
| {
"content_hash": "e07887a10dd6d6b4064ac1a5b75e9788",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 108,
"avg_line_length": 27.32857142857143,
"alnum_prop": 0.6738107684265552,
"repo_name": "epfl-idevelop/amm",
"id": "0eb6e1479821846831ffe4ab41ad9043011e48db",
"size": "3826",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/config/settings/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "7703"
},
{
"name": "Nginx",
"bytes": "1352"
},
{
"name": "Python",
"bytes": "79542"
},
{
"name": "Shell",
"bytes": "766"
}
],
"symlink_target": ""
} |
"""Blowfish symmetric cipher
Blowfish_ is a symmetric block cipher designed by Bruce Schneier.
It has a fixed data block size of 8 bytes and its keys can vary in length
from 32 to 448 bits (4 to 56 bytes).
Blowfish is deemed secure and it is fast. However, its keys should be chosen
to be big enough to withstand a brute force attack (e.g. at least 16 bytes).
As an example, encryption can be done as follows:
>>> from Crypto.Cipher import Blowfish
>>> from Crypto import Random
>>> from struct import pack
>>>
>>> bs = Blowfish.block_size
>>> key = b'An arbitrarily long key'
>>> iv = Random.new().read(bs)
>>> cipher = Blowfish.new(key, Blowfish.MODE_CBC, iv)
>>> plaintext = b'docendo discimus '
>>> plen = bs - divmod(len(plaintext),bs)[1]
>>> padding = [plen]*plen
>>> padding = pack('b'*plen, *padding)
>>> msg = iv + cipher.encrypt(plaintext + padding)
.. _Blowfish: http://www.schneier.com/blowfish.html
:undocumented: __revision__, __package__
"""
__revision__ = "$Id$"
from Crypto.Cipher import blockalgo
from Crypto.Cipher import _Blowfish
class BlowfishCipher (blockalgo.BlockAlgo):
"""Blowfish cipher object"""
def __init__(self, key, *args, **kwargs):
"""Initialize a Blowfish cipher object
See also `new()` at the module level."""
blockalgo.BlockAlgo.__init__(self, _Blowfish, key, *args, **kwargs)
def new(key, *args, **kwargs):
"""Create a new Blowfish cipher
:Parameters:
key : byte string
The secret key to use in the symmetric cipher.
Its length can vary from 4 to 56 bytes.
:Keywords:
mode : a *MODE_** constant
The chaining mode to use for encryption or decryption.
Default is `MODE_ECB`.
IV : byte string
The initialization vector to use for encryption or decryption.
It is ignored for `MODE_ECB` and `MODE_CTR`.
For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption
and `block_size` +2 bytes for decryption (in the latter case, it is
actually the *encrypted* IV which was prefixed to the ciphertext).
It is mandatory.
        For all other modes, it must be `block_size` bytes long.
counter : callable
(*Only* `MODE_CTR`). A stateful function that returns the next
*counter block*, which is a byte string of `block_size` bytes.
For better performance, use `Crypto.Util.Counter`.
segment_size : integer
        (*Only* `MODE_CFB`). The number of bits the plaintext and ciphertext
are segmented in.
It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8.
:Return: a `BlowfishCipher` object
"""
return BlowfishCipher(key, *args, **kwargs)
#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`.
MODE_ECB = 1
#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`.
MODE_CBC = 2
#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`.
MODE_CFB = 3
#: This mode should not be used.
MODE_PGP = 4
#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`.
MODE_OFB = 5
#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`.
MODE_CTR = 6
#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`.
MODE_OPENPGP = 7
#: Size of a data block (in bytes)
block_size = 8
#: Size of a key (in bytes)
key_size = xrange(4,56+1)
| {
"content_hash": "f3fe67fb07a93b92d1836468f6d11682",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 86,
"avg_line_length": 33.80808080808081,
"alnum_prop": 0.6495368987152674,
"repo_name": "nmercier/linux-cross-gcc",
"id": "2ce78e9d4b317d5093d64521df4f445907bf4918",
"size": "4368",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "linux/lib/python2.7/dist-packages/Crypto/Cipher/Blowfish.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1047092"
},
{
"name": "C++",
"bytes": "151335"
},
{
"name": "Makefile",
"bytes": "82796"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "Python",
"bytes": "29123266"
},
{
"name": "Shell",
"bytes": "14668"
}
],
"symlink_target": ""
} |
"""The tests for the rest command platform."""
import asyncio
import aiohttp
import homeassistant.components.rest_command as rc
from homeassistant.setup import setup_component
from tests.common import (
get_test_home_assistant, assert_setup_component)
class TestRestCommandSetup(object):
"""Test the rest command component."""
def setup_method(self):
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.config = {
rc.DOMAIN: {'test_get': {
'url': 'http://example.com/'
}}
}
def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop()
def test_setup_component(self):
"""Test setup component."""
with assert_setup_component(1):
setup_component(self.hass, rc.DOMAIN, self.config)
def test_setup_component_timeout(self):
"""Test setup component timeout."""
self.config[rc.DOMAIN]['test_get']['timeout'] = 10
with assert_setup_component(1):
setup_component(self.hass, rc.DOMAIN, self.config)
def test_setup_component_test_service(self):
"""Test setup component and check if service exits."""
with assert_setup_component(1):
setup_component(self.hass, rc.DOMAIN, self.config)
assert self.hass.services.has_service(rc.DOMAIN, 'test_get')
class TestRestCommandComponent(object):
"""Test the rest command component."""
def setup_method(self):
"""Setup things to be run when tests are started."""
self.url = "https://example.com/"
self.config = {
rc.DOMAIN: {
'get_test': {
'url': self.url,
'method': 'get',
},
'post_test': {
'url': self.url,
'method': 'post',
},
'put_test': {
'url': self.url,
'method': 'put',
},
'delete_test': {
'url': self.url,
'method': 'delete',
},
}
}
self.hass = get_test_home_assistant()
def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop()
def test_setup_tests(self):
"""Setup test config and test it."""
with assert_setup_component(4):
setup_component(self.hass, rc.DOMAIN, self.config)
assert self.hass.services.has_service(rc.DOMAIN, 'get_test')
assert self.hass.services.has_service(rc.DOMAIN, 'post_test')
assert self.hass.services.has_service(rc.DOMAIN, 'put_test')
assert self.hass.services.has_service(rc.DOMAIN, 'delete_test')
def test_rest_command_timeout(self, aioclient_mock):
"""Call a rest command with timeout."""
with assert_setup_component(4):
setup_component(self.hass, rc.DOMAIN, self.config)
aioclient_mock.get(self.url, exc=asyncio.TimeoutError())
self.hass.services.call(rc.DOMAIN, 'get_test', {})
self.hass.block_till_done()
assert len(aioclient_mock.mock_calls) == 1
def test_rest_command_aiohttp_error(self, aioclient_mock):
"""Call a rest command with aiohttp exception."""
with assert_setup_component(4):
setup_component(self.hass, rc.DOMAIN, self.config)
aioclient_mock.get(self.url, exc=aiohttp.errors.ClientError())
self.hass.services.call(rc.DOMAIN, 'get_test', {})
self.hass.block_till_done()
assert len(aioclient_mock.mock_calls) == 1
def test_rest_command_http_error(self, aioclient_mock):
"""Call a rest command with status code 400."""
with assert_setup_component(4):
setup_component(self.hass, rc.DOMAIN, self.config)
aioclient_mock.get(self.url, status=400)
self.hass.services.call(rc.DOMAIN, 'get_test', {})
self.hass.block_till_done()
assert len(aioclient_mock.mock_calls) == 1
def test_rest_command_auth(self, aioclient_mock):
"""Call a rest command with auth credential."""
data = {
'username': 'test',
'password': '123456',
}
self.config[rc.DOMAIN]['get_test'].update(data)
with assert_setup_component(4):
setup_component(self.hass, rc.DOMAIN, self.config)
aioclient_mock.get(self.url, content=b'success')
self.hass.services.call(rc.DOMAIN, 'get_test', {})
self.hass.block_till_done()
assert len(aioclient_mock.mock_calls) == 1
def test_rest_command_form_data(self, aioclient_mock):
"""Call a rest command with post form data."""
data = {
'payload': 'test'
}
self.config[rc.DOMAIN]['post_test'].update(data)
with assert_setup_component(4):
setup_component(self.hass, rc.DOMAIN, self.config)
aioclient_mock.post(self.url, content=b'success')
self.hass.services.call(rc.DOMAIN, 'post_test', {})
self.hass.block_till_done()
assert len(aioclient_mock.mock_calls) == 1
assert aioclient_mock.mock_calls[0][2] == b'test'
def test_rest_command_get(self, aioclient_mock):
"""Call a rest command with get."""
with assert_setup_component(4):
setup_component(self.hass, rc.DOMAIN, self.config)
aioclient_mock.get(self.url, content=b'success')
self.hass.services.call(rc.DOMAIN, 'get_test', {})
self.hass.block_till_done()
assert len(aioclient_mock.mock_calls) == 1
def test_rest_command_delete(self, aioclient_mock):
"""Call a rest command with delete."""
with assert_setup_component(4):
setup_component(self.hass, rc.DOMAIN, self.config)
aioclient_mock.delete(self.url, content=b'success')
self.hass.services.call(rc.DOMAIN, 'delete_test', {})
self.hass.block_till_done()
assert len(aioclient_mock.mock_calls) == 1
def test_rest_command_post(self, aioclient_mock):
"""Call a rest command with post."""
data = {
'payload': 'data',
}
self.config[rc.DOMAIN]['post_test'].update(data)
with assert_setup_component(4):
setup_component(self.hass, rc.DOMAIN, self.config)
aioclient_mock.post(self.url, content=b'success')
self.hass.services.call(rc.DOMAIN, 'post_test', {})
self.hass.block_till_done()
assert len(aioclient_mock.mock_calls) == 1
assert aioclient_mock.mock_calls[0][2] == b'data'
def test_rest_command_put(self, aioclient_mock):
"""Call a rest command with put."""
data = {
'payload': 'data',
}
self.config[rc.DOMAIN]['put_test'].update(data)
with assert_setup_component(4):
setup_component(self.hass, rc.DOMAIN, self.config)
aioclient_mock.put(self.url, content=b'success')
self.hass.services.call(rc.DOMAIN, 'put_test', {})
self.hass.block_till_done()
assert len(aioclient_mock.mock_calls) == 1
assert aioclient_mock.mock_calls[0][2] == b'data'
| {
"content_hash": "11d1988301abdafed0d443bb4688b9b7",
"timestamp": "",
"source": "github",
"line_count": 223,
"max_line_length": 71,
"avg_line_length": 32.63677130044843,
"alnum_prop": 0.5846386369881835,
"repo_name": "morphis/home-assistant",
"id": "a62bddc4a0fa23c884380e100d88fd387483dec0",
"size": "7278",
"binary": false,
"copies": "4",
"ref": "refs/heads/snap-support",
"path": "tests/components/test_rest_command.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1601137"
},
{
"name": "Python",
"bytes": "5600477"
},
{
"name": "Ruby",
"bytes": "517"
},
{
"name": "Shell",
"bytes": "15144"
}
],
"symlink_target": ""
} |
# [START discoveryengine_v1beta_generated_DocumentService_ImportDocuments_async]
from google.cloud import discoveryengine_v1beta
async def sample_import_documents():
# Create a client
client = discoveryengine_v1beta.DocumentServiceAsyncClient()
# Initialize request argument(s)
inline_source = discoveryengine_v1beta.InlineSource()
inline_source.documents.schema_id = "schema_id_value"
request = discoveryengine_v1beta.ImportDocumentsRequest(
inline_source=inline_source,
parent="parent_value",
)
    # Make the request (the async client call must be awaited)
    operation = await client.import_documents(request=request)
print("Waiting for operation to complete...")
response = await operation.result()
# Handle the response
print(response)
# [END discoveryengine_v1beta_generated_DocumentService_ImportDocuments_async]
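# To execute the coroutine above (illustrative, not part of the generated
# sample):
#
#   import asyncio
#   asyncio.run(sample_import_documents())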
| {
"content_hash": "4e7476f47d50a235d057fa0f0f8fe3da",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 78,
"avg_line_length": 28.296296296296298,
"alnum_prop": 0.731675392670157,
"repo_name": "googleapis/google-cloud-python",
"id": "1ab3aa1b0e94fb179f5b4b9fc20a7a971831c2d2",
"size": "2176",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_import_documents_async.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2895"
},
{
"name": "Python",
"bytes": "5620713"
},
{
"name": "Shell",
"bytes": "51704"
}
],
"symlink_target": ""
} |
import sqlite3 as lite
from contextlib import contextmanager
# Database operations
#####################
# TODO use this to implement a streaming API to handle BIG data
def fetch(cursor, batch_size=1e3):
'''An iterator that uses fetchmany to keep memory usage down'''
while True:
records = cursor.fetchmany(int(batch_size))
if not records:
break
for record in records:
yield record
class MapDB:
def __init__(self, path):
self._path = path
# API
#####
@contextmanager
def cursor(self):
conn = lite.connect(self._path)
with conn:
cur = conn.cursor()
yield cur
conn.close()
# SQL synthesis
def create_table(self, tablename, columns, primary_key=None):
column_defs = ['{} {}'.format(col_name, col_type) for col_name, col_type in columns]
create_table = 'CREATE TABLE {tablename} ({columns}'.format(tablename=tablename,
columns=','.join(column_defs))
if primary_key:
create_table += ', PRIMARY KEY({})'.format(primary_key)
create_table += ')'
return create_table, ()
def drop_table(self, tablename):
drop_table = 'DROP TABLE IF EXISTS {tablename}'.format(tablename=tablename)
return drop_table, ()
def add_column(self, tablename, column_name, column_type):
add_column = 'ALTER TABLE {tablename} ADD {column_name} {column_type}'.format(
tablename=tablename, column_name=column_name, column_type=column_type)
return add_column, ()
def drop_column(self, tablename, column_name):
drop_column = 'ALTER TABLE {tablename} DROP COLUMN {column_name}'.format(
tablename=tablename, column_name=column_name)
return drop_column, ()
def insert(self, tablename, record_map):
        # Materialize the dict views so sqlite3 receives proper sequences
        # under Python 3.
        attrs, record = list(record_map.keys()), list(record_map.values())
value_placeholders = ['?'] * len(attrs)
insert = "INSERT INTO {tablename} ({columns}) VALUES ({values})".format(
tablename=tablename,
columns=','.join(attrs),
values=','.join(value_placeholders))
return insert, record
def insert_or_replace(self, tablename, record_map):
        attrs, record = list(record_map.keys()), list(record_map.values())
value_placeholders = ['?'] * len(attrs)
i_or_r = "INSERT OR REPLACE INTO {tablename} ({columns}) VALUES ({values})".format(
tablename=tablename,
columns=','.join(attrs),
values=','.join(value_placeholders))
return i_or_r, record
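# A minimal usage sketch, not part of the original module. The database path
# is a throwaway example value; ':memory:' would not work here because
# cursor() opens (and closes) a fresh connection per call.
if __name__ == '__main__':
    db = MapDB('/tmp/mapdb_example.sqlite')
    with db.cursor() as cur:
        cur.execute(*db.drop_table('words'))
        cur.execute(*db.create_table('words', [('word', 'TEXT'), ('count', 'INTEGER')]))
        cur.execute(*db.insert('words', {'word': 'free', 'count': 3}))
    with db.cursor() as cur:
        cur.execute('SELECT word, count FROM words')
        for record in fetch(cur, batch_size=100):
            print(record)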
| {
"content_hash": "84db646e1103c0a86f3bb8f413a9d358",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 88,
"avg_line_length": 32.16,
"alnum_prop": 0.650497512437811,
"repo_name": "pcattori/freetxt",
"id": "46c02360387b6db4b190150e112c4293ade36f05",
"size": "2412",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "freetxt/db.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "40586"
}
],
"symlink_target": ""
} |
"""
$description Japanese live TV streaming website with multiple channels including news, sports, entertainment and anime.
$url abema.tv
$type live, vod
$region Japan
"""
import hashlib
import hmac
import logging
import re
import struct
import time
import uuid
from base64 import urlsafe_b64encode
from binascii import unhexlify
from Crypto.Cipher import AES
from requests import Response
from requests.adapters import BaseAdapter
from streamlink.exceptions import NoStreamsError
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import useragents, validate
from streamlink.stream.hls import HLSStream, HLSStreamReader, HLSStreamWriter
from streamlink.utils.url import update_qsd
log = logging.getLogger(__name__)
class AbemaTVHLSStreamWriter(HLSStreamWriter):
def should_filter_sequence(self, sequence):
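        # Drop ad segments (their URIs contain "/tsad/") on top of the base
        # class's filtering rules.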
return "/tsad/" in sequence.segment.uri or super().should_filter_sequence(sequence)
class AbemaTVHLSStreamReader(HLSStreamReader):
__writer__ = AbemaTVHLSStreamWriter
class AbemaTVHLSStream(HLSStream):
__reader__ = AbemaTVHLSStreamReader
class AbemaTVLicenseAdapter(BaseAdapter):
'''
Handling abematv-license:// protocol to get real video key_data.
'''
STRTABLE = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
HKEY = b"3AF0298C219469522A313570E8583005A642E73EDD58E3EA2FB7339D3DF1597E"
_MEDIATOKEN_API = "https://api.abema.io/v1/media/token"
_LICENSE_API = "https://license.abema.io/abematv-hls"
_MEDIATOKEN_SCHEMA = validate.Schema({"token": validate.text})
_LICENSE_SCHEMA = validate.Schema({"k": validate.text,
"cid": validate.text})
def __init__(self, session, deviceid, usertoken):
self._session = session
self.deviceid = deviceid
self.usertoken = usertoken
super().__init__()
def _get_videokey_from_ticket(self, ticket):
params = {
"osName": "android",
"osVersion": "6.0.1",
"osLang": "ja_JP",
"osTimezone": "Asia/Tokyo",
"appId": "tv.abema",
"appVersion": "3.27.1"
}
auth_header = {"Authorization": f"Bearer {self.usertoken}"}
res = self._session.http.get(self._MEDIATOKEN_API, params=params,
headers=auth_header)
jsonres = self._session.http.json(res,
schema=self._MEDIATOKEN_SCHEMA)
mediatoken = jsonres['token']
res = self._session.http.post(self._LICENSE_API,
params={"t": mediatoken},
json={"kv": "a", "lt": ticket})
jsonres = self._session.http.json(res,
schema=self._LICENSE_SCHEMA)
cid = jsonres['cid']
k = jsonres['k']
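        # Decode the base58-style string k into an integer, then pack it into
        # 16 big-endian bytes: the encrypted video key.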
res = sum(self.STRTABLE.find(k[i]) * (58 ** (len(k) - 1 - i)) for i in range(len(k)))
encvideokey = struct.pack('>QQ', res >> 64, res & 0xffffffffffffffff)
# HKEY:
# RC4KEY = unhexlify('DB98A8E7CECA3424D975280F90BD03EE')
# RC4DATA = unhexlify(b'D4B718BBBA9CFB7D0192A58F9E2D146A'
# b'FC5DB29E4352DE05FC4CF2C1005804BB')
# rc4 = ARC4.new(RC4KEY)
# HKEY = rc4.decrypt(RC4DATA)
h = hmac.new(unhexlify(self.HKEY),
(cid + self.deviceid).encode("utf-8"),
digestmod=hashlib.sha256)
enckey = h.digest()
aes = AES.new(enckey, AES.MODE_ECB)
return aes.decrypt(encvideokey)
def send(self, request, stream=False, timeout=None, verify=True, cert=None,
proxies=None):
resp = Response()
resp.status_code = 200
ticket = re.findall(r"abematv-license://(.*)", request.url)[0]
resp._content = self._get_videokey_from_ticket(ticket)
return resp
def close(self):
return
@pluginmatcher(re.compile(r"""
https?://abema\.tv/(
now-on-air/(?P<onair>[^?]+)
|
video/episode/(?P<episode>[^?]+)
|
channels/.+?/slots/(?P<slots>[^?]+)
)
""", re.VERBOSE))
class AbemaTV(Plugin):
_CHANNEL = "https://api.abema.io/v1/channels"
_USER_API = "https://api.abema.io/v1/users"
_PRGM_API = "https://api.abema.io/v1/video/programs/{0}"
_SLOTS_API = "https://api.abema.io/v1/media/slots/{0}"
_PRGM3U8 = "https://vod-abematv.akamaized.net/program/{0}/playlist.m3u8"
_SLOTM3U8 = "https://vod-abematv.akamaized.net/slot/{0}/playlist.m3u8"
SECRETKEY = (b"v+Gjs=25Aw5erR!J8ZuvRrCx*rGswhB&qdHd_SYerEWdU&a?3DzN9B"
b"Rbp5KwY4hEmcj5#fykMjJ=AuWz5GSMY-d@H7DMEh3M@9n2G552Us$$"
b"k9cD=3TxwWe86!x#Zyhe")
_USER_SCHEMA = validate.Schema({"profile": {"userId": validate.text},
"token": validate.text})
_CHANNEL_SCHEMA = validate.Schema({"channels": [{"id": validate.text,
"name": validate.text,
"playback": {validate.optional("dash"):
validate.text,
"hls": validate.text}}]})
_PRGM_SCHEMA = validate.Schema({"terms": [{validate.optional("onDemandType"): int}]})
_SLOT_SCHEMA = validate.Schema({"slot": {"flags": {validate.optional("timeshiftFree"): bool}}})
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.session.http.headers.update({'User-Agent': useragents.CHROME})
def _generate_applicationkeysecret(self, deviceid):
deviceid = deviceid.encode("utf-8") # for python3
# plus 1 hour and drop minute and secs
# for python3 : floor division
ts_1hour = (int(time.time()) + 60 * 60) // 3600 * 3600
time_struct = time.gmtime(ts_1hour)
ts_1hour_str = str(ts_1hour).encode("utf-8")
h = hmac.new(self.SECRETKEY, digestmod=hashlib.sha256)
h.update(self.SECRETKEY)
tmp = h.digest()
for i in range(time_struct.tm_mon):
h = hmac.new(self.SECRETKEY, digestmod=hashlib.sha256)
h.update(tmp)
tmp = h.digest()
h = hmac.new(self.SECRETKEY, digestmod=hashlib.sha256)
h.update(urlsafe_b64encode(tmp).rstrip(b"=") + deviceid)
tmp = h.digest()
for i in range(time_struct.tm_mday % 5):
h = hmac.new(self.SECRETKEY, digestmod=hashlib.sha256)
h.update(tmp)
tmp = h.digest()
h = hmac.new(self.SECRETKEY, digestmod=hashlib.sha256)
h.update(urlsafe_b64encode(tmp).rstrip(b"=") + ts_1hour_str)
tmp = h.digest()
for i in range(time_struct.tm_hour % 5): # utc hour
h = hmac.new(self.SECRETKEY, digestmod=hashlib.sha256)
h.update(tmp)
tmp = h.digest()
return urlsafe_b64encode(tmp).rstrip(b"=").decode("utf-8")
def _is_playable(self, vtype, vid):
auth_header = {"Authorization": f"Bearer {self.usertoken}"}
if vtype == "episode":
res = self.session.http.get(self._PRGM_API.format(vid),
headers=auth_header)
jsonres = self.session.http.json(res, schema=self._PRGM_SCHEMA)
playable = False
for item in jsonres["terms"]:
if item.get("onDemandType", False) == 3:
playable = True
return playable
elif vtype == "slots":
res = self.session.http.get(self._SLOTS_API.format(vid),
headers=auth_header)
jsonres = self.session.http.json(res, schema=self._SLOT_SCHEMA)
return jsonres["slot"]["flags"].get("timeshiftFree", False) is True
def _get_streams(self):
deviceid = str(uuid.uuid4())
appkeysecret = self._generate_applicationkeysecret(deviceid)
json_data = {"deviceId": deviceid,
"applicationKeySecret": appkeysecret}
res = self.session.http.post(self._USER_API, json=json_data)
jsonres = self.session.http.json(res, schema=self._USER_SCHEMA)
        self.usertoken = jsonres['token']  # for authorization
matchresult = self.match
if matchresult.group("onair"):
onair = matchresult.group("onair")
if onair == "news-global":
self._CHANNEL = update_qsd(self._CHANNEL, {"division": "1"})
res = self.session.http.get(self._CHANNEL)
jsonres = self.session.http.json(res, schema=self._CHANNEL_SCHEMA)
channels = jsonres["channels"]
for channel in channels:
if onair == channel["id"]:
break
else:
raise NoStreamsError(self.url)
playlisturl = channel["playback"]["hls"]
elif matchresult.group("episode"):
episode = matchresult.group("episode")
if not self._is_playable("episode", episode):
log.error("Premium stream is not playable")
return {}
playlisturl = self._PRGM3U8.format(episode)
elif matchresult.group("slots"):
slots = matchresult.group("slots")
if not self._is_playable("slots", slots):
log.error("Premium stream is not playable")
return {}
playlisturl = self._SLOTM3U8.format(slots)
log.debug("URL={0}".format(playlisturl))
# hook abematv private protocol
self.session.http.mount("abematv-license://",
AbemaTVLicenseAdapter(self.session, deviceid,
self.usertoken))
return AbemaTVHLSStream.parse_variant_playlist(self.session, playlisturl)
__plugin__ = AbemaTV
| {
"content_hash": "208d7e7c11412b8c5a37e5c5f7f9c8d6",
"timestamp": "",
"source": "github",
"line_count": 262,
"max_line_length": 119,
"avg_line_length": 37.87404580152672,
"alnum_prop": 0.5784540965433841,
"repo_name": "streamlink/streamlink",
"id": "6881b44fd2180f2e7feeb96f889c83a83a650a83",
"size": "9923",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/streamlink/plugins/abematv.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "1513527"
},
{
"name": "Shell",
"bytes": "6427"
}
],
"symlink_target": ""
} |
from __future__ import annotations
import pickle
import pytest
pytestmark = pytest.mark.gpu
from distributed.protocol import deserialize, serialize
cuda = pytest.importorskip("numba.cuda")
np = pytest.importorskip("numpy")
@pytest.mark.parametrize("shape", [(0,), (5,), (4, 6), (10, 11), (2, 3, 5)])
@pytest.mark.parametrize("dtype", ["u1", "u4", "u8", "f4"])
@pytest.mark.parametrize("order", ["C", "F"])
@pytest.mark.parametrize("serializers", [("cuda",), ("dask",)])
def test_serialize_numba(shape, dtype, order, serializers):
if not cuda.is_available():
pytest.skip("CUDA is not available")
ary = np.arange(np.product(shape), dtype=dtype)
ary = np.ndarray(shape, dtype=ary.dtype, buffer=ary.data, order=order)
x = cuda.to_device(ary)
header, frames = serialize(x, serializers=serializers)
y = deserialize(header, frames, deserializers=serializers)
if serializers[0] == "cuda":
assert all(hasattr(f, "__cuda_array_interface__") for f in frames)
elif serializers[0] == "dask":
assert all(isinstance(f, memoryview) for f in frames)
hx = x.copy_to_host()
hy = y.copy_to_host()
assert (hx == hy).all()
@pytest.mark.parametrize("size", [0, 3, 10])
def test_serialize_numba_from_rmm(size):
np = pytest.importorskip("numpy")
rmm = pytest.importorskip("rmm")
if not cuda.is_available():
pytest.skip("CUDA is not available")
x_np = np.arange(size, dtype="u1")
x_np_desc = x_np.__array_interface__
(x_np_ptr, _) = x_np_desc["data"]
(x_np_size,) = x_np_desc["shape"]
x = rmm.DeviceBuffer(ptr=x_np_ptr, size=x_np_size)
header, frames = serialize(x, serializers=("cuda", "dask", "pickle"))
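    # Overriding the serialized type makes deserialize() rebuild the frames as
    # a Numba DeviceNDArray instead of an rmm.DeviceBuffer.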
header["type-serialized"] = pickle.dumps(cuda.devicearray.DeviceNDArray)
y = deserialize(header, frames, deserializers=("cuda", "dask", "pickle", "error"))
assert (x_np == y.copy_to_host()).all()
| {
"content_hash": "72619061e26c51ef5e9b41ab34c37f36",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 86,
"avg_line_length": 32.50847457627118,
"alnum_prop": 0.6444212721584984,
"repo_name": "dask/distributed",
"id": "b1b05d2e0bee1f37db1ea23dc52c2a7e01ed38c1",
"size": "1918",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "distributed/protocol/tests/test_numba.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "4220"
},
{
"name": "HTML",
"bytes": "16583"
},
{
"name": "JavaScript",
"bytes": "9337"
},
{
"name": "Jinja",
"bytes": "17081"
},
{
"name": "Python",
"bytes": "3746516"
},
{
"name": "Shell",
"bytes": "2030"
}
],
"symlink_target": ""
} |
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoAlertPresentException
import unittest, time, re
class First_test(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
# self.driver.implicitly_wait(30)
self.base_url = "https://www.katalon.com/"
self.verificationErrors = []
self.accept_next_alert = True
def test_untitled_test_case(self):
driver = self.driver
driver.get("https://www.google.com.ua/")
driver.find_element_by_id("lst-ib").click()
driver.find_element_by_id("lst-ib").clear()
driver.find_element_by_id("lst-ib").send_keys("selenium webdriver")
driver.find_element_by_id("lst-ib").send_keys(Keys.ENTER)
driver.find_element_by_link_text(u"Что такое Selenium WebDriver? / Хабрахабр").click()
def is_element_present(self, how, what):
try:
self.driver.find_element(by=how, value=what)
except NoSuchElementException as e:
return False
return True
def is_alert_present(self):
try:
self.driver.switch_to_alert()
except NoAlertPresentException as e:
return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally:
self.accept_next_alert = True
def tearDown(self):
self.driver.quit()
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
unittest.main()
| {
"content_hash": "888eb5e52284912b8821f1abbbf3b43b",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 94,
"avg_line_length": 32.932203389830505,
"alnum_prop": 0.6289243437982501,
"repo_name": "Slonik20007/python_training",
"id": "efed277562ab0a2968899dfbc3be89594ec331d4",
"size": "1984",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "First_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1984"
}
],
"symlink_target": ""
} |