Search is not available for this dataset
text
stringlengths 75
104k
|
---|
def parse_select(cls, text: str) -> Set:
    """
    Parse the column list of a select clause.

    :param text: comma-separated column names, e.g. ``"col1, col2"``,
        or ``"*"`` for every column
    :return: the ALL_COLUMNS sentinel, or a set of column names
    :raises InvalidParams: when no usable column name is present
    """
    if text == '*':
        return ALL_COLUMNS  # sentinel meaning "every column"
    names = {piece.strip() for piece in text.split(',')}
    names.discard('')  # drop empty entries such as ",," or trailing commas
    if not names:
        raise InvalidParams("No column(s) selected")
    return names
|
def parse_load_fk(cls, data: Dict[str, List[Dict[str, object]]]) -> Dict[str, List[Dict[str, object]]]:
    """
    Normalize a "loadfk" request structure.

    Each column's value may be ``None``, a role string, a config dict, or a
    list of any of those; every form is expanded into a full config dict
    with the keys ``role``/``as``/``table``/``loadfk``.

    :param data: {
        <column>: role,
        <column2>: role,
        <column>: {
            'role': role,
            'loadfk': { ... },
        },
    :return: {
        <column>: [{
            'role': role,
        }],
        ...
        <column3>: [{
            'role': role,
            'loadfk': { ... },
        }],
    }
    :raises InvalidParams: when ``data`` or one of its values has an
        unsupported shape.
    """
    template = {'role': None, 'as': None, 'table': None, 'loadfk': None}
    expected_types = {'role': str, 'as': str, 'table': str, 'loadfk': dict}

    def normalize_mapping(raw):
        # Keep only known keys whose value has the expected type.
        cleaned = {key: val for key, val in raw.items()
                   if key in expected_types and isinstance(val, expected_types[key])}
        if not cleaned:
            return template.copy()
        if cleaned.get('loadfk'):
            # Nested "loadfk" blocks are normalized recursively.
            cleaned['loadfk'] = cls.parse_load_fk(cleaned['loadfk'])
        for key, val in template.items():
            cleaned.setdefault(key, val)
        return cleaned

    def normalize(value, no_list=True):
        if value is None:
            return template.copy()
        if not no_list and isinstance(value, list):
            # <column>: [value1, value2, ...]
            return [normalize(item) for item in value]
        if isinstance(value, str):
            # <column>: role
            item = template.copy()
            item['role'] = value
            return item
        if isinstance(value, dict):
            # {'role': <str>, 'as': <str>, ...}
            return normalize_mapping(value)
        raise InvalidParams('Invalid syntax for "loadfk": %s' % value)

    if not isinstance(data, dict):
        raise InvalidParams('Invalid syntax for "loadfk": %s' % data)
    result = {}
    for column, value in data.items():
        normalized = normalize(value, False)
        # Every column maps to a *list* of config dicts.
        result[column] = normalized if isinstance(normalized, list) else [normalized]
    return result
|
def add_condition(self, field_name, op, value):
    """
    Validate a query condition and append it to ``self.conditions``.

    ``self.view`` is required; raises a ParamsException subclass on failure.

    :param field_name: column the condition applies to
    :param op: a SQL_OP member, or its textual form (looked up in SQL_OP.txt2op)
    :param value: right-hand operand of the condition
    :return: None
    :raises SQLOperatorInvalid: when the textual operator is unknown
    """
    if not isinstance(op, SQL_OP):
        # Textual operator: translate it, rejecting unknown names.
        if op not in SQL_OP.txt2op:
            raise SQLOperatorInvalid(op)
        op = SQL_OP.txt2op.get(op)
    self.conditions.append([field_name, op, value])
|
def _packb2(obj, **options):
    """
    Serialize a Python object into MessagePack bytes (Python 2 variant).

    Args:
        obj: a Python object
    Kwargs:
        ext_handlers (dict): maps a custom type to a callable that packs an
            instance of the type into an Ext object
        force_float_precision (str): "single" or "double" to force the
            IEEE-754 precision used when packing floats
    Returns:
        A 'str' containing serialized MessagePack bytes.
    Raises:
        UnsupportedType(PackException):
            Object type not supported for packing.
    Example:
        >>> umsgpack.packb({u"compact": True, u"schema": 0})
        '\x82\xa7compact\xc3\xa6schema\x00'
        >>>
    """
    buffer = io.BytesIO()
    _pack2(obj, buffer, **options)
    return buffer.getvalue()
|
def _packb3(obj, **options):
    """
    Serialize a Python object into MessagePack bytes (Python 3 variant).

    Args:
        obj: a Python object
    Kwargs:
        ext_handlers (dict): maps a custom type to a callable that packs an
            instance of the type into an Ext object
        force_float_precision (str): "single" or "double" to force the
            IEEE-754 precision used when packing floats
    Returns:
        A 'bytes' containing serialized MessagePack bytes.
    Raises:
        UnsupportedType(PackException):
            Object type not supported for packing.
    Example:
        >>> umsgpack.packb({u"compact": True, u"schema": 0})
        b'\x82\xa7compact\xc3\xa6schema\x00'
        >>>
    """
    buffer = io.BytesIO()
    _pack3(obj, buffer, **options)
    return buffer.getvalue()
|
def _unpackb2(s, **options):
"""
Deserialize MessagePack bytes into a Python object.
Args:
s: a 'str' or 'bytearray' containing serialized MessagePack bytes
Kwargs:
ext_handlers (dict): dictionary of Ext handlers, mapping integer Ext
type to a callable that unpacks an instance of
Ext into an object
use_ordered_dict (bool): unpack maps into OrderedDict, instead of
unordered dict (default False)
allow_invalid_utf8 (bool): unpack invalid strings into instances of
InvalidString, for access to the bytes
(default False)
Returns:
A Python object.
Raises:
TypeError:
Packed data type is neither 'str' nor 'bytearray'.
InsufficientDataException(UnpackException):
Insufficient data to unpack the serialized object.
InvalidStringException(UnpackException):
Invalid UTF-8 string encountered during unpacking.
UnsupportedTimestampException(UnpackException):
Unsupported timestamp format encountered during unpacking.
ReservedCodeException(UnpackException):
Reserved code encountered during unpacking.
UnhashableKeyException(UnpackException):
Unhashable key encountered during map unpacking.
The serialized map cannot be deserialized into a Python dictionary.
DuplicateKeyException(UnpackException):
Duplicate key encountered during map unpacking.
Example:
>>> umsgpack.unpackb(b'\x82\xa7compact\xc3\xa6schema\x00')
{u'compact': True, u'schema': 0}
>>>
"""
if not isinstance(s, (str, bytearray)):
raise TypeError("packed data must be type 'str' or 'bytearray'")
return _unpack(io.BytesIO(s), options)
|
def _unpackb3(s, **options):
"""
Deserialize MessagePack bytes into a Python object.
Args:
s: a 'bytes' or 'bytearray' containing serialized MessagePack bytes
Kwargs:
ext_handlers (dict): dictionary of Ext handlers, mapping integer Ext
type to a callable that unpacks an instance of
Ext into an object
use_ordered_dict (bool): unpack maps into OrderedDict, instead of
unordered dict (default False)
allow_invalid_utf8 (bool): unpack invalid strings into instances of
InvalidString, for access to the bytes
(default False)
Returns:
A Python object.
Raises:
TypeError:
Packed data type is neither 'bytes' nor 'bytearray'.
InsufficientDataException(UnpackException):
Insufficient data to unpack the serialized object.
InvalidStringException(UnpackException):
Invalid UTF-8 string encountered during unpacking.
UnsupportedTimestampException(UnpackException):
Unsupported timestamp format encountered during unpacking.
ReservedCodeException(UnpackException):
Reserved code encountered during unpacking.
UnhashableKeyException(UnpackException):
Unhashable key encountered during map unpacking.
The serialized map cannot be deserialized into a Python dictionary.
DuplicateKeyException(UnpackException):
Duplicate key encountered during map unpacking.
Example:
>>> umsgpack.unpackb(b'\x82\xa7compact\xc3\xa6schema\x00')
{'compact': True, 'schema': 0}
>>>
"""
if not isinstance(s, (bytes, bytearray)):
raise TypeError("packed data must be type 'bytes' or 'bytearray'")
return _unpack(io.BytesIO(s), options)
|
def view_bind(app, cls_url, view_cls: Type['BaseView']):
    """
    Bind a view class's interface methods to the web application's router.

    :param app: application wrapper owning the raw router
    :param cls_url: URL fragment for the view; when falsy, the lowercased
        view class name is used
    :param view_cls: the BaseView subclass to bind
    :return: None
    """
    if view_cls._no_route: return
    # BUG FIX: this previously read ``view_cls.__class__.__name__`` — since
    # ``view_cls`` is itself a class, that is the *metaclass* name (usually
    # 'type'), not the view's name. ``view_cls.__name__`` is the class name.
    cls_url = cls_url or view_cls.__name__.lower()
    def add_route(name, route_info, beacon_info):
        for method in route_info['method']:
            # A fresh placeholder handler per (name, method) pair; real
            # dispatch is resolved later through ``app.route._beacons``.
            async def beacon(request): pass
            route_key = route_info['url'] if route_info['url'] else name
            app._raw_app.router.add_route(method, urljoin('/api', cls_url, route_key), beacon)
            app.route._beacons[beacon] = beacon_info
    # noinspection PyProtectedMember
    for name, route_info_lst in view_cls._interface.items():
        for route_info in route_info_lst:
            real_handler = getattr(view_cls, name, None)
            # TODO: interface entries without handlers should be removed upstream
            if real_handler is None: continue
            handler_name = '%s.%s' % (view_cls.__name__, real_handler.__name__)
            assert iscoroutinefunction(real_handler), "Add 'async' before interface function %r" % handler_name
            beacon_info = {
                'view': view_cls,
                'name': name,
                'handler': real_handler,
                'route_info': route_info
            }
            add_route(name, route_info, beacon_info)
|
def add_static(self, prefix, path, **kwargs):
    """
    Register a static-file route.

    :param prefix: URL prefix the files are served under
    :param path: directory on disk holding the files
    :param kwargs: extra options forwarded to the underlying router
    :return: None
    """
    entry = (prefix, path, kwargs)
    self.statics.append(entry)
|
def parse_query_by_json(data):
    """
    Parse a JSON query description into tables/columns/where expressions.

    The JSON payload must contain the keys 'tables', 'columns' and
    'conditions'. Conditions use nested prefix notation, e.g.::

        ['and',
            ['==', 't1', 'col1', val1],
            ['!=', 't1', 'col2', 't2', 'col2'],
            ['and',
                ['==', 't1', 'col3', val3],
                ['!=', 't2', 'col4', val4],
            ]
        ]

    :param data: JSON string describing the query
    :return: dict with 'tables', 'columns' (Column / QueryExpression items)
        and 'wheres' (ConditionExpression tree)
    :raises QueryException: on missing keys or malformed statements
    """
    data = json.loads(data)
    # All three top-level sections are mandatory.
    for i in ('tables', 'columns', 'conditions'):
        if i not in data:
            raise QueryException("query: %s not found" % i)
    tables = data['tables']
    columns = data['columns']
    conditions = data['conditions']
    def parse_stmt(s, expr_cls, all_op, multi_items_op):
        # NOTE(review): an empty statement returns [] rather than an
        # expression object — confirm downstream code tolerates this.
        if len(s) == 0:
            return []
        if s[0] in all_op:
            if s[0] in multi_items_op:
                # Variadic operator ('and'/'or', or column arithmetic):
                # recurse into every operand.
                values = []
                for i in s[1:]:
                    values.append(parse_stmt(i, expr_cls, all_op, multi_items_op))
                return expr_cls(None, s[0], None, values=values)
            else:
                if len(s) == 5:
                    # t1.c1 == t2.c2 — both operands are table columns,
                    # and both tables must be declared in 'tables'.
                    lhs = Column(s[2], table=s[1])
                    rhs = Column(s[4], table=s[3])
                    if (s[1] not in tables) or (s[3] not in tables):
                        raise QueryException('Bad query')
                    return expr_cls(lhs, s[0], rhs)
                else:
                    # t1.c1 == val
                    # NOTE(review): this branch assumes a 4-item statement
                    # (s[3] is the literal value) — shorter lists would
                    # raise IndexError here; confirm input is pre-validated.
                    lhs = Column(s[2], table=s[1])
                    if s[1] not in tables:
                        raise QueryException('Bad query')
                    return expr_cls(lhs, s[0], s[3])
        else:
            raise QueryException('Bad query')
    query_op = ('+', '-', '*', '/')
    query_columns = []
    for i in columns:
        if len(i) == 2:
            # Plain [table, column] pair.
            query_columns.append(Column(i[1], table=i[0]))
        else:
            # Arithmetic expression over columns.
            query_columns.append(parse_stmt(i, QueryExpression, query_op, query_op))
    wheres = parse_stmt(conditions, ConditionExpression, _operator_map, ('and', 'or',))
    return {
        'tables': tables,
        'columns': query_columns,
        'wheres': wheres,
    }
|
def validate(method):
    """
    Decorator for config accessors: reject unsupported option names.

    The wrapped method is only invoked when ``name`` appears in
    ``self.allowed_opts``; otherwise ValueError is raised.
    """
    # Error message template for unknown option names
    name_error = 'configuration option "{}" is not supported'
    @functools.wraps(method)
    def validator(self, name, *args):
        if name in self.allowed_opts:
            return method(self, name, *args)
        raise ValueError(name_error.format(name))
    return validator
|
def run(self, ctx):
    """
    Run the current phase's assertions against the given context.
    """
    if ctx.reverse:
        # Negated contexts flip the engine's assertion logic.
        self.engine.reverse()
    if self.engine.empty:
        raise AssertionError('grappa: no assertions to run')
    try:
        # Run assertions in series; the result carries any error.
        return self.run_assertions(ctx)
    except Exception as err:
        # Genuine internal grappa errors propagate untouched...
        if getattr(err, '__legit__', False):
            raise err
        # ...anything else is rendered as an assertion error report.
        return self.render_error(ctx, err)
|
def observe(matcher):
    """
    Internal decorator that fires operator hooks around matcher execution.

    Calls ``before`` prior to matching, then ``after_error`` or
    ``after_success`` depending on the outcome, and finally decides
    whether diff output should be enabled.
    """
    @functools.wraps(matcher)
    def observer(self, subject, *expected, **kw):
        # Pre-match hook, when the operator defines one
        if hasattr(self, 'before'):
            self.before(subject, *expected, **kw)
        # Run the actual matcher
        outcome = matcher(self, subject, *expected, **kw)
        # Post-match hooks: anything other than True counts as an error
        if outcome is not True and hasattr(self, 'after_error'):
            self.after_error(outcome, subject, *expected, **kw)
        if outcome is True and hasattr(self, 'after_success'):
            self.after_success(subject, *expected, **kw)
        # Diff output only makes sense when everything compared is a string
        if not hasattr(self, 'show_diff'):
            self.show_diff = all([
                isinstance(subject, six.string_types),
                all([isinstance(x, six.string_types) for x in expected]),
            ])
        return outcome
    return observer
|
def run_matcher(self, subject, *expected, **kw):
    """
    Execute the operator's matcher function and evaluate the outcome
    against the (possibly negated) assertion context.
    """
    # Record the expectation for later error reporting
    self.expected = expected
    call_args = (subject,)
    if self.kind == OperatorTypes.MATCHER:
        call_args += expected
    try:
        outcome = self.match(*call_args, **kw)
    except Exception as error:
        return self._make_error(error=error)
    reasons = []
    # Matchers may return (result, reasons)
    if isinstance(outcome, tuple):
        outcome, reasons = outcome
    # A result that agrees with the context's negation flag passes
    if outcome is False and self.ctx.negate:
        return True
    if outcome is True and not self.ctx.negate:
        return True
    return self._make_error(reasons=reasons)
|
def run(self, *args, **kw):
    """
    Run the operator with the subject arguments to test.

    Attribute operators match on the assertion context directly; matcher
    operators delegate to ``run_matcher``.
    """
    log.debug('[operator] run "{}" with arguments: {}'.format(
        self.__class__.__name__, args
    ))
    if self.kind == OperatorTypes.ATTRIBUTE:
        return self.match(self.ctx)
    return self.run_matcher(*args, **kw)
|
def operator(name=None, operators=None, aliases=None, kind=None):
    """
    Registers a new operator function in the test engine.

    Usable as a bare decorator (``@operator``) or configured
    (``@operator(name=..., operators=...)``).

    Arguments:
        name (str|function): operator name, or the decorated function
            itself when used as a bare decorator.
        operators (list|tuple): additional DSL keywords for the operator.
        aliases (list|tuple): optional operator aliases.
        kind (str): operator kind (attribute/matcher).

    Returns:
        function
    """
    def delegator(assertion, subject, expected, *args, **kw):
        return assertion.test(subject, expected, *args, **kw)

    def decorator(fn):
        operator = Operator(fn=fn, aliases=aliases, kind=kind)
        # Bare-decorator use passes the function via ``name``
        _name = name if isinstance(name, six.string_types) else fn.__name__
        operator.operators = (_name,)
        _operators = operators
        if isinstance(_operators, list):
            _operators = tuple(_operators)
        if isinstance(_operators, tuple):
            operator.operators += _operators
        # Register operator
        Engine.register(operator)
        return functools.partial(delegator, operator)

    return decorator(name) if inspect.isfunction(name) else decorator
|
def attribute(*args, **kw):
    """
    Registers a new attribute-only operator function in the test engine.

    Arguments:
        *args: positional arguments forwarded to ``operator``.
        **kw: keyword arguments forwarded to ``operator``.

    Returns:
        function
    """
    # Delegate to ``operator`` with the kind pinned to ATTRIBUTE.
    return operator(kind=Operator.Type.ATTRIBUTE, *args, **kw)
|
def use(plugin):
    """
    Register a plugin in grappa.

    ``plugin`` may be a plain function, or an object that implements a
    ``register`` method; either is invoked with the ``grappa.Engine``.

    Arguments:
        plugin (function|module): grappa plugin object to register.

    Raises:
        ValueError: if ``plugin`` is not a valid interface.

    Example::
        import grappa

        class MyOperator(grappa.Operator):
            pass

        def my_plugin(engine):
            engine.register(MyOperator)

        grappa.use(my_plugin)
    """
    log.debug('register new plugin: {}'.format(plugin))
    # Function plugins are called directly with the engine
    if inspect.isfunction(plugin):
        return plugin(Engine)
    # Object/module plugins must expose a register() hook
    if plugin and hasattr(plugin, 'register'):
        return plugin.register(Engine)
    raise ValueError('invalid plugin: must be a function or '
                     'implement register() method')
|
def load():
    """
    Loads the built-in operators into the global test engine.
    """
    for entry in operators:
        module, symbols = entry[0], entry[1:]
        path = 'grappa.operators.{}'.format(module)
        # Import the operator module dynamically
        imported = __import__(path, None, None, symbols)
        # Register each exported operator with the engine
        for symbol in symbols:
            Engine.register(getattr(imported, symbol))
|
def register_operators(*operators):
    """
    Registers one or multiple operators in the test engine.

    Raises:
        NotImplementedError: if an argument is not a valid operator.
        ValueError: if an operator DSL name is already registered.
    """
    def validate(operator):
        # Reject anything that is not a recognised operator type.
        if isoperator(operator):
            return True
        raise NotImplementedError('invalid operator: {}'.format(operator))

    def register(operator):
        # Register operator by DSL keywords
        for name in operator.operators:
            # Refuse names already claimed by another operator
            if name in Engine.operators:
                raise ValueError('operator name "{}" from {} is already '
                                 'in use by other operator'.format(
                                     name,
                                     operator.__name__
                                 ))
            # Register operator by name
            Engine.operators[name] = operator

    # Validate and register each operator. (This was previously a list
    # comprehension used purely for its side effects.)
    for operator in operators:
        if validate(operator):
            register(operator)
|
def find_address_file(self):
    """
    Find the OMXPlayer DBus connection address file.

    Assumes there is an alive OMXPlayer process; polls /tmp until the
    file shows up and records the newest match on ``self.path``.
    :return:
    """
    candidates = []
    while not candidates:
        # glob doesn't support regexps, so drop the .pid files afterwards
        def not_pid_file(path):
            return not path.endswith('.pid')
        candidates = [p for p in glob('/tmp/omxplayerdbus.*') if not_pid_file(p)]
        # Newest file last, so the most recent player wins
        candidates.sort(key=os.path.getmtime)
        time.sleep(0.05)
    self.path = candidates[-1]
|
def load(self, source, pause=False):
    """
    Load a new source (a file path or URL) by killing the current
    ``omxplayer`` process and forking a new one.

    Args:
        source (string): Path to the file to play or URL
        pause (bool): when True, pause playback right after loading
    """
    self._source = source
    self._load_source(source)
    if not pause:
        return
    time.sleep(0.5)  # Wait for the DBus interface to be initialised
    self.pause()
|
def set_volume(self, volume):
    """
    Set the player volume.

    Args:
        volume (float): volume in the interval [0, 10]
    """
    # A volume of exactly 0 isn't handled correctly upstream, so a
    # vanishingly small positive value is used to the same effect.
    level = 1e-10 if volume == 0 else volume
    return self._player_interface_property('Volume', dbus.Double(level))
|
def set_rate(self, rate):
    """
    Set the playback rate as a multiple of the default playback speed.

    Examples:
        >>> player.set_rate(2)    # twice normal speed
        >>> player.set_rate(0.5)  # half speed
    """
    # Cache the value reported back by the player interface
    self._rate = self._player_interface_property('Rate', dbus.Double(rate))
    return self._rate
|
def pause(self):
    """
    Pause playback and notify pause listeners.
    """
    self._player_interface.Pause()
    self._is_playing = False
    # Fire the pause event callback with the player itself
    self.pauseEvent(self)
|
def play_pause(self):
    """
    Toggle between playing and paused, firing the matching event.
    """
    self._player_interface.PlayPause()
    self._is_playing = not self._is_playing
    # Notify listeners of whichever state we just entered
    event = self.playEvent if self._is_playing else self.pauseEvent
    event(self)
|
def seek(self, relative_position):
    """
    Seek the video by ``relative_position`` seconds (relative to the
    current position).

    Args:
        relative_position (float): number of seconds to seek by.
    """
    # The DBus interface expects microseconds
    micros = Int64(1000.0 * 1000 * relative_position)
    self._player_interface.Seek(micros)
    self.seekEvent(self, relative_position)
|
def set_position(self, position):
    """
    Set playback to ``position`` seconds from the start of the video.

    Args:
        position (float): The position in seconds.
    """
    # The DBus interface expects microseconds; the object path is unused
    micros = Int64(position * 1000.0 * 1000)
    self._player_interface.SetPosition(ObjectPath("/not/used"), micros)
    self.positionEvent(self, position)
|
def set_video_pos(self, x1, y1, x2, y2):
    """
    Set the video position on the screen.

    Args:
        x1 (int): Top left x coordinate (px)
        y1 (int): Top left y coordinate (px)
        x2 (int): Bottom right x coordinate (px)
        y2 (int): Bottom right y coordinate (px)
    """
    # The interface takes the rectangle as one space-separated string
    position = "%s %s %s %s" % (x1, y1, x2, y2)
    self._player_interface.VideoPos(ObjectPath('/not/used'), String(position))
|
def video_pos(self):
    """
    Returns:
        [int, int, int, int]: video spatial position (x1, y1, x2, y2)
        where (x1, y1) is top left and (x2, y2) is bottom right, in px.
    """
    # The interface reports the rectangle as one space-separated string
    raw = self._player_interface.VideoPos(ObjectPath('/not/used'))
    return [int(part) for part in raw.split(" ")]
|
def set_video_crop(self, x1, y1, x2, y2):
    """
    Crop the video to the given rectangle.

    Args:
        x1 (int): Top left x coordinate (px)
        y1 (int): Top left y coordinate (px)
        x2 (int): Bottom right x coordinate (px)
        y2 (int): Bottom right y coordinate (px)
    """
    # The interface takes the rectangle as one space-separated string
    crop = "%s %s %s %s" % (x1, y1, x2, y2)
    self._player_interface.SetVideoCropPos(ObjectPath('/not/used'), String(crop))
|
def is_playing(self):
    """
    Returns:
        bool: whether the player is currently playing
    """
    # Refresh the cached flag from the live playback status
    self._is_playing = self.playback_status() == "Playing"
    logger.info("Playing?: %s" % self._is_playing)
    return self._is_playing
|
def play_sync(self):
    """
    Play the video and block whilst the video is playing.

    Polls ``is_playing`` every 50 ms; returns when playback stops or
    when the DBus connection dies (e.g. the player process exits).
    """
    self.play()
    logger.info("Playing synchronously")
    try:
        # Give playback a moment to actually start before polling
        time.sleep(0.05)
        logger.debug("Wait for playing to start")
        while self.is_playing():
            time.sleep(0.05)
    except DBusException:
        # The player process likely died; treat it as end of playback
        logger.error(
            "Cannot play synchronously any longer as DBus calls timed out."
        )
|
def play(self):
    """
    Start playback asynchronously, returning control to the caller
    immediately.
    """
    # Already playing: nothing to do
    if self.is_playing():
        return
    self.play_pause()
    self._is_playing = True
    self.playEvent(self)
|
def quit(self):
    """
    Quit the player, blocking until the process has died.
    """
    if self._process is None:
        logger.debug('Quit was called after self._process had already been released')
        return
    try:
        logger.debug('Quitting OMXPlayer')
        pgid = os.getpgid(self._process.pid)
        # Terminate the whole process group so child processes die too
        os.killpg(pgid, signal.SIGTERM)
        logger.debug('SIGTERM Sent to pid: %s' % pgid)
        # Wait for the monitor thread to observe the death
        self._process_monitor.join()
    except OSError:
        logger.error('Could not find the process to kill')
    self._process = None
|
def render_to_response(self, context, **response_kwargs):
    """
    Render the given context to a response, using the page template for
    ajax requests and the regular template list otherwise.
    """
    is_ajax = self.request.is_ajax()
    template = self.page_template if is_ajax else self.get_template_names()
    return self.response_class(
        request=self.request,
        template=template,
        context=context,
        **response_kwargs
    )
|
def translate_value(document_field, form_value):
    """
    Translate a raw form value into the value mongo should store for the
    given document field (dereferencing ReferenceFields by id).
    """
    if not isinstance(document_field, ReferenceField):
        return form_value
    # Reference fields hold the target document, looked up by id;
    # a falsy form value clears the reference.
    if not form_value:
        return None
    return document_field.document_type.objects.get(id=form_value)
|
def trim_field_key(document, field_key):
    """
    Shrink ``field_key`` (splitting on "_") until it names an attribute
    on ``document``.

    Returns:
        (key, left_over_array): the matched key (possibly empty) and the
        trimmed-off trailing parts, in their original order.
    """
    leftovers = []
    candidate = field_key
    # Strip one trailing "_"-separated part per pass until we match
    while candidate and not hasattr(document, candidate):
        parts = candidate.split("_")
        leftovers.append(parts.pop())
        candidate = u"_".join(parts)
    leftovers.reverse()
    return candidate, leftovers
|
def has_edit_permission(self, request):
    """Whether the requesting user may edit this object (active staff)."""
    user = request.user
    return user.is_authenticated and user.is_active and user.is_staff
|
def has_add_permission(self, request):
    """Whether the requesting user may add this object (active staff)."""
    user = request.user
    return user.is_authenticated and user.is_active and user.is_staff
|
def has_delete_permission(self, request):
    """Whether the requesting user may delete this object (active superuser)."""
    user = request.user
    return user.is_authenticated and user.is_active and user.is_superuser
|
def get_form_field_dict(self, model_dict):
    """
    Takes a model dictionary representation and creates a dictionary
    keyed by form field. Each value is a keyed 4 tuple of:
    (widget, mode_field_instance, model_field_type, field_key)
    """
    return_dict = OrderedDict()
    # Workaround: mongoengine doesn't preserve form fields ordering from metaclass __new__
    if hasattr(self.model, 'Meta') and hasattr(self.model.Meta, 'form_fields_ordering'):
        # BUG FIX: this used ``model_dict.iterkeys()``, which only exists on
        # Python 2 dicts; plain membership testing works on both 2 and 3.
        field_order_list = tuple(form_field for form_field
                                 in self.model.Meta.form_fields_ordering
                                 if form_field in model_dict)
        return_dict = OrderedDict.fromkeys(field_order_list)
    for field_key, field_dict in sorted(model_dict.items()):
        # Keys starting with "_" are metadata, not form fields
        if not field_key.startswith("_"):
            widget = field_dict.get('_widget', None)
            if widget is None:
                # Nested document: recurse and carry over its field type
                return_dict[field_key] = self.get_form_field_dict(field_dict)
                return_dict[field_key].update({'_field_type': field_dict.get('_field_type', None)})
            else:
                return_dict[field_key] = FieldTuple(widget,
                                                    field_dict.get('_document_field', None),
                                                    field_dict.get('_field_type', None),
                                                    field_dict.get('_key', None))
    return return_dict
|
def set_form_fields(self, form_field_dict, parent_key=None, field_type=None):
    """
    Set the form fields for every key in the form_field_dict.

    Params:
        form_field_dict -- a dictionary created by get_form_field_dict
        parent_key -- the key for the previous key in the recursive call
        field_type -- used to determine what kind of field we are setting
    """
    for form_key, field_value in form_field_dict.items():
        # Nested calls prefix the child key with its parent's key
        form_key = make_key(parent_key, form_key) if parent_key is not None else form_key
        if isinstance(field_value, tuple):
            set_list_class = False
            base_key = form_key
            # Style list fields
            if ListField in (field_value.field_type, field_type):
                # Nested lists/embedded docs need special care to get
                # styles to work out nicely.
                if parent_key is None or ListField == field_value.field_type:
                    if field_type != EmbeddedDocumentField:
                        field_value.widget.attrs['class'] += ' listField {0}'.format(form_key)
                        set_list_class = True
                else:
                    field_value.widget.attrs['class'] += ' listField'
                # Compute number value for list key
                list_keys = [field_key for field_key in self.form.fields.keys()
                             if has_digit(field_key)]
                key_int = 0
                # Find the first unused numeric suffix for this key
                while form_key in list_keys:
                    key_int += 1
                    form_key = make_key(form_key, key_int)
            if parent_key is not None:
                # Get the base key for our embedded field class
                valid_base_keys = [model_key for model_key in self.model_map_dict.keys()
                                   if not model_key.startswith("_")]
                while base_key not in valid_base_keys and base_key:
                    base_key = make_key(base_key, exclude_last_string=True)
                # We need to remove the trailing number from the key
                # so that grouping will occur on the front end when we have a list.
                embedded_key_class = None
                if set_list_class:
                    # NOTE(review): ``.format(base_key)`` has no placeholder in
                    # this string, so base_key is ignored — possibly meant
                    # " listField {0}"; confirm intended CSS class.
                    field_value.widget.attrs['class'] += " listField".format(base_key)
                    # NOTE(review): ``field_key`` is only bound inside the list
                    # comprehension above on Python 2; on Python 3 this line
                    # raises NameError — likely should be ``form_key``. TODO confirm.
                    embedded_key_class = make_key(field_key, exclude_last_string=True)
                field_value.widget.attrs['class'] += " embeddedField"
                # Setting the embedded key correctly allows to visually nest the
                # embedded documents on the front end.
                if base_key == parent_key:
                    field_value.widget.attrs['class'] += ' {0}'.format(base_key)
                else:
                    field_value.widget.attrs['class'] += ' {0} {1}'.format(base_key, parent_key)
                if embedded_key_class is not None:
                    field_value.widget.attrs['class'] += ' {0}'.format(embedded_key_class)
            default_value = self.get_field_value(form_key)
            # Style embedded documents
            if isinstance(default_value, list) and len(default_value) > 0:
                key_index = int(form_key.split("_")[-1])
                new_base_key = make_key(form_key, exclude_last_string=True)
                for list_value in default_value:
                    # Note, this is copied every time so each widget gets a different class
                    list_widget = deepcopy(field_value.widget)
                    new_key = make_key(new_base_key, six.text_type(key_index))
                    list_widget.attrs['class'] += " {0}".format(make_key(base_key, key_index))
                    self.set_form_field(list_widget, field_value.document_field, new_key, list_value)
                    key_index += 1
            else:
                self.set_form_field(field_value.widget, field_value.document_field,
                                    form_key, default_value)
        elif isinstance(field_value, dict):
            # Nested document dictionary: recurse with this key as parent
            self.set_form_fields(field_value, form_key, field_value.get("_field_type", None))
|
def set_form_field(self, widget, model_field, field_key, default_value):
    """
    Create a single form field and attach it to ``self.form``.

    Params:
        widget -- the widget to use for displaying the model_field
        model_field -- the field on the model to create a form field with
        field_key -- the name for the field on the form
        default_value -- the value to give for the field (Default: None)
    """
    # Empty lists cause issues on form validation
    if default_value == []:
        default_value = None
    # Select widgets become choice fields; everything else is resolved
    # from the model field's type.
    if widget and isinstance(widget, forms.widgets.Select):
        form_field = forms.ChoiceField(label=model_field.name,
                                       required=model_field.required,
                                       widget=widget)
    else:
        field_class = get_form_field_class(model_field)
        form_field = field_class(label=model_field.name,
                                 required=model_field.required,
                                 widget=widget)
    self.form.fields[field_key] = form_field
    if default_value is None:
        # Fall back to the model field's declared default
        form_field.initial = getattr(model_field, 'default', None)
    elif isinstance(default_value, Document):
        # Probably a reference field, therefore, add id
        form_field.initial = getattr(default_value, 'id', None)
    else:
        form_field.initial = default_value
    if isinstance(model_field, ReferenceField):
        form_field.choices = [(six.text_type(x.id), get_document_unicode(x))
                              for x in model_field.document_type.objects.all()]
        # Adding in blank choice so a reference field can be deleted by selecting blank
        form_field.choices.insert(0, ("", ""))
    elif model_field.choices:
        form_field.choices = model_field.choices
    # Mirror selected model-field attributes onto the form field
    for key, form_attr in CHECK_ATTRS.items():
        if hasattr(model_field, key):
            setattr(form_field, key, getattr(model_field, key))
|
def get_field_value(self, field_key):
    """
    Return the value held at ``field_key`` on ``self.model_instance``.

    Returns None when no model instance has been provided
    (``self.is_initialized`` is falsy).
    """
    def get_value(document, field_key):
        # Short circuit the function if we do not have a document
        if document is None:
            return None
        current_key, new_key_array = trim_field_key(document, field_key)
        # A trailing numeric part addresses an entry inside a list field
        key_array_digit = int(new_key_array[-1]) if new_key_array and has_digit(new_key_array) else None
        new_key = make_key(new_key_array)
        if key_array_digit is not None and len(new_key_array) > 0:
            # Handling list fields
            if len(new_key_array) == 1:
                return_data = document._data.get(current_key, [])
            elif isinstance(document, BaseList):
                return_list = []
                if len(document) > 0:
                    return_list = [get_value(doc, new_key) for doc in document]
                return_data = return_list
            else:
                return_data = get_value(getattr(document, current_key), new_key)
        elif len(new_key_array) > 0:
            # Descend into an embedded document
            return_data = get_value(document._data.get(current_key), new_key)
        else:
            # Handling all other fields and id
            # BUG FIX: this guard previously used a bare ``except:``. The
            # original comment states the failure mode is mongoengine's
            # getattr raising "TypeError: attribute name must be string",
            # so catch exactly TypeError instead of swallowing everything.
            try:
                return_data = (document._data.get(None, None) if current_key == "id" else
                               document._data.get(current_key, None))
            except TypeError:
                return_data = document._data.get(current_key, None)
        return return_data
    if self.is_initialized:
        return get_value(self.model_instance, field_key)
    else:
        return None
|
def has_digit(string_or_list, sep="_"):
    """
    True when the last word (for strings, split on ``sep``) or the last
    element (for lists/tuples) is a digit.
    """
    if not isinstance(string_or_list, (tuple, list)):
        # Strings are reduced to the list case by splitting on sep
        return has_digit(string_or_list.split(sep))
    if not string_or_list:
        return False
    return six.text_type(string_or_list[-1]).isdigit()
|
def make_key(*args, **kwargs):
    """
    Join any mix of strings and lists of strings into one key separated
    by the ``sep`` kwarg (default u"_").

    Pass ``exclude_last_string=True`` to drop the final ``sep``-separated
    word of each plain-string argument; note that a one-word string then
    contributes nothing (possibly yielding an empty result).

    Example uses:
        >>> from mongonaut.forms.form_utils import make_key
        >>> make_key('hi', 'my', 'firend')
        >>> u'hi_my_firend'
        >>> make_key('hi', 'my', 'firend', sep='i')
        >>> 'hiimyifirend'
        >>> make_key('hi', 'my', 'firend', ['this', 'be', 'what'], sep='i')
        >>> 'hiimyifirendithisibeiwhat'
        >>> make_key('hi', 'my', 'firend', ['this', 'be', 'what'])
        >>> u'hi_my_firend_this_be_what'
    """
    sep = kwargs.get('sep', u"_")
    drop_last = kwargs.get('exclude_last_string', False)
    pieces = []
    for arg in args:
        if isinstance(arg, list):
            # Lists are joined as-is; exclude_last_string does not apply
            pieces.append(six.text_type(sep.join(arg)))
        elif drop_last:
            trimmed = arg.split(sep)[:-1]
            if len(trimmed) > 0:
                # Re-join with the default separator (historic behavior)
                pieces.append(make_key(trimmed))
        else:
            pieces.append(six.text_type(arg))
    return sep.join(pieces)
|
def set_fields(self):
    """Build the form fields from the current model or model instance."""
    # Use the instance's data when one was supplied, else the bare model
    target = self.model_instance if self.is_initialized else self.model
    self.model_map_dict = self.create_document_dictionary(target)
    form_field_dict = self.get_form_field_dict(self.model_map_dict)
    self.set_form_fields(form_field_dict)
|
def set_post_data(self):
    """
    Set form data so that validation on all post data occurs and
    places newly entered form data on the form object.
    """
    self.form.data = self.post_data_dict
    # Specifically adding list field keys to the form so they are included
    # in form.cleaned_data after the call to is_valid
    #
    # BUG FIX: iterate over a snapshot of the items — the loop below adds
    # keys to self.form.fields, and mutating a dict while iterating it
    # raises RuntimeError on Python 3.
    for field_key, field in list(self.form.fields.items()):
        if has_digit(field_key):
            # We have a list field.
            base_key = make_key(field_key, exclude_last_string=True)
            # Add new key value with field to form fields so validation
            # will work correctly
            for key in self.post_data_dict.keys():
                if base_key in key:
                    self.form.fields.update({key: field})
|
def get_form(self):
    """
    Build and return the form for the view, applying any pending POST data.
    """
    self.set_fields()
    # Without POST data the freshly built form is returned as-is
    if self.post_data_dict is None:
        return self.form
    self.set_post_data()
    return self.form
|
def create_doc_dict(self, document, doc_key=None, owner_document=None):
    """
    Generate a flat dictionary representation of the document (no recursion).
    DO NOT CALL DIRECTLY
    """
    # The document field is looked up on whichever document owns it
    holder = owner_document if owner_document else document
    doc_field = holder._fields.get(doc_key, None) if doc_key else None
    # Generate the base fields for the document
    doc_dict = {"_document": document if owner_document is None else owner_document,
                "_key": document.__class__.__name__.lower() if doc_key is None else doc_key,
                "_document_field": doc_field}
    if doc_key and not isinstance(document, TopLevelDocumentMetaclass):
        # A keyed non-top-level document is an embedded document
        doc_dict.update({"_field_type": EmbeddedDocumentField})
    # Expose every model field alongside the metadata entries
    doc_dict.update(document._fields)
    return doc_dict
|
def create_list_dict(self, document, list_field, doc_key):
    """
    Build a dictionary representation of *list_field*; *document* is the
    document the list field belongs to.
    DO NOT CALL DIRECTLY
    """
    list_dict = {"_document": document}
    if isinstance(list_field.field, EmbeddedDocumentField):
        embedded = self.create_document_dictionary(
            document=list_field.field.document_type_obj,
            owner_document=document)
        list_dict.update(embedded)
    # Applied last so these entries win over anything the embedded
    # document dictionary may have set.
    list_dict.update({
        "_document_field": list_field.field,
        "_key": doc_key,
        "_field_type": ListField,
        "_widget": get_widget(list_field.field),
        "_value": getattr(document, doc_key, None),
    })
    return list_dict
|
def create_document_dictionary(self, document, document_key=None,
                               owner_document=None):
    """
    Build a recursive dictionary representation of *document*, including
    the widget used to render each field.
    """
    doc_dict = self.create_doc_dict(document, document_key, owner_document)
    for doc_key, doc_field in doc_dict.items():
        # Keys starting with "_" are metadata entries, not fields.
        if doc_key.startswith("_"):
            continue
        if isinstance(doc_field, ListField):
            doc_dict[doc_key] = self.create_list_dict(document, doc_field, doc_key)
        elif isinstance(doc_field, EmbeddedDocumentField):
            doc_dict[doc_key] = self.create_document_dictionary(
                doc_field.document_type_obj, doc_key)
        else:
            doc_dict[doc_key] = {
                "_document": document,
                "_key": doc_key,
                "_document_field": doc_field,
                "_widget": get_widget(doc_field, getattr(doc_field, 'disabled', False)),
            }
    return doc_dict
|
def get_widget(model_field, disabled=False):
    """Pick the display widget appropriate for *model_field*."""
    attrs = get_attrs(model_field, disabled)
    # A string field with no max_length renders as a textarea.
    if hasattr(model_field, "max_length") and not model_field.max_length:
        return forms.Textarea(attrs=attrs)
    if isinstance(model_field, DateTimeField):
        return forms.DateTimeInput(attrs=attrs)
    if isinstance(model_field, BooleanField):
        return forms.CheckboxInput(attrs=attrs)
    if isinstance(model_field, ReferenceField) or model_field.choices:
        return forms.Select(attrs=attrs)
    if isinstance(model_field, (ListField, EmbeddedDocumentField, GeoPointField)):
        # Compound fields have no single widget.
        return None
    return forms.TextInput(attrs=attrs)
|
def get_attrs(model_field, disabled=False):
    """Build the attribute dict for a field's display widget."""
    attrs = {'class': 'span6 xlarge'}
    # ObjectId fields are never editable; explicitly disabled fields
    # get the same read-only treatment.
    if disabled or isinstance(model_field, ObjectIdField):
        attrs['class'] += ' disabled'
        attrs['readonly'] = 'readonly'
    return attrs
|
#: Maps each mongoengine field type to its default Django form field.
#: Built once at import time instead of on every call.
_FIELD_MAPPING = {
    IntField: forms.IntegerField,
    StringField: forms.CharField,
    FloatField: forms.FloatField,
    BooleanField: forms.BooleanField,
    DateTimeField: forms.DateTimeField,
    DecimalField: forms.DecimalField,
    URLField: forms.URLField,
    EmailField: forms.EmailField,
}


def get_form_field_class(model_field):
    """Gets the default form field for a mongoengine field.

    Falls back to ``forms.CharField`` for unmapped field types.
    """
    return _FIELD_MAPPING.get(model_field.__class__, forms.CharField)
|
def get_document_value(document, key):
    '''
    Returns the display value of a field for a particular MongoDB document.
    '''
    value = getattr(document, key)
    if isinstance(value, ObjectId):
        return value
    # URL fields render as a clickable link to the stored URL.
    if isinstance(document._fields.get(key), URLField):
        return mark_safe("""<a href="{0}">{1}</a>""".format(value, value))
    # References render as a link to the referenced document's detail page.
    if isinstance(value, Document):
        app_label = value.__module__.replace(".models", "")
        document_name = value._class_name
        detail_url = reverse(
            "document_detail",
            kwargs={'app_label': app_label, 'document_name': document_name,
                    'id': value.id})
        return mark_safe("""<a href="{0}">{1}</a>""".format(detail_url, value))
    return value
|
def get_qset(self, queryset, q):
    """Filter *queryset* by search term *q* across the admin's
    configured search fields.
    """
    # Nothing to do without both configured fields and a search term.
    if not (self.mongoadmin.search_fields and q):
        return queryset
    params = {}
    for field in self.mongoadmin.search_fields:
        if field == 'id':
            # Short-circuit on a primary-key search when the term is a
            # valid ObjectId; otherwise skip the id field entirely.
            if is_valid_object_id(q):
                return queryset.filter(pk=q)
            continue
        params["{field}__icontains".format(field=field)] = q
    return queryset.filter(**params)
|
def get_queryset(self):
    """Replicates Django CBV `get_queryset()` method, but for MongoEngine.

    Resolves the document class from the URL kwargs, applies ordering,
    search and manual pagination, then caches the result on
    ``self.queryset``.  Also sets ``self.page`` and ``self.total_pages``
    as side effects for use by the template context.
    """
    # Return the cached queryset if a previous call already built it.
    if hasattr(self, "queryset") and self.queryset:
        return self.queryset
    self.set_mongonaut_base()
    self.set_mongoadmin()
    self.document = getattr(self.models, self.document_name)
    queryset = self.document.objects.all()
    if self.mongoadmin.ordering:
        queryset = queryset.order_by(*self.mongoadmin.ordering)
    # Apply the search term from the query string, if any.
    q = self.request.GET.get('q')
    queryset = self.get_qset(queryset, q)
    ### Start pagination
    ### Note:
    ### Can't use Paginator in Django because mongoengine querysets are
    ### not the same as Django ORM querysets and it broke.
    # Make sure page request is an int. If not, deliver first page.
    try:
        self.page = int(self.request.GET.get('page', '1'))
    except ValueError:
        self.page = 1
    obj_count = queryset.count()
    # NOTE(review): on Python 2 this division truncates unless
    # documents_per_page is a float -- confirm the intended semantics.
    self.total_pages = math.ceil(obj_count / self.documents_per_page)
    # Clamp the requested page into [1, total_pages].
    if self.page > self.total_pages:
        self.page = self.total_pages
    if self.page < 1:
        self.page = 1
    start = (self.page - 1) * self.documents_per_page
    end = self.page * self.documents_per_page
    # Slice only when there are results; slicing an empty queryset is
    # skipped and the (empty) queryset is returned unchanged.
    queryset = queryset[start:end] if obj_count else queryset
    self.queryset = queryset
    return queryset
|
def get_initial(self):
    """Used during adding/editing of data."""
    # Cache the queryset and expose its document ids as strings.
    self.query = self.get_queryset()
    return {'mongo_id': [str(doc.id) for doc in self.query]}
|
def get_context_data(self, **kwargs):
    """Injects data into the context to replicate CBV ListView.

    Adds the object list, document metadata, pagination values and the
    list-view column keys to the template context.
    """
    context = super(DocumentListView, self).get_context_data(**kwargs)
    context = self.set_permissions_in_context(context)
    # NOTE(review): returning an HttpResponse from get_context_data is
    # unusual -- callers normally expect a dict. Confirm the template
    # layer handles this.
    if not context['has_view_permission']:
        return HttpResponseForbidden("You do not have permissions to view this content.")
    context['object_list'] = self.get_queryset()
    context['document'] = self.document
    context['app_label'] = self.app_label
    context['document_name'] = self.document_name
    context['request'] = self.request
    # pagination bits -- self.page/self.total_pages were set by
    # get_queryset() above.
    context['page'] = self.page
    context['documents_per_page'] = self.documents_per_page
    if self.page > 1:
        previous_page_number = self.page - 1
    else:
        previous_page_number = None
    if self.page < self.total_pages:
        next_page_number = self.page + 1
    else:
        next_page_number = None
    context['previous_page_number'] = previous_page_number
    context['has_previous_page'] = previous_page_number is not None
    context['next_page_number'] = next_page_number
    context['has_next_page'] = next_page_number is not None
    context['total_pages'] = self.total_pages
    # Part of upcoming list view form functionality
    if self.queryset.count():
        context['keys'] = ['id', ]
        # Show those items for which we've got list_fields on the mongoadmin
        for key in [x for x in self.mongoadmin.list_fields if x != 'id' and x in self.document._fields.keys()]:
            # TODO - Figure out why this EmbeddedDocumentField and ListField breaks this view
            # Note - This is the challenge part, right? :)
            if isinstance(self.document._fields[key], EmbeddedDocumentField):
                continue
            if isinstance(self.document._fields[key], ListField):
                continue
            context['keys'].append(key)
    if self.mongoadmin.search_fields:
        context['search_field'] = True
    return context
|
def post(self, request, *args, **kwargs):
    """Deletes the mongoengine records whose ids were posted.

    NOTE(review): the original docstring said "Creates new mongoengine
    records", but the body deletes every document whose posted
    ``mongo_id`` matches an id in the current queryset -- confirm which
    is intended.
    """
    # TODO - make sure to check the rights of the poster
    #self.get_queryset() # TODO - write something that grabs the document class better
    form_class = self.get_form_class()
    form = self.get_form(form_class)
    # Only ids present in both the posted data and the current queryset
    # are deleted, so arbitrary ids cannot be removed.
    mongo_ids = self.get_initial()['mongo_id']
    for form_mongo_id in form.data.getlist('mongo_id'):
        for mongo_id in mongo_ids:
            if form_mongo_id == mongo_id:
                self.document.objects.get(pk=mongo_id).delete()
    return self.form_invalid(form)
|
def get_context_data(self, **kwargs):
    """Build the context for the document "add" form view.

    TODO - possibly inherit this from DocumentEditFormView. This is same
    thing minus:
        self.ident = self.kwargs.get('id')
        self.document = self.document_type.objects.get(pk=self.ident)
    """
    context = super(DocumentAddFormView, self).get_context_data(**kwargs)
    self.set_mongoadmin()
    context = self.set_permissions_in_context(context)
    # Resolve the document class so the form knows what to build.
    self.document_type = getattr(self.models, self.document_name)
    context['app_label'] = self.app_label
    context['document_name'] = self.document_name
    # The form posts back to the add-form URL for this document type.
    context['form_action'] = reverse('document_detail_add_form', args=[self.kwargs.get('app_label'),
                                                                       self.kwargs.get('document_name')])
    return context
|
def get_mongoadmins(self):
    """ Returns a list of all mongoadmin implementations for the site """
    discovered = []
    for app_name in settings.INSTALLED_APPS:
        module_path = "{0}.mongoadmin".format(app_name)
        try:
            module = import_module(module_path)
        except ImportError as e:
            # Apps without a mongoadmin module are skipped; any other
            # import failure is a genuine error and is re-raised.
            if str(e).startswith("No module named"):
                continue
            raise e
        discovered.append(dict(app_name=app_name, obj=AppStore(module)))
    return discovered
|
def set_mongonaut_base(self):
    """ Sets a number of commonly used attributes """
    if hasattr(self, "app_label"):
        # Already initialised; avoid repeating this work.
        return None
    self.app_label = self.kwargs.get('app_label')
    self.document_name = self.kwargs.get('document_name')
    # TODO Allow this to be assigned via url variable
    self.models_name = self.kwargs.get('models_name', 'models')
    # Import the app's models module, e.g. "myapp.models".
    self.model_name = ".".join((self.app_label, self.models_name))
    self.models = import_module(self.model_name)
|
def set_mongoadmin(self):
    """Resolve and cache the MongoAdmin for app_label/document_name.

    Scans every registered mongoadmin module for a model whose name
    matches ``self.document_name`` and stores its mongoadmin on self.

    Raises:
        NoMongoAdminSpecified: when no matching MongoAdmin exists.
    """
    if hasattr(self, "mongoadmin"):
        # Already resolved by an earlier call.
        return None
    if not hasattr(self, "document_name"):
        self.set_mongonaut_base()
    # Return on the first match. The previous version's inner ``break``
    # only exited the inner loop, so the outer loop kept scanning (and a
    # later duplicate name could overwrite the match).
    for mongoadmin in self.get_mongoadmins():
        for model in mongoadmin['obj'].models:
            if model.name == self.document_name:
                self.mongoadmin = model.mongoadmin
                return None
    raise NoMongoAdminSpecified("No MongoAdmin for {0}.{1}".format(
        self.app_label, self.document_name))
|
def set_permissions_in_context(self, context=None):
    """Provides permissions for mongoadmin for use in the context.

    Args:
        context: dict to update; a fresh dict is created when omitted.

    Returns:
        The updated context dict.
    """
    # A mutable default argument ({}) would be shared across every call
    # of this method; create a fresh dict per call instead.
    if context is None:
        context = {}
    context['has_view_permission'] = self.mongoadmin.has_view_permission(self.request)
    context['has_edit_permission'] = self.mongoadmin.has_edit_permission(self.request)
    context['has_add_permission'] = self.mongoadmin.has_add_permission(self.request)
    context['has_delete_permission'] = self.mongoadmin.has_delete_permission(self.request)
    return context
|
def process_post_form(self, success_message=None):
    """
    As long as the form is set on the view this method will validate the
    form and save the submitted data. Only call this if you are posting
    data. The given success_message will be used with the Django
    messages framework if the posted data successfully submits.

    Returns the (possibly invalid) bound form either way.
    """
    # When no initial args are given we need to set the base document.
    if not hasattr(self, 'document') or self.document is None:
        self.document = self.document_type()
    # Rebuild the form from the posted data so validation sees it all.
    self.form = MongoModelForm(model=self.document_type, instance=self.document,
                               form_post_data=self.request.POST).get_form()
    self.form.is_bound = True
    if self.form.is_valid():
        self.document_map_dict = MongoModelForm(model=self.document_type).create_document_dictionary(self.document_type)
        self.new_document = self.document_type
        # Used to keep track of embedded documents in lists. Keyed by the list and the number of the
        # document.
        self.embedded_list_docs = {}
        if self.new_document is None:
            messages.error(self.request, u"Failed to save document")
        else:
            # Instantiate a fresh document and copy every cleaned form
            # value onto it via process_document.
            self.new_document = self.new_document()
            for form_key in self.form.cleaned_data.keys():
                if form_key == 'id' and hasattr(self, 'document'):
                    # Preserve the existing id so edits update in place.
                    self.new_document.id = self.document.id
                    continue
                self.process_document(self.new_document, form_key, None)
            self.new_document.save()
            if success_message:
                messages.success(self.request, success_message)
    return self.form
|
def process_document(self, document, form_key, passed_key):
    """
    Given the form_key will evaluate the document and set values
    correctly for the document given.

    ``passed_key``, when not None, overrides ``form_key`` as the key to
    trim and walk; this is how recursive calls descend into embedded
    documents.
    """
    if passed_key is not None:
        current_key, remaining_key_array = trim_field_key(document, passed_key)
    else:
        current_key, remaining_key_array = trim_field_key(document, form_key)
    # A trailing digit in the key marks a position inside a list field.
    key_array_digit = remaining_key_array[-1] if remaining_key_array and has_digit(remaining_key_array) else None
    remaining_key = make_key(remaining_key_array)
    if current_key.lower() == 'id':
        raise KeyError(u"Mongonaut does not work with models which have fields beginning with id_")
    # Create boolean checks to make processing document easier
    is_embedded_doc = (isinstance(document._fields.get(current_key, None), EmbeddedDocumentField)
                       if hasattr(document, '_fields') else False)
    is_list = not key_array_digit is None
    key_in_fields = current_key in document._fields.keys() if hasattr(document, '_fields') else False
    # This ensures you only go through each documents keys once, and do not duplicate data
    if key_in_fields:
        if is_embedded_doc:
            self.set_embedded_doc(document, form_key, current_key, remaining_key)
        elif is_list:
            self.set_list_field(document, form_key, current_key, remaining_key, key_array_digit)
        else:
            # Plain field: translate the cleaned form value to the
            # mongoengine field's type and assign it directly.
            value = translate_value(document._fields[current_key],
                                    self.form.cleaned_data[form_key])
            setattr(document, current_key, value)
|
def set_embedded_doc(self, document, form_key, current_key, remaining_key):
    """Fetch the existing embedded document (creating it when absent),
    recurse into it, then attach it back onto *document*.
    """
    embedded_doc = getattr(document, current_key, False)
    if not embedded_doc:
        embedded_doc = document._fields[current_key].document_type_obj()
    new_key, new_remaining = trim_field_key(embedded_doc, remaining_key)
    self.process_document(embedded_doc, form_key,
                          make_key(new_key, new_remaining))
    setattr(document, current_key, embedded_doc)
|
def set_list_field(self, document, form_key, current_key, remaining_key, key_array_digit):
    """1. Figures out what value the list ought to have
       2. Sets the list
    """
    document_field = document._fields.get(current_key)
    # Figure out what value the list ought to have
    # None value for ListFields make mongoengine very un-happy
    list_value = translate_value(document_field.field, self.form.cleaned_data[form_key])
    # NOTE(review): "(not list_value and not bool(list_value))" is
    # equivalent to just "not list_value" -- the second clause is
    # redundant.
    if list_value is None or (not list_value and not bool(list_value)):
        return None
    current_list = getattr(document, current_key, None)
    if isinstance(document_field.field, EmbeddedDocumentField):
        # Embedded documents inside lists are cached by list name plus
        # position, so repeated form keys hit the same document.
        embedded_list_key = u"{0}_{1}".format(current_key, key_array_digit)
        # Get the embedded document if it exists, else create it.
        embedded_list_document = self.embedded_list_docs.get(embedded_list_key, None)
        if embedded_list_document is None:
            embedded_list_document = document_field.field.document_type_obj()
        new_key, new_remaining_key_array = trim_field_key(embedded_list_document, remaining_key)
        self.process_document(embedded_list_document, form_key, new_key)
        list_value = embedded_list_document
        self.embedded_list_docs[embedded_list_key] = embedded_list_document
        if isinstance(current_list, list):
            # Do not add the same document twice
            if embedded_list_document not in current_list:
                current_list.append(embedded_list_document)
        else:
            setattr(document, current_key, [embedded_list_document])
    elif isinstance(current_list, list):
        current_list.append(list_value)
    else:
        setattr(document, current_key, [list_value])
|
def with_tz(request):
    """
    Render the current time zone name with TZ support enabled.

    Returns an HttpResponse whose body is the active time zone.
    """
    # The previous version computed ``datetime.now()`` into an unused
    # local; it has been removed.
    t = Template('{% load tz %}{% localtime on %}{% get_current_timezone as TIME_ZONE %}{{ TIME_ZONE }}{% endlocaltime %}')
    c = RequestContext(request)
    return HttpResponse(t.render(c))
|
def without_tz(request):
    """
    Get the time without TZ enabled
    """
    template = Template('{% load tz %}{% get_current_timezone as TIME_ZONE %}{{ TIME_ZONE }}')
    context = RequestContext(request)
    return HttpResponse(template.render(context))
|
def is_valid_ip(ip_address):
    """Check Validity of an IP address.

    Returns True when *ip_address* parses as IPv4 or IPv6, else False.
    """
    # The previous version bound unused locals ``ip`` and ``e``.
    try:
        # ``u'' +`` forces a unicode string on Python 2.
        ipaddress.ip_address(u'' + ip_address)
    except ValueError:
        return False
    return True
|
def is_local_ip(ip_address):
    """Check if IP is local.

    Returns True for loopback addresses, False for other valid
    addresses, and None when *ip_address* is not a valid IP at all.
    """
    # The previous version bound an unused local ``e``.
    try:
        # ``u'' +`` forces a unicode string on Python 2.
        return ipaddress.ip_address(u'' + ip_address).is_loopback
    except ValueError:
        return None
|
def process_request(self, request):
    """
    If we can get a valid IP from the request,
    look up that address in the database to get the appropriate timezone
    and activate it.
    Else, use the default.
    """
    if not request:
        return
    # Lazily load the GeoIP database(s) on first use.
    if not db_loaded:
        load_db()
    tz = request.session.get('django_timezone')
    if not tz:
        # use the default timezone (settings.TIME_ZONE) for localhost
        tz = timezone.get_default_timezone()
        client_ip = get_ip_address_from_request(request)
        # The header may contain a comma-separated proxy chain; use the
        # first public, valid address found.
        ip_addrs = client_ip.split(',')
        for ip in ip_addrs:
            if is_valid_ip(ip) and not is_local_ip(ip):
                # ':' in the address marks IPv6 -> use the v6 database.
                if ':' in ip:
                    tz = db_v6.time_zone_by_addr(ip)
                    break
                else:
                    tz = db.time_zone_by_addr(ip)
                    break
    if tz:
        timezone.activate(tz)
        # Cache the result so later requests skip the lookup.
        request.session['django_timezone'] = str(tz)
        if getattr(settings, 'AUTH_USER_MODEL', None) and getattr(request, 'user', None):
            detected_timezone.send(sender=get_user_model(), instance=request.user, timezone=tz)
    else:
        timezone.deactivate()
|
def elastic_query(model, query, session=None, enabled_fields=None):
    """ Public method for init the class ElasticQuery
    :model: SQLAlchemy model
    :query: valid string like a ElasticSearch
    :session: SQLAlchemy session *optional
    :enabled_fields: Fields allowed for make a query *optional
    """
    # TODO: make session to optional
    return ElasticQuery(model, query, session, enabled_fields).search()
|
def search(self):
    """Parse ``self.query`` as JSON and run the filter/sort pipeline.

    Returns False when the query string is not valid JSON, otherwise
    the (possibly filtered and ordered) query object.
    """
    try:
        filters = json.loads(self.query)
    except ValueError:
        return False
    result = self.model_query
    if 'filter' in filters:
        result = self.parse_filter(filters['filter'])
    if 'sort' in filters:
        result = result.order_by(*self.sort(filters['sort']))
    return result
|
def parse_filter(self, filters):
    """ This method process the filters

    Top-level keys 'or'/'and' combine their sub-conditions with
    or_()/and_(); any other key is treated as a single field condition.
    Each iteration narrows ``self.model_query`` further.
    """
    for filter_type in filters:
        if filter_type == 'or' or filter_type == 'and':
            conditions = []
            for field in filters[filter_type]:
                # Silently skip fields outside the allowed set.
                if self.is_field_allowed(field):
                    conditions.append(self.create_query(self.parse_field(field, filters[filter_type][field])))
            if filter_type == 'or':
                self.model_query = self.model_query.filter(or_(*conditions))
            elif filter_type == 'and':
                self.model_query = self.model_query.filter(and_(*conditions))
        else:
            if self.is_field_allowed(filter_type):
                conditions = self.create_query(self.parse_field(filter_type, filters[filter_type]))
                self.model_query = self.model_query.filter(conditions)
    return self.model_query
|
def parse_field(self, field, field_value):
    """ Parse the operators and traduce: ES to SQLAlchemy operators

    Dict values carry an explicit operator ({"gt": 5}); bare values mean
    an equality test.  Returns (field, operator, value), or a 2-tuple
    error message when the operator is unknown.
    """
    if isinstance(field_value, dict):
        # TODO: check operators and emit error
        operator = list(field_value)[0]
        if self.verify_operator(operator) is False:
            return "Error: operator does not exist", operator
        value = field_value[operator]
    else:
        # The previous version only matched ``type(...) is unicode``,
        # which left operator/value unbound (UnboundLocalError) for
        # Python 3 strings; treat any bare value as an equality test.
        operator = u'equals'
        value = field_value
    return field, operator, value
|
def create_query(self, attr):
    """ Mix all values and make the query """
    field, operator, value = attr
    model = self.model
    if '.' not in field:
        # Simple column on the model itself.
        return OPERATORS[operator](getattr(model, field, None), value)
    # Dotted fields traverse a relationship: "rel.attr" builds an
    # EXISTS-style query against the related model's attribute.
    parts = field.split('.')
    rel_field = getattr(model, parts[0], None)
    related_class = rel_field.property.mapper.class_
    related_attr = getattr(related_class, parts[1])
    return rel_field.has(OPERATORS[operator](related_attr, value))
|
def sort(self, sort_list):
    """Build ORDER BY terms from a {field: "asc"|"desc"} mapping."""
    order = []
    for field_name in sort_list:
        direction = sort_list[field_name]
        # Unknown directions are silently ignored.
        if direction == "asc":
            order.append(asc(getattr(self.model, field_name, None)))
        elif direction == "desc":
            order.append(desc(getattr(self.model, field_name, None)))
    return order
|
def sendmail(self, msg_from, msg_to, msg):
    """Remember the recipients.

    Records the last message on the SMTP_dummy class itself (class
    attributes, shared across instances) so tests can inspect what
    would have been sent.
    """
    SMTP_dummy.msg_from = msg_from
    SMTP_dummy.msg_to = msg_to
    SMTP_dummy.msg = msg
|
def parsemail(raw_message):
    """Parse message headers, then remove BCC header.

    Returns (message, sender, recipients) where recipients are gathered
    from the TO, CC and BCC headers before BCC is stripped.
    """
    message = email.parser.Parser().parsestr(raw_message)
    # Detect encoding
    detected = chardet.detect(bytearray(raw_message, "utf-8"))
    encoding = detected["encoding"]
    print(">>> encoding {}".format(encoding))
    # Apply the detected charset to every leaf part of the message.
    for part in message.walk():
        if part.get_content_maintype() == 'multipart':
            continue
        part.set_charset(encoding)
    # Extract recipients
    addrs = email.utils.getaddresses(message.get_all("TO", [])) + \
        email.utils.getaddresses(message.get_all("CC", [])) + \
        email.utils.getaddresses(message.get_all("BCC", []))
    recipients = [x[1] for x in addrs]
    # BCC must not appear in the delivered headers.
    message.__delitem__("bcc")
    message.__setitem__('Date', email.utils.formatdate())
    sender = message["from"]
    return (message, sender, recipients)
|
def _create_boundary(message):
    """Add boundary parameter to multipart message if they are not present.

    Returns the message unchanged when it is not multipart or already
    has a boundary.
    """
    if not message.is_multipart() or message.get_boundary() is not None:
        return message
    # HACK: Python2 lists do not natively have a `copy` method. Unfortunately,
    # due to a bug in the Backport for the email module, the method
    # `Message.set_boundary` converts the Message headers into a native list,
    # so that other methods that rely on "copying" the Message headers fail.
    # `Message.set_boundary` is called from `Generator.handle_multipart` if the
    # message does not already have a boundary present. (This method itself is
    # called from `Message.as_string`.)
    # Hence, to prevent `Message.set_boundary` from being called, add a
    # boundary header manually.
    from future.backports.email.generator import Generator
    # pylint: disable=protected-access
    boundary = Generator._make_boundary(message.policy.linesep)
    message.set_param('boundary', boundary)
    return message
|
def make_message_multipart(message):
    """Convert a message into a multipart message.

    Non-multipart input is wrapped in a multipart/alternative container,
    copying all headers and attaching the original body as a text part.
    """
    if not message.is_multipart():
        multipart_message = email.mime.multipart.MIMEMultipart('alternative')
        # set() deduplicates the key names; get_all() below still
        # returns every value for keys that repeat.
        for header_key in set(message.keys()):
            # Preserve duplicate headers
            values = message.get_all(header_key, failobj=[])
            for value in values:
                multipart_message[header_key] = value
        original_text = message.get_payload()
        multipart_message.attach(email.mime.text.MIMEText(original_text))
        message = multipart_message
    # HACK: For Python2 (see comments in `_create_boundary`)
    message = _create_boundary(message)
    return message
|
def convert_markdown(message):
    """Convert markdown in message text to HTML.

    The message must declare Content-Type text/markdown; it is made
    multipart and an HTML rendering of each text/plain part is attached.
    """
    assert message['Content-Type'].startswith("text/markdown")
    del message['Content-Type']
    # Convert the text from markdown and then make the message multipart
    message = make_message_multipart(message)
    # Snapshot with list(...) rather than set(...): attach() appends to
    # the same payload list, and set() made the iteration order
    # nondeterministic.
    for payload_item in list(message.get_payload()):
        # Assume the plaintext item is formatted with markdown.
        # Add corresponding HTML version of the item as the last part of
        # the multipart message (as per RFC 2046)
        if payload_item['Content-Type'].startswith('text/plain'):
            original_text = payload_item.get_payload()
            html_text = markdown.markdown(original_text)
            html_payload = future.backports.email.mime.text.MIMEText(
                "<html><body>{}</body></html>".format(html_text),
                "html",
            )
            message.attach(html_payload)
    return message
|
def addattachments(message, template_path):
    """Add the attachments from the message from the commandline options.

    Returns (message, attachment_count).  Exits the process when an
    attachment path does not exist.
    """
    if 'attachment' not in message:
        return message, 0
    message = make_message_multipart(message)
    attachment_filepaths = message.get_all('attachment', failobj=[])
    template_parent_dir = os.path.dirname(template_path)
    for attachment_filepath in attachment_filepaths:
        attachment_filepath = os.path.expanduser(attachment_filepath.strip())
        if not attachment_filepath:
            continue
        if not os.path.isabs(attachment_filepath):
            # Relative paths are relative to the template's parent directory
            attachment_filepath = os.path.join(template_parent_dir,
                                               attachment_filepath)
        normalized_path = os.path.abspath(attachment_filepath)
        # Check that the attachment exists
        if not os.path.exists(normalized_path):
            print("Error: can't find attachment " + normalized_path)
            sys.exit(1)
        filename = os.path.basename(normalized_path)
        with open(normalized_path, "rb") as attachment:
            part = email.mime.application.MIMEApplication(attachment.read(),
                                                          Name=filename)
        part.add_header('Content-Disposition',
                        'attachment; filename="{}"'.format(filename))
        message.attach(part)
        print(">>> attached {}".format(normalized_path))
    # The attachment headers are internal bookkeeping; strip them before
    # the message is sent.
    del message['attachment']
    return message, len(attachment_filepaths)
|
def sendmail(message, sender, recipients, config_filename):
    """Send email message using Python SMTP library.

    SMTP settings are read once from *config_filename* and cached as
    attributes on the function itself; the password is prompted for once
    unless the security mode is "Dummy" or the username is "None".
    """
    # Read config file from disk to get SMTP server host, port, username
    if not hasattr(sendmail, "host"):
        config = configparser.RawConfigParser()
        config.read(config_filename)
        sendmail.host = config.get("smtp_server", "host")
        sendmail.port = config.getint("smtp_server", "port")
        sendmail.username = config.get("smtp_server", "username")
        sendmail.security = config.get("smtp_server", "security")
        print(">>> Read SMTP server configuration from {}".format(
            config_filename))
        print(">>>   host = {}".format(sendmail.host))
        print(">>>   port = {}".format(sendmail.port))
        print(">>>   username = {}".format(sendmail.username))
        print(">>>   security = {}".format(sendmail.security))
    # Prompt for password
    if not hasattr(sendmail, "password"):
        if sendmail.security == "Dummy" or sendmail.username == "None":
            sendmail.password = None
        else:
            prompt = ">>> password for {} on {}: ".format(sendmail.username,
                                                          sendmail.host)
            sendmail.password = getpass.getpass(prompt)
    # Connect to SMTP server using the configured security mode.
    if sendmail.security == "SSL/TLS":
        smtp = smtplib.SMTP_SSL(sendmail.host, sendmail.port)
    elif sendmail.security == "STARTTLS":
        smtp = smtplib.SMTP(sendmail.host, sendmail.port)
        smtp.ehlo()
        smtp.starttls()
        smtp.ehlo()
    elif sendmail.security == "Never":
        smtp = smtplib.SMTP(sendmail.host, sendmail.port)
    elif sendmail.security == "Dummy":
        smtp = smtp_dummy.SMTP_dummy()
    else:
        raise configparser.Error("Unrecognized security type: {}".format(
            sendmail.security))
    # Send credentials
    if sendmail.username != "None":
        smtp.login(sendmail.username, sendmail.password)
    # Send message.  Note that we can't use the elegant
    # "smtp.send_message(message)" because that's python3 only
    smtp.sendmail(sender, recipients, message.as_string())
    smtp.close()
|
def create_sample_input_files(template_filename,
                              database_filename,
                              config_filename):
    """Create sample template email and database.

    Writes a starter template, CSV database and SMTP config file, then
    tells the user to edit them.  Refuses (and exits) if any target file
    already exists, so nothing is ever overwritten.
    """
    print("Creating sample template email {}".format(template_filename))
    if os.path.exists(template_filename):
        print("Error: file exists: " + template_filename)
        sys.exit(1)
    with io.open(template_filename, "w") as template_file:
        template_file.write(
            u"TO: {{email}}\n"
            u"SUBJECT: Testing mailmerge\n"
            u"FROM: My Self <[email protected]>\n"
            u"\n"
            u"Hi, {{name}},\n"
            u"\n"
            u"Your number is {{number}}.\n"
        )
    print("Creating sample database {}".format(database_filename))
    if os.path.exists(database_filename):
        print("Error: file exists: " + database_filename)
        sys.exit(1)
    with io.open(database_filename, "w") as database_file:
        database_file.write(
            u'email,name,number\n'
            u'[email protected],"Myself",17\n'
            u'[email protected],"Bob",42\n'
        )
    print("Creating sample config file {}".format(config_filename))
    if os.path.exists(config_filename):
        print("Error: file exists: " + config_filename)
        sys.exit(1)
    # The config ships with one active example (GMail) and several
    # commented-out alternatives for other server setups.
    with io.open(config_filename, "w") as config_file:
        config_file.write(
            u"# Example: GMail\n"
            u"[smtp_server]\n"
            u"host = smtp.gmail.com\n"
            u"port = 465\n"
            u"security = SSL/TLS\n"
            u"username = YOUR_USERNAME_HERE\n"
            u"#\n"
            u"# Example: Wide open\n"
            u"# [smtp_server]\n"
            u"# host = open-smtp.example.com\n"
            u"# port = 25\n"
            u"# security = Never\n"
            u"# username = None\n"
            u"#\n"
            u"# Example: University of Michigan\n"
            u"# [smtp_server]\n"
            u"# host = smtp.mail.umich.edu\n"
            u"# port = 465\n"
            u"# security = SSL/TLS\n"
            u"# username = YOUR_USERNAME_HERE\n"
            u"#\n"
            u"# Example: University of Michigan EECS Dept., with STARTTLS security\n"  # noqa: E501
            u"# [smtp_server]\n"
            u"# host = newman.eecs.umich.edu\n"
            u"# port = 25\n"
            u"# security = STARTTLS\n"
            u"# username = YOUR_USERNAME_HERE\n"
            u"#\n"
            u"# Example: University of Michigan EECS Dept., with no encryption\n"  # noqa: E501
            u"# [smtp_server]\n"
            u"# host = newman.eecs.umich.edu\n"
            u"# port = 25\n"
            u"# security = Never\n"
            u"# username = YOUR_USERNAME_HERE\n"
        )
    print("Edit these files, and then run mailmerge again")
|
def main(sample=False,
         dry_run=True,
         limit=1,
         no_limit=False,
         database_filename=DATABASE_FILENAME_DEFAULT,
         template_filename=TEMPLATE_FILENAME_DEFAULT,
         config_filename=CONFIG_FILENAME_DEFAULT):
    """Python API for mailmerge.

    mailmerge 0.1 by Andrew DeOrio <[email protected]>.
    A simple, command line mail merge tool.
    Render an email template for each line in a CSV database.

    When ``sample`` is set, starter files are created and the process
    exits.  Otherwise each CSV row is rendered through the Jinja2
    template and (unless ``dry_run``) sent via SMTP, stopping after
    ``limit`` messages unless ``no_limit`` is set.
    """
    # pylint: disable=too-many-arguments,too-many-locals,too-many-branches
    # pylint: disable=too-many-statements
    # NOTE: this function needs a refactor, then remove ^^^
    # Create a sample email template and database if there isn't one already
    if sample:
        create_sample_input_files(
            template_filename,
            database_filename,
            config_filename,
        )
        sys.exit(0)
    if not os.path.exists(template_filename):
        print("Error: can't find template email " + template_filename)
        print("Create a sample (--sample) or specify a file (--template)")
        sys.exit(1)
    if not os.path.exists(database_filename):
        print("Error: can't find database_filename " + database_filename)
        print("Create a sample (--sample) or specify a file (--database)")
        sys.exit(1)
    try:
        # Read template
        with io.open(template_filename, "r") as template_file:
            content = template_file.read() + u"\n"
            template = jinja2.Template(content)
        # Read CSV file database
        database = []
        with io.open(database_filename, "r") as database_file:
            reader = csv.DictReader(database_file)
            for row in reader:
                database.append(row)
        # Each row corresponds to one email message
        for i, row in enumerate(database):
            # Honor the message limit unless --no-limit was given.
            if not no_limit and i >= limit:
                break
            # Fill in template fields using fields from row of CSV file
            raw_message = template.render(**row)
            # Parse message headers and detect encoding
            (message, sender, recipients) = parsemail(raw_message)
            # Convert message from markdown to HTML if requested
            if message['Content-Type'].startswith("text/markdown"):
                message = convert_markdown(message)
            print(">>> message {}".format(i))
            print(message.as_string())
            # Add attachments if any
            (message, num_attachments) = addattachments(message,
                                                        template_filename)
            # Send message
            if dry_run:
                print(">>> sent message {} DRY RUN".format(i))
            else:
                # Send message
                try:
                    sendmail(message, sender, recipients, config_filename)
                except smtplib.SMTPException as err:
                    # Log the failure with a timestamp but keep going:
                    # one bad recipient must not abort the whole run.
                    print(">>> failed to send message {}".format(i))
                    timestamp = '{:%Y-%m-%d %H:%M:%S}'.format(
                        datetime.datetime.now()
                    )
                    print(timestamp, i, err, sep=' ', file=sys.stderr)
                else:
                    print(">>> sent message {}".format(i))
        # Hints for user
        if num_attachments == 0:
            print(">>> No attachments were sent with the emails.")
        if not no_limit:
            print(">>> Limit was {} messages.  ".format(limit) +
                  "To remove the limit, use the --no-limit option.")
        if dry_run:
            print((">>> This was a dry run.  "
                   "To send messages, use the --no-dry-run option."))
    except jinja2.exceptions.TemplateError as err:
        print(">>> Error in Jinja2 template: {}".format(err))
        sys.exit(1)
    except csv.Error as err:
        print(">>> Error reading CSV file: {}".format(err))
        sys.exit(1)
    except smtplib.SMTPAuthenticationError as err:
        print(">>> Authentication error: {}".format(err))
        sys.exit(1)
    except configparser.Error as err:
        print(">>> Error reading config file {}: {}".format(
            config_filename, err))
        sys.exit(1)
|
def cli(sample, dry_run, limit, no_limit,
        database_filename, template_filename, config_filename):
    """Command line interface.

    Thin wrapper that forwards every option to mailmerge.api.main.
    """
    # pylint: disable=too-many-arguments
    options = dict(
        sample=sample,
        dry_run=dry_run,
        limit=limit,
        no_limit=no_limit,
        database_filename=database_filename,
        template_filename=template_filename,
        config_filename=config_filename,
    )
    mailmerge.api.main(**options)
|
def _tailCallback(f, uid):
    """
    Build the "callable" version of the continuation, which should only
    be reachable from inside the function being continued.  The
    attribute "C" exposes the public version of the continuation again
    (for passing the continuation to another function).
    """
    def bounce(*args):
        # Unwind the stack back to the trampoline with the next call.
        raise _TailCall(f, args, uid)
    bounce.C = f
    return bounce
|
def with_continuations(**c):
    """
    A decorator for defining tail-call optimized functions.

    Keyword arguments name the extra continuations passed to the
    decorated function; the function always receives itself as the
    ``self`` keyword so it can tail-call recursively.

    Example
    -------
        @with_continuations()
        def factorial(n, k, self=None):
            return self(n-1, k*n) if n > 1 else k

        @with_continuations()
        def identity(x, self=None):
            return x

        @with_continuations(out=identity)
        def factorial2(n, k, self=None, out=None):
            return self(n-1, k*n) if n > 1 else out(k)

        print(factorial(7,1))
        print(factorial2(7,1))
    """
    # Split the keyword mapping into parallel tuples of names and
    # continuation functions (both empty when no kwargs are given).
    if len(c): keys, k = zip(*c.items())
    else: keys, k = tuple([]), tuple([])
    def d(f):
        # C wraps f in the trampoline; kself is the self-continuation,
        # conts are the named continuations re-zipped with their keys.
        return C(
            lambda kself, *conts:
                lambda *args:
                    f(*args, self=kself, **dict(zip(keys, conts)))) (*k)
    return d
|
def parse_int_list(string):
    """
    Parses a string of numbers and ranges into a list of integers.

    Tokens are separated by commas and/or spaces.  A token of the form
    "a-b" denotes the inclusive range a..b; any other non-empty token is
    a single integer.

    Example:
        parse_int_list("8 9 10,11-13") == [8,9,10,11,12,13]
    """
    result = []
    # flatten "comma parts" into individual space-separated tokens
    tokens = (tok for part in string.split(",") for tok in part.split(" "))
    for token in tokens:
        if not token:
            continue
        if "-" in token:
            start, _, end = token.partition("-")
            result.extend(range(int(start.strip()), int(end.strip()) + 1))
        else:
            result.append(int(token.strip()))
    return result
|
def sanitize_params(method, **kwargs):
"""
Request params can be extracted from the ``**kwargs``
Arguments starting with `_` will be stripped from it, so they
can be used as an argument for the request
(eg. "_headers" → "headers" in the kwargs returned by this
function while "headers" would be inserted into the parameters
of the request)
Parameters
----------
method : str
method to use to make the request
kwargs : dict
Keywords arguments given to the request
Returns
-------
dict
New requests parameters, correctly formatted
"""
# items which does not have a key starting with `_`
items = [(key, value) for key, value in kwargs.items()
if not key.startswith("_")]
params, skip_params = {}, False
for key, value in items:
# binary data
if hasattr(value, 'read') or isinstance(value, bytes):
params[key] = value
# The params won't be used to make the signature
skip_params = True
# booleans conversion
elif isinstance(value, bool):
params[key] = "true" if value else "false"
# iterables conversion
elif isinstance(value, iterable):
params[key] = ",".join(map(str, value))
# skip params with value None
elif value is None:
pass
# the rest is converted to str
# (make sure you don't send something wrong)
else:
params[key] = str(value)
# dict with other items (+ strip "_" from keys)
kwargs = {key[1:]: value for key, value in kwargs.items()
if key.startswith("_")}
if method == "post" and not kwargs.get('data', None) and params:
kwargs['data'] = params # post requests use the data argument
elif not kwargs.get('params', None) and params:
kwargs['params'] = params
return kwargs, skip_params
|
def _get_base_url(base_url, api, version):
    """
    create the base url for the api

    An empty ``api`` or ``version`` removes the corresponding
    placeholder (together with its adjoining separator) from the url.

    Parameters
    ----------
    base_url : str
        format of the base_url using {api} and {version}
    api : str
        name of the api to use
    version : str
        version of the api

    Returns
    -------
    str
        the base url of the api you want to use
    """
    # NOTE: the previous implementation built a `format_args` dict that
    # was never used; `str.format` below receives the keywords directly.
    # `str.replace` on an absent substring is a no-op, so no guard needed.
    if api == "":
        base_url = base_url.replace('{api}.', '')
    if version == "":
        base_url = base_url.replace('/{version}', '')
    return base_url.format(api=api, version=version)
|
async def request(self, method, url, future,
                  headers=None,
                  session=None,
                  encoding=None,
                  **kwargs):
    """
    Make requests to the REST API

    On success the result is delivered through ``future``; on an HTTP
    error status an exception is raised via :func:`exceptions.throw`.

    Parameters
    ----------
    future : asyncio.Future
        Future used to return the response
    method : str
        Method to be used by the request
    url : str
        URL of the resource
    headers : .oauth.PeonyHeaders
        Custom headers (doesn't overwrite `Authorization` headers)
    session : aiohttp.ClientSession, optional
        Client session used to make the request
    encoding : str, optional
        Encoding used to decode the response; defaults to
        ``self.encoding``

    Returns
    -------
    data.PeonyResponse
        Response to the request
    """
    await self.setup
    # prepare request arguments, particularly the headers
    req_kwargs = await self.headers.prepare_request(
        method=method,
        url=url,
        headers=headers,
        proxy=self.proxy,
        **kwargs
    )
    if encoding is None:
        encoding = self.encoding
    session = session if (session is not None) else self._session
    # lazy %-args: the message is only formatted if debug logging is on
    logger.debug("making request with parameters: %s", req_kwargs)
    async with session.request(**req_kwargs) as response:
        if response.status < 400:  # 2xx and 3xx are treated as success
            data = await data_processing.read(response, self._loads,
                                              encoding=encoding)
            future.set_result(data_processing.PeonyResponse(
                data=data,
                headers=response.headers,
                url=response.url,
                request=req_kwargs
            ))
        else:  # raise an exception for any 4xx/5xx status
            await exceptions.throw(response, loads=self._loads,
                                   encoding=encoding, url=url)
|
def stream_request(self, method, url, headers=None, _session=None,
                   *args, **kwargs):
    """
    Make requests to the Streaming API

    Parameters
    ----------
    method : str
        Method to be used by the request
    url : str
        URL of the resource
    headers : dict
        Custom headers (doesn't overwrite `Authorization` headers)
    _session : aiohttp.ClientSession, optional
        The session to use for this specific request, the session
        given as argument of :meth:`__init__` is used by default

    Returns
    -------
    .stream.StreamResponse
        Stream context for the request
    """
    # build the stream context lazily; the request itself is made when
    # the returned StreamResponse is entered
    return StreamResponse(
        client=self,
        method=method,
        url=url,
        session=_session,
        headers=headers,
        proxy=self.proxy,
        **kwargs
    )
|
def get_tasks(self):
    """
    Get the tasks attached to the instance

    Returns
    -------
    list
        List of tasks (:class:`asyncio.Task`)
    """
    # client-level tasks first, then the tasks owned by the streams
    return [*self._get_tasks(), *self._streams.get_tasks(self)]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.