Dataset columns (name and value type):

    repo              stringlengths   7 .. 55
    path              stringlengths   4 .. 127
    func_name         stringlengths   1 .. 88
    original_string   stringlengths  75 .. 19.8k
    language          stringclasses   1 value
    code              stringlengths  75 .. 19.8k
    code_tokens       sequence
    docstring         stringlengths   3 .. 17.3k
    docstring_tokens  sequence
    sha               stringlengths  40 .. 40
    url               stringlengths  87 .. 242
    partition         stringclasses   1 value
jim-easterbrook/pyctools
src/pyctools/core/base.py
Component.send
def send(self, output_name, frame):
    """Send an output frame.

    The frame is sent to each input the output is connected to. If
    there are no connections this is a null operation with little
    overhead.

    :param str output_name: the output to use. Must be a member of
        :py:attr:`~Component.outputs`.

    :param Frame frame: the frame to send.

    """
    for input_method in self._component_connections[output_name]:
        input_method(frame)
python
def send(self, output_name, frame):
    """Send an output frame.

    The frame is sent to each input the output is connected to. If
    there are no connections this is a null operation with little
    overhead.

    :param str output_name: the output to use. Must be a member of
        :py:attr:`~Component.outputs`.

    :param Frame frame: the frame to send.

    """
    for input_method in self._component_connections[output_name]:
        input_method(frame)
[ "def", "send", "(", "self", ",", "output_name", ",", "frame", ")", ":", "for", "input_method", "in", "self", ".", "_component_connections", "[", "output_name", "]", ":", "input_method", "(", "frame", ")" ]
Send an output frame. The frame is sent to each input the output is connected to. If there are no connections this is a null operation with little overhead. :param str output_name: the output to use. Must be a member of :py:attr:`~Component.outputs`. :param Frame frame: the frame to send.
[ "Send", "an", "output", "frame", "." ]
2a958665326892f45f249bebe62c2c23f306732b
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/base.py#L280-L294
train
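The send method above is a plain fan-out over registered callables; a self-contained sketch of that dispatch pattern (the connections dict, the connect helper and the frame values are invented for the demo, not pyctools API):

from typing import Any, Callable, Dict, List

# Hypothetical stand-in for Component._component_connections.
connections: Dict[str, List[Callable[[Any], None]]] = {'output': [], 'status': []}

def connect(output_name, input_method):
    connections[output_name].append(input_method)

def send(output_name, frame):
    # With no connections the loop body never runs, so sending is a cheap no-op.
    for input_method in connections[output_name]:
        input_method(frame)

connect('output', lambda frame: print('sink received', frame))
send('output', {'frame_no': 0})   # sink received {'frame_no': 0}
send('status', 'idle')            # nothing connected: silently dropped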
jim-easterbrook/pyctools
src/pyctools/core/base.py
Component.start_event
def start_event(self):
    """Called by the event loop when it is started.

    Creates the output frame pools (if used) then calls
    :py:meth:`on_start`. Creating the output frame pools now allows
    their size to be configured before starting the component.

    """
    # create object pool for each output
    if self.with_outframe_pool:
        self.update_config()
        for name in self.outputs:
            self.outframe_pool[name] = ObjectPool(
                Frame, self.new_frame, self.config['outframe_pool_len'])
    try:
        self.on_start()
    except Exception as ex:
        self.logger.exception(ex)
        raise StopIteration()
python
def start_event(self):
    """Called by the event loop when it is started.

    Creates the output frame pools (if used) then calls
    :py:meth:`on_start`. Creating the output frame pools now allows
    their size to be configured before starting the component.

    """
    # create object pool for each output
    if self.with_outframe_pool:
        self.update_config()
        for name in self.outputs:
            self.outframe_pool[name] = ObjectPool(
                Frame, self.new_frame, self.config['outframe_pool_len'])
    try:
        self.on_start()
    except Exception as ex:
        self.logger.exception(ex)
        raise StopIteration()
[ "def", "start_event", "(", "self", ")", ":", "# create object pool for each output", "if", "self", ".", "with_outframe_pool", ":", "self", ".", "update_config", "(", ")", "for", "name", "in", "self", ".", "outputs", ":", "self", ".", "outframe_pool", "[", "name", "]", "=", "ObjectPool", "(", "Frame", ",", "self", ".", "new_frame", ",", "self", ".", "config", "[", "'outframe_pool_len'", "]", ")", "try", ":", "self", ".", "on_start", "(", ")", "except", "Exception", "as", "ex", ":", "self", ".", "logger", ".", "exception", "(", "ex", ")", "raise", "StopIteration", "(", ")" ]
Called by the event loop when it is started. Creates the output frame pools (if used) then calls :py:meth:`on_start`. Creating the output frame pools now allows their size to be configured before starting the component.
[ "Called", "by", "the", "event", "loop", "when", "it", "is", "started", "." ]
2a958665326892f45f249bebe62c2c23f306732b
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/base.py#L324-L342
train
jim-easterbrook/pyctools
src/pyctools/core/base.py
Component.stop_event
def stop_event(self):
    """Called by the event loop when it is stopped.

    Calls :py:meth:`on_stop`, then sends :py:data:`None` to each
    output to shut down the rest of the processing pipeline.

    """
    self.logger.debug('stopping')
    try:
        self.on_stop()
    except Exception as ex:
        self.logger.exception(ex)
    for name in self.outputs:
        self.send(name, None)
python
def stop_event(self):
    """Called by the event loop when it is stopped.

    Calls :py:meth:`on_stop`, then sends :py:data:`None` to each
    output to shut down the rest of the processing pipeline.

    """
    self.logger.debug('stopping')
    try:
        self.on_stop()
    except Exception as ex:
        self.logger.exception(ex)
    for name in self.outputs:
        self.send(name, None)
[ "def", "stop_event", "(", "self", ")", ":", "self", ".", "logger", ".", "debug", "(", "'stopping'", ")", "try", ":", "self", ".", "on_stop", "(", ")", "except", "Exception", "as", "ex", ":", "self", ".", "logger", ".", "exception", "(", "ex", ")", "for", "name", "in", "self", ".", "outputs", ":", "self", ".", "send", "(", "name", ",", "None", ")" ]
Called by the event loop when it is stopped. Calls :py:meth:`on_stop`, then sends :py:data:`None` to each output to shut down the rest of the processing pipeline.
[ "Called", "by", "the", "event", "loop", "when", "it", "is", "stopped", "." ]
2a958665326892f45f249bebe62c2c23f306732b
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/base.py#L348-L361
train
jim-easterbrook/pyctools
src/pyctools/core/base.py
Component.is_pipe_end
def is_pipe_end(self):
    """Is component the last one in a pipeline.

    When waiting for a network of components to finish processing
    it's not necessary to wait for every component to stop, and in
    many cases they won't all stop anyway.

    This method makes it easier to choose which components to wait
    for. See the :py:mod:`Compound <.compound>` component for an
    example.

    :rtype: :py:class:`bool`

    """
    for name in self.outputs:
        if self._component_connections[name]:
            return False
    return True
python
def is_pipe_end(self):
    """Is component the last one in a pipeline.

    When waiting for a network of components to finish processing
    it's not necessary to wait for every component to stop, and in
    many cases they won't all stop anyway.

    This method makes it easier to choose which components to wait
    for. See the :py:mod:`Compound <.compound>` component for an
    example.

    :rtype: :py:class:`bool`

    """
    for name in self.outputs:
        if self._component_connections[name]:
            return False
    return True
[ "def", "is_pipe_end", "(", "self", ")", ":", "for", "name", "in", "self", ".", "outputs", ":", "if", "self", ".", "_component_connections", "[", "name", "]", ":", "return", "False", "return", "True" ]
Is component the last one in a pipeline. When waiting for a network of components to finish processing it's not necessary to wait for every component to stop, and in many cases they won't all stop anyway. This method makes it easier to choose which components to wait for. See the :py:mod:`Compound <.compound>` component for an example. :rtype: :py:class:`bool`
[ "Is", "component", "the", "last", "one", "in", "a", "pipeline", "." ]
2a958665326892f45f249bebe62c2c23f306732b
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/base.py#L363-L380
train
jim-easterbrook/pyctools
src/pyctools/core/base.py
Component.new_config_event
def new_config_event(self):
    """Called by the event loop when new config is available.

    """
    try:
        self.on_set_config()
    except Exception as ex:
        self.logger.exception(ex)
        raise StopIteration()
python
def new_config_event(self):
    """Called by the event loop when new config is available.

    """
    try:
        self.on_set_config()
    except Exception as ex:
        self.logger.exception(ex)
        raise StopIteration()
[ "def", "new_config_event", "(", "self", ")", ":", "try", ":", "self", ".", "on_set_config", "(", ")", "except", "Exception", "as", "ex", ":", "self", ".", "logger", ".", "exception", "(", "ex", ")", "raise", "StopIteration", "(", ")" ]
Called by the event loop when new config is available.
[ "Called", "by", "the", "event", "loop", "when", "new", "config", "is", "available", "." ]
2a958665326892f45f249bebe62c2c23f306732b
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/base.py#L389-L397
train
jim-easterbrook/pyctools
src/pyctools/core/base.py
Component.new_frame_event
def new_frame_event(self):
    """Called by the event loop when a new input or output frame is
    available.

    Inputs are correlated by comparing their frame numbers. If there
    is a complete set of inputs, and all output frame pools are
    ready, the :py:meth:`process_frame` method is called.

    If an input frame has a negative frame number it is not
    correlated with other inputs, it is merely required to exist.
    This allows frame objects to be used as control inputs when
    processing video sequences.

    The derived class should use the input buffer's
    :py:meth:`~InputBuffer.peek` method to get the frame without
    removing it from the buffer. See the
    :py:class:`~pyctools.components.colourspace.matrix.Matrix`
    component for an example.

    """
    # check output frames are available
    for out_pool in self.outframe_pool.values():
        if not out_pool.available():
            return
    # check input frames are available, and get current frame numbers
    frame_nos = {}
    for in_buff in self.input_buffer.values():
        if not in_buff.available():
            return
        in_frame = in_buff.peek()
        if in_frame is None:
            raise StopIteration()
        if in_frame.frame_no >= 0:
            frame_nos[in_buff] = in_frame.frame_no
        else:
            # discard any superseded 'static' input
            while in_buff.available() > 1 and in_buff.peek(1) is not None:
                in_buff.get()
    if len(frame_nos) > 1:
        frame_no = max(frame_nos.values())
        # discard old frames that can never be used
        for in_buff in frame_nos:
            while frame_nos[in_buff] < frame_no and in_buff.available() > 1:
                in_buff.get()
                in_frame = in_buff.peek()
                if in_frame is None:
                    raise StopIteration()
                frame_nos[in_buff] = in_frame.frame_no
        # check for complete set of matching frame numbers
        if min(frame_nos.values()) != max(frame_nos.values()):
            return
    # now have a full set of correlated inputs to process
    try:
        self.process_frame()
    except StopIteration:
        raise
    except Exception as ex:
        self.logger.exception(ex)
        raise StopIteration()
python
def new_frame_event(self):
    """Called by the event loop when a new input or output frame is
    available.

    Inputs are correlated by comparing their frame numbers. If there
    is a complete set of inputs, and all output frame pools are
    ready, the :py:meth:`process_frame` method is called.

    If an input frame has a negative frame number it is not
    correlated with other inputs, it is merely required to exist.
    This allows frame objects to be used as control inputs when
    processing video sequences.

    The derived class should use the input buffer's
    :py:meth:`~InputBuffer.peek` method to get the frame without
    removing it from the buffer. See the
    :py:class:`~pyctools.components.colourspace.matrix.Matrix`
    component for an example.

    """
    # check output frames are available
    for out_pool in self.outframe_pool.values():
        if not out_pool.available():
            return
    # check input frames are available, and get current frame numbers
    frame_nos = {}
    for in_buff in self.input_buffer.values():
        if not in_buff.available():
            return
        in_frame = in_buff.peek()
        if in_frame is None:
            raise StopIteration()
        if in_frame.frame_no >= 0:
            frame_nos[in_buff] = in_frame.frame_no
        else:
            # discard any superseded 'static' input
            while in_buff.available() > 1 and in_buff.peek(1) is not None:
                in_buff.get()
    if len(frame_nos) > 1:
        frame_no = max(frame_nos.values())
        # discard old frames that can never be used
        for in_buff in frame_nos:
            while frame_nos[in_buff] < frame_no and in_buff.available() > 1:
                in_buff.get()
                in_frame = in_buff.peek()
                if in_frame is None:
                    raise StopIteration()
                frame_nos[in_buff] = in_frame.frame_no
        # check for complete set of matching frame numbers
        if min(frame_nos.values()) != max(frame_nos.values()):
            return
    # now have a full set of correlated inputs to process
    try:
        self.process_frame()
    except StopIteration:
        raise
    except Exception as ex:
        self.logger.exception(ex)
        raise StopIteration()
[ "def", "new_frame_event", "(", "self", ")", ":", "# check output frames are available", "for", "out_pool", "in", "self", ".", "outframe_pool", ".", "values", "(", ")", ":", "if", "not", "out_pool", ".", "available", "(", ")", ":", "return", "# check input frames are available, and get current frame numbers", "frame_nos", "=", "{", "}", "for", "in_buff", "in", "self", ".", "input_buffer", ".", "values", "(", ")", ":", "if", "not", "in_buff", ".", "available", "(", ")", ":", "return", "in_frame", "=", "in_buff", ".", "peek", "(", ")", "if", "in_frame", "is", "None", ":", "raise", "StopIteration", "(", ")", "if", "in_frame", ".", "frame_no", ">=", "0", ":", "frame_nos", "[", "in_buff", "]", "=", "in_frame", ".", "frame_no", "else", ":", "# discard any superseded 'static' input", "while", "in_buff", ".", "available", "(", ")", ">", "1", "and", "in_buff", ".", "peek", "(", "1", ")", "is", "not", "None", ":", "in_buff", ".", "get", "(", ")", "if", "len", "(", "frame_nos", ")", ">", "1", ":", "frame_no", "=", "max", "(", "frame_nos", ".", "values", "(", ")", ")", "# discard old frames that can never be used", "for", "in_buff", "in", "frame_nos", ":", "while", "frame_nos", "[", "in_buff", "]", "<", "frame_no", "and", "in_buff", ".", "available", "(", ")", ">", "1", ":", "in_buff", ".", "get", "(", ")", "in_frame", "=", "in_buff", ".", "peek", "(", ")", "if", "in_frame", "is", "None", ":", "raise", "StopIteration", "(", ")", "frame_nos", "[", "in_buff", "]", "=", "in_frame", ".", "frame_no", "# check for complete set of matching frame numbers", "if", "min", "(", "frame_nos", ".", "values", "(", ")", ")", "!=", "max", "(", "frame_nos", ".", "values", "(", ")", ")", ":", "return", "# now have a full set of correlated inputs to process", "try", ":", "self", ".", "process_frame", "(", ")", "except", "StopIteration", ":", "raise", "except", "Exception", "as", "ex", ":", "self", ".", "logger", ".", "exception", "(", "ex", ")", "raise", "StopIteration", "(", ")" ]
Called by the event loop when a new input or output frame is available. Inputs are correlated by comparing their frame numbers. If there is a complete set of inputs, and all output frame pools are ready, the :py:meth:`process_frame` method is called. If an input frame has a negative frame number it is not correlated with other inputs, it is merely required to exist. This allows frame objects to be used as control inputs when processing video sequences. The derived class should use the input buffer's :py:meth:`~InputBuffer.peek` method to get the frame without removing it from the buffer. See the :py:class:`~pyctools.components.colourspace.matrix.Matrix` component for an example.
[ "Called", "by", "the", "event", "loop", "when", "a", "new", "input", "or", "output", "frame", "is", "available", "." ]
2a958665326892f45f249bebe62c2c23f306732b
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/base.py#L410-L466
train
kavdev/ldap-groups
ldap_groups/groups.py
ADGroup.get_tree_members
def get_tree_members(self):
    """ Retrieves all members from this node of the tree down."""

    members = []
    queue = deque()
    queue.appendleft(self)
    visited = set()

    while len(queue):
        node = queue.popleft()

        if node not in visited:
            members.extend(node.get_member_info())
            queue.extendleft(node.get_children())
            visited.add(node)

    return [{attribute: member.get(attribute) for attribute in self.attr_list}
            for member in members if member]
python
def get_tree_members(self):
    """ Retrieves all members from this node of the tree down."""

    members = []
    queue = deque()
    queue.appendleft(self)
    visited = set()

    while len(queue):
        node = queue.popleft()

        if node not in visited:
            members.extend(node.get_member_info())
            queue.extendleft(node.get_children())
            visited.add(node)

    return [{attribute: member.get(attribute) for attribute in self.attr_list}
            for member in members if member]
[ "def", "get_tree_members", "(", "self", ")", ":", "members", "=", "[", "]", "queue", "=", "deque", "(", ")", "queue", ".", "appendleft", "(", "self", ")", "visited", "=", "set", "(", ")", "while", "len", "(", "queue", ")", ":", "node", "=", "queue", ".", "popleft", "(", ")", "if", "node", "not", "in", "visited", ":", "members", ".", "extend", "(", "node", ".", "get_member_info", "(", ")", ")", "queue", ".", "extendleft", "(", "node", ".", "get_children", "(", ")", ")", "visited", ".", "add", "(", "node", ")", "return", "[", "{", "attribute", ":", "member", ".", "get", "(", "attribute", ")", "for", "attribute", "in", "self", ".", "attr_list", "}", "for", "member", "in", "members", "if", "member", "]" ]
Retrieves all members from this node of the tree down.
[ "Retrieves", "all", "members", "from", "this", "node", "of", "the", "tree", "down", "." ]
0dd3a7d9eafa3903127364839b12a4b3dd3ca521
https://github.com/kavdev/ldap-groups/blob/0dd3a7d9eafa3903127364839b12a4b3dd3ca521/ldap_groups/groups.py#L464-L480
train
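The traversal above is an iterative walk over the group tree using a deque and a visited set; a self-contained sketch of the same pattern on a toy tree (the Node class, its methods and the member dicts are invented for the demo):

from collections import deque

class Node:
    def __init__(self, name, children=None, members=None):
        self.name = name
        self._children = children or []
        self._members = members or []

    def get_children(self):
        return self._children

    def get_member_info(self):
        return self._members

def tree_members(root):
    """Collect member records from root and every descendant, visiting each node once."""
    members, queue, visited = [], deque([root]), set()
    while queue:
        node = queue.popleft()
        if node not in visited:
            members.extend(node.get_member_info())
            queue.extendleft(node.get_children())
            visited.add(node)
    return members

leaf = Node("ops", members=[{"cn": "carol"}])
root = Node("staff", children=[leaf], members=[{"cn": "alice"}, {"cn": "bob"}])
print(tree_members(root))  # [{'cn': 'alice'}, {'cn': 'bob'}, {'cn': 'carol'}]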
Parsely/schemato
schemato/utils.py
deepest_node
def deepest_node((subj, pred, obj), graph):
    """recurse down the tree and return a list of the most deeply nested
    child nodes of the given triple"""
    # i don't fully accept the premise that this docstring presents
    # i'm not a docstring literalist
    to_return = []

    def _deepest_node((subj, pred, obj), graph):
        children = []
        if isinstance(obj, rt.BNode):
            for s, p, o in graph:
                if str(s) == str(obj):
                    children.append((s, p, o))
            for s, p, o in children:
                s1, p1, o1 = _deepest_node((s, p, o), graph)
                # coupling *smacks hand with ruler*
                if "rNews" in str(o1) and (s1, p1, o1) not in to_return:
                    to_return.append((s1, p1, o1))
            return (s1, p1, o1)
        else:
            return (subj, pred, obj)

    _deepest_node((subj, pred, obj), graph)

    return to_return
python
def deepest_node((subj, pred, obj), graph):
    """recurse down the tree and return a list of the most deeply nested
    child nodes of the given triple"""
    # i don't fully accept the premise that this docstring presents
    # i'm not a docstring literalist
    to_return = []

    def _deepest_node((subj, pred, obj), graph):
        children = []
        if isinstance(obj, rt.BNode):
            for s, p, o in graph:
                if str(s) == str(obj):
                    children.append((s, p, o))
            for s, p, o in children:
                s1, p1, o1 = _deepest_node((s, p, o), graph)
                # coupling *smacks hand with ruler*
                if "rNews" in str(o1) and (s1, p1, o1) not in to_return:
                    to_return.append((s1, p1, o1))
            return (s1, p1, o1)
        else:
            return (subj, pred, obj)

    _deepest_node((subj, pred, obj), graph)

    return to_return
[ "def", "deepest_node", "(", "(", "subj", ",", "pred", ",", "obj", ")", ",", "graph", ")", ":", "# i don't fully accept the premise that this docstring presents", "# i'm not a docstring literalist", "to_return", "=", "[", "]", "def", "_deepest_node", "(", "(", "subj", ",", "pred", ",", "obj", ")", ",", "graph", ")", ":", "children", "=", "[", "]", "if", "isinstance", "(", "obj", ",", "rt", ".", "BNode", ")", ":", "for", "s", ",", "p", ",", "o", "in", "graph", ":", "if", "str", "(", "s", ")", "==", "str", "(", "obj", ")", ":", "children", ".", "append", "(", "(", "s", ",", "p", ",", "o", ")", ")", "for", "s", ",", "p", ",", "o", "in", "children", ":", "s1", ",", "p1", ",", "o1", "=", "_deepest_node", "(", "(", "s", ",", "p", ",", "o", ")", ",", "graph", ")", "# coupling *smacks hand with ruler*", "if", "\"rNews\"", "in", "str", "(", "o1", ")", "and", "(", "s1", ",", "p1", ",", "o1", ")", "not", "in", "to_return", ":", "to_return", ".", "append", "(", "(", "s1", ",", "p1", ",", "o1", ")", ")", "return", "(", "s1", ",", "p1", ",", "o1", ")", "else", ":", "return", "(", "subj", ",", "pred", ",", "obj", ")", "_deepest_node", "(", "(", "subj", ",", "pred", ",", "obj", ")", ",", "graph", ")", "return", "to_return" ]
recurse down the tree and return a list of the most deeply nested child nodes of the given triple
[ "recurse", "down", "the", "tree", "and", "return", "a", "list", "of", "the", "most", "deeply", "nested", "child", "nodes", "of", "the", "given", "triple" ]
7002316fbcd52f2e669f8372bf1338c572e3df4b
https://github.com/Parsely/schemato/blob/7002316fbcd52f2e669f8372bf1338c572e3df4b/schemato/utils.py#L4-L27
train
magopian/django-data-exports
data_exports/templatetags/getter_tags.py
getattribute
def getattribute(model, item):
    """Chained lookup of item on model

    If item has dots (eg: 'foo.bar.baz'), recursively call getattribute():
        e = getattr(model, 'foo')
        e = getattr(e, 'bar')
        e = getattr(e, 'baz')

    At each step, check if e is a callable, and if so, use e()

    """
    elements = item.split('.')
    element = elements.pop(0)
    try:
        attr = getattr(model, element, None)
    except:
        return
    if attr is None:  # end of recursion
        return
    if callable(attr):
        try:
            attr = attr()
        except:  # couldn't call this method without params
            return
    if elements:
        return getattribute(attr, '.'.join(elements))
    return attr
python
def getattribute(model, item):
    """Chained lookup of item on model

    If item has dots (eg: 'foo.bar.baz'), recursively call getattribute():
        e = getattr(model, 'foo')
        e = getattr(e, 'bar')
        e = getattr(e, 'baz')

    At each step, check if e is a callable, and if so, use e()

    """
    elements = item.split('.')
    element = elements.pop(0)
    try:
        attr = getattr(model, element, None)
    except:
        return
    if attr is None:  # end of recursion
        return
    if callable(attr):
        try:
            attr = attr()
        except:  # couldn't call this method without params
            return
    if elements:
        return getattribute(attr, '.'.join(elements))
    return attr
[ "def", "getattribute", "(", "model", ",", "item", ")", ":", "elements", "=", "item", ".", "split", "(", "'.'", ")", "element", "=", "elements", ".", "pop", "(", "0", ")", "try", ":", "attr", "=", "getattr", "(", "model", ",", "element", ",", "None", ")", "except", ":", "return", "if", "attr", "is", "None", ":", "# end of recursion", "return", "if", "callable", "(", "attr", ")", ":", "try", ":", "attr", "=", "attr", "(", ")", "except", ":", "# couldn't call this method without params", "return", "if", "elements", ":", "return", "getattribute", "(", "attr", ",", "'.'", ".", "join", "(", "elements", ")", ")", "return", "attr" ]
Chained lookup of item on model If item has dots (eg: 'foo.bar.baz'), recursively call getattribute(): e = getattr(model, 'foo') e = getattr(e, 'bar') e = getattr(e, 'baz') At each step, check if e is a callable, and if so, use e()
[ "Chained", "lookup", "of", "item", "on", "model" ]
a73db486779d93046ad89c5bf582ff8ae869120f
https://github.com/magopian/django-data-exports/blob/a73db486779d93046ad89c5bf582ff8ae869120f/data_exports/templatetags/getter_tags.py#L13-L38
train
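A standalone illustration of the dotted lookup the docstring describes (the Author/Book classes are invented for the demo, and the bare excepts are narrowed to Exception in this sketch; the real filter lives in a Django template tag library):

# Toy stand-ins for model instances.
class Author:
    name = "Ada"

    def upper_name(self):
        return self.name.upper()


class Book:
    author = Author()


def getattribute(model, item):
    """Chained lookup: 'a.b.c' walks getattr(), calling any callables it meets."""
    elements = item.split('.')
    element = elements.pop(0)
    try:
        attr = getattr(model, element, None)
    except Exception:
        return
    if attr is None:  # end of recursion
        return
    if callable(attr):
        try:
            attr = attr()
        except Exception:  # couldn't call this method without params
            return
    if elements:
        return getattribute(attr, '.'.join(elements))
    return attr


print(getattribute(Book(), "author.name"))        # Ada
print(getattribute(Book(), "author.upper_name"))  # ADA (method is called)
print(getattribute(Book(), "author.missing"))     # None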
magopian/django-data-exports
data_exports/templatetags/getter_tags.py
nice_display
def nice_display(item):
    """Display a comma-separated list of models for M2M fields"""
    if hasattr(item, 'all'):  # RelatedManager: display a list
        return ', '.join(map(text_type, item.all()))
    return item
python
def nice_display(item):
    """Display a comma-separated list of models for M2M fields"""
    if hasattr(item, 'all'):  # RelatedManager: display a list
        return ', '.join(map(text_type, item.all()))
    return item
[ "def", "nice_display", "(", "item", ")", ":", "if", "hasattr", "(", "item", ",", "'all'", ")", ":", "# RelatedManager: display a list", "return", "', '", ".", "join", "(", "map", "(", "text_type", ",", "item", ".", "all", "(", ")", ")", ")", "return", "item" ]
Display a comma-separated list of models for M2M fields
[ "Display", "a", "comma", "-", "separated", "list", "of", "models", "for", "M2M", "fields" ]
a73db486779d93046ad89c5bf582ff8ae869120f
https://github.com/magopian/django-data-exports/blob/a73db486779d93046ad89c5bf582ff8ae869120f/data_exports/templatetags/getter_tags.py#L48-L52
train
nicfit/MishMash
mishmash/util.py
mostCommonItem
def mostCommonItem(lst):
    """Choose the most common item from the list, or the first item if all
    items are unique."""
    # This elegant solution from: http://stackoverflow.com/a/1518632/1760218
    lst = [l for l in lst if l]
    if lst:
        return max(set(lst), key=lst.count)
    else:
        return None
python
def mostCommonItem(lst):
    """Choose the most common item from the list, or the first item if all
    items are unique."""
    # This elegant solution from: http://stackoverflow.com/a/1518632/1760218
    lst = [l for l in lst if l]
    if lst:
        return max(set(lst), key=lst.count)
    else:
        return None
[ "def", "mostCommonItem", "(", "lst", ")", ":", "# This elegant solution from: http://stackoverflow.com/a/1518632/1760218", "lst", "=", "[", "l", "for", "l", "in", "lst", "if", "l", "]", "if", "lst", ":", "return", "max", "(", "set", "(", "lst", ")", ",", "key", "=", "lst", ".", "count", ")", "else", ":", "return", "None" ]
Choose the most common item from the list, or the first item if all items are unique.
[ "Choose", "the", "most", "common", "item", "from", "the", "list", "or", "the", "first", "item", "if", "all", "items", "are", "unique", "." ]
8f988936340bf0ffb83ea90ea124efb3c36a1174
https://github.com/nicfit/MishMash/blob/8f988936340bf0ffb83ea90ea124efb3c36a1174/mishmash/util.py#L61-L69
train
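A standalone usage sketch of the selection logic above, with the function body copied so the snippet runs without installing MishMash (the sample lists are made up):

def mostCommonItem(lst):
    # Falsy entries (None, "", 0) are dropped before counting.
    lst = [l for l in lst if l]
    if lst:
        # max() keyed on list.count picks the most frequent value.
        return max(set(lst), key=lst.count)
    else:
        return None

print(mostCommonItem(["rock", "jazz", "rock", "pop"]))  # rock
print(mostCommonItem(["a", "b", "c"]))  # some single-count item (set order decides)
print(mostCommonItem([None, ""]))       # None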
nicfit/MishMash
mishmash/util.py
safeDbUrl
def safeDbUrl(db_url):
    """Obfuscates password from a database URL."""
    url = urlparse(db_url)
    return db_url.replace(url.password, "****") if url.password else db_url
python
def safeDbUrl(db_url):
    """Obfuscates password from a database URL."""
    url = urlparse(db_url)
    return db_url.replace(url.password, "****") if url.password else db_url
[ "def", "safeDbUrl", "(", "db_url", ")", ":", "url", "=", "urlparse", "(", "db_url", ")", "return", "db_url", ".", "replace", "(", "url", ".", "password", ",", "\"****\"", ")", "if", "url", ".", "password", "else", "db_url" ]
Obfuscates password from a database URL.
[ "Obfuscates", "password", "from", "a", "database", "URL", "." ]
8f988936340bf0ffb83ea90ea124efb3c36a1174
https://github.com/nicfit/MishMash/blob/8f988936340bf0ffb83ea90ea124efb3c36a1174/mishmash/util.py#L72-L75
train
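A minimal runnable sketch of the same obfuscation, assuming Python 3's urllib.parse supplies urlparse (the record does not show the module's imports); the example URLs are made up:

from urllib.parse import urlparse

def safeDbUrl(db_url):
    """Obfuscates password from a database URL."""
    url = urlparse(db_url)
    return db_url.replace(url.password, "****") if url.password else db_url

print(safeDbUrl("postgresql://user:s3cret@localhost:5432/mishmash"))
# postgresql://user:****@localhost:5432/mishmash
print(safeDbUrl("sqlite:///music.db"))  # unchanged, no password component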
bear/bearlib
bearlib/config.py
bConfig.loadJson
def loadJson(self, filename):
    """ Read, parse and return given Json config file
    """
    jsonConfig = {}
    if os.path.isfile(filename):
        jsonConfig = json.loads(' '.join(open(filename, 'r').readlines()))
    return jsonConfig
python
def loadJson(self, filename):
    """ Read, parse and return given Json config file
    """
    jsonConfig = {}
    if os.path.isfile(filename):
        jsonConfig = json.loads(' '.join(open(filename, 'r').readlines()))
    return jsonConfig
[ "def", "loadJson", "(", "self", ",", "filename", ")", ":", "jsonConfig", "=", "{", "}", "if", "os", ".", "path", ".", "isfile", "(", "filename", ")", ":", "jsonConfig", "=", "json", ".", "loads", "(", "' '", ".", "join", "(", "open", "(", "filename", ",", "'r'", ")", ".", "readlines", "(", ")", ")", ")", "return", "jsonConfig" ]
Read, parse and return given Json config file
[ "Read", "parse", "and", "return", "given", "Json", "config", "file" ]
30f9b8ba4b7a8db4cd2f4c6e07966ae51d0a00dd
https://github.com/bear/bearlib/blob/30f9b8ba4b7a8db4cd2f4c6e07966ae51d0a00dd/bearlib/config.py#L203-L209
train
magopian/django-data-exports
data_exports/admin.py
ExportAdmin.get_readonly_fields
def get_readonly_fields(self, request, obj=None):
    """The model can't be changed once the export is created"""
    if obj is None:
        return []
    return super(ExportAdmin, self).get_readonly_fields(request, obj)
python
def get_readonly_fields(self, request, obj=None):
    """The model can't be changed once the export is created"""
    if obj is None:
        return []
    return super(ExportAdmin, self).get_readonly_fields(request, obj)
[ "def", "get_readonly_fields", "(", "self", ",", "request", ",", "obj", "=", "None", ")", ":", "if", "obj", "is", "None", ":", "return", "[", "]", "return", "super", "(", "ExportAdmin", ",", "self", ")", ".", "get_readonly_fields", "(", "request", ",", "obj", ")" ]
The model can't be changed once the export is created
[ "The", "model", "can", "t", "be", "changed", "once", "the", "export", "is", "created" ]
a73db486779d93046ad89c5bf582ff8ae869120f
https://github.com/magopian/django-data-exports/blob/a73db486779d93046ad89c5bf582ff8ae869120f/data_exports/admin.py#L31-L35
train
magopian/django-data-exports
data_exports/admin.py
ExportAdmin.response_add
def response_add(self, request, obj, post_url_continue=POST_URL_CONTINUE):
    """If we're adding, save must be "save and continue editing"

    Two exceptions to that workflow:

    * The user has pressed the 'Save and add another' button
    * We are adding a user in a popup

    """
    if '_addanother' not in request.POST and '_popup' not in request.POST:
        request.POST['_continue'] = 1
    return super(ExportAdmin, self).response_add(request, obj,
                                                 post_url_continue)
python
def response_add(self, request, obj, post_url_continue=POST_URL_CONTINUE):
    """If we're adding, save must be "save and continue editing"

    Two exceptions to that workflow:

    * The user has pressed the 'Save and add another' button
    * We are adding a user in a popup

    """
    if '_addanother' not in request.POST and '_popup' not in request.POST:
        request.POST['_continue'] = 1
    return super(ExportAdmin, self).response_add(request, obj,
                                                 post_url_continue)
[ "def", "response_add", "(", "self", ",", "request", ",", "obj", ",", "post_url_continue", "=", "POST_URL_CONTINUE", ")", ":", "if", "'_addanother'", "not", "in", "request", ".", "POST", "and", "'_popup'", "not", "in", "request", ".", "POST", ":", "request", ".", "POST", "[", "'_continue'", "]", "=", "1", "return", "super", "(", "ExportAdmin", ",", "self", ")", ".", "response_add", "(", "request", ",", "obj", ",", "post_url_continue", ")" ]
If we're adding, save must be "save and continue editing" Two exceptions to that workflow: * The user has pressed the 'Save and add another' button * We are adding a user in a popup
[ "If", "we", "re", "adding", "save", "must", "be", "save", "and", "continue", "editing" ]
a73db486779d93046ad89c5bf582ff8ae869120f
https://github.com/magopian/django-data-exports/blob/a73db486779d93046ad89c5bf582ff8ae869120f/data_exports/admin.py#L45-L57
train
astraw38/lint
lint/validators/pylint_validator.py
above_score_threshold
def above_score_threshold(new_data, old_data, strict=False,
                          threshold=PYLINT_SCORE_THRESHOLD):
    """
    Verifies that the pylint score is above a given threshold.

    :param new_data:
    :param old_data:
    :return:
    """
    success = True
    score = 0
    message = ''
    if strict:
        for fscore, fname in new_data['scores']:
            if fscore < threshold:
                success = False
                score = -1
                message += "File {} score ({}) below threshold {}\n".format(
                    fname, fscore, threshold)
        return success, score, message
    else:
        if new_data['average'] < threshold:
            success = False
            message = "Failed! Average pylint score ({})" \
                      " below threshold (9)!".format(new_data['average'])
            score = -1
        return success, score, message
python
def above_score_threshold(new_data, old_data, strict=False,
                          threshold=PYLINT_SCORE_THRESHOLD):
    """
    Verifies that the pylint score is above a given threshold.

    :param new_data:
    :param old_data:
    :return:
    """
    success = True
    score = 0
    message = ''
    if strict:
        for fscore, fname in new_data['scores']:
            if fscore < threshold:
                success = False
                score = -1
                message += "File {} score ({}) below threshold {}\n".format(
                    fname, fscore, threshold)
        return success, score, message
    else:
        if new_data['average'] < threshold:
            success = False
            message = "Failed! Average pylint score ({})" \
                      " below threshold (9)!".format(new_data['average'])
            score = -1
        return success, score, message
[ "def", "above_score_threshold", "(", "new_data", ",", "old_data", ",", "strict", "=", "False", ",", "threshold", "=", "PYLINT_SCORE_THRESHOLD", ")", ":", "success", "=", "True", "score", "=", "0", "message", "=", "''", "if", "strict", ":", "for", "fscore", ",", "fname", "in", "new_data", "[", "'scores'", "]", ":", "if", "fscore", "<", "threshold", ":", "success", "=", "False", "score", "=", "-", "1", "message", "+=", "\"File {} score ({}) below threshold {}\\n\"", ".", "format", "(", "fname", ",", "fscore", ",", "threshold", ")", "return", "success", ",", "score", ",", "message", "else", ":", "if", "new_data", "[", "'average'", "]", "<", "threshold", ":", "success", "=", "False", "message", "=", "\"Failed! Average pylint score ({})\"", "\" below threshold (9)!\"", ".", "format", "(", "new_data", "[", "'average'", "]", ")", "score", "=", "-", "1", "return", "success", ",", "score", ",", "message" ]
Verifies that the pylint score is above a given threshold. :param new_data: :param old_data: :return:
[ "Verifies", "that", "the", "pylint", "score", "is", "above", "a", "given", "threshold", "." ]
162ceefcb812f07d18544aaa887b9ec4f102cfb1
https://github.com/astraw38/lint/blob/162ceefcb812f07d18544aaa887b9ec4f102cfb1/lint/validators/pylint_validator.py#L72-L99
train
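A quick sketch of how the checker behaves on fabricated pylint results; the PYLINT_SCORE_THRESHOLD constant is not shown in this record, so a default of 9 is assumed from the error message:

def above_score_threshold(new_data, old_data, strict=False, threshold=9):
    """Trimmed copy of the validator above, with an assumed default threshold of 9."""
    success, score, message = True, 0, ''
    if strict:
        # Per-file check: every (score, filename) pair must clear the threshold.
        for fscore, fname in new_data['scores']:
            if fscore < threshold:
                success = False
                score = -1
                message += "File {} score ({}) below threshold {}\n".format(
                    fname, fscore, threshold)
        return success, score, message
    # Otherwise only the average matters.
    if new_data['average'] < threshold:
        success = False
        score = -1
        message = "Failed! Average pylint score ({}) below threshold!".format(
            new_data['average'])
    return success, score, message

results = {'scores': [(9.5, 'a.py'), (7.0, 'b.py')], 'average': 8.25}
print(above_score_threshold(results, None))               # (False, -1, 'Failed! ...')
print(above_score_threshold(results, None, strict=True))  # only b.py is flagged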
astraw38/lint
lint/validators/pylint_validator.py
PylintValidator.run
def run(self, new_pylint_data, old_pylint_data):
    """
    Run the new pylint data through given all current checkers,
    including comparisons to old pylint data.
    """
    for validator in self.checkers:
        success, score, message = validator(new_pylint_data,
                                            old_pylint_data)
        if not success:
            return score, message

    message = self.default_message.format(new_pylint_data['average'])
    return self.default_score, message
python
def run(self, new_pylint_data, old_pylint_data):
    """
    Run the new pylint data through given all current checkers,
    including comparisons to old pylint data.
    """
    for validator in self.checkers:
        success, score, message = validator(new_pylint_data,
                                            old_pylint_data)
        if not success:
            return score, message

    message = self.default_message.format(new_pylint_data['average'])
    return self.default_score, message
[ "def", "run", "(", "self", ",", "new_pylint_data", ",", "old_pylint_data", ")", ":", "for", "validator", "in", "self", ".", "checkers", ":", "success", ",", "score", ",", "message", "=", "validator", "(", "new_pylint_data", ",", "old_pylint_data", ")", "if", "not", "success", ":", "return", "score", ",", "message", "message", "=", "self", ".", "default_message", ".", "format", "(", "new_pylint_data", "[", "'average'", "]", ")", "return", "self", ".", "default_score", ",", "message" ]
Run the new pylint data through given all current checkers, including comparisons to old pylint data.
[ "Run", "the", "new", "pylint", "data", "through", "given", "all", "current", "checkers", "including", "comparisons", "to", "old", "pylint", "data", "." ]
162ceefcb812f07d18544aaa887b9ec4f102cfb1
https://github.com/astraw38/lint/blob/162ceefcb812f07d18544aaa887b9ec4f102cfb1/lint/validators/pylint_validator.py#L34-L46
train
Parsely/schemato
schemato/schemato.py
Schemato._get_document
def _get_document(self, source):
    """helper, open a file or url and return the content and identifier"""
    scheme_url = source
    if not source.startswith("http"):
        scheme_url = "http://%s" % source

    text = source

    try:
        text = urllib.urlopen(scheme_url).read()
    except:
        pass
    else:
        return (text, scheme_url)

    try:
        text = open(source, "r").read()
    except:
        pass
    else:
        return (text, source)

    return (text, None)
python
def _get_document(self, source):
    """helper, open a file or url and return the content and identifier"""
    scheme_url = source
    if not source.startswith("http"):
        scheme_url = "http://%s" % source

    text = source

    try:
        text = urllib.urlopen(scheme_url).read()
    except:
        pass
    else:
        return (text, scheme_url)

    try:
        text = open(source, "r").read()
    except:
        pass
    else:
        return (text, source)

    return (text, None)
[ "def", "_get_document", "(", "self", ",", "source", ")", ":", "scheme_url", "=", "source", "if", "not", "source", ".", "startswith", "(", "\"http\"", ")", ":", "scheme_url", "=", "\"http://%s\"", "%", "source", "text", "=", "source", "try", ":", "text", "=", "urllib", ".", "urlopen", "(", "scheme_url", ")", ".", "read", "(", ")", "except", ":", "pass", "else", ":", "return", "(", "text", ",", "scheme_url", ")", "try", ":", "text", "=", "open", "(", "source", ",", "\"r\"", ")", ".", "read", "(", ")", "except", ":", "pass", "else", ":", "return", "(", "text", ",", "source", ")", "return", "(", "text", ",", "None", ")" ]
helper, open a file or url and return the content and identifier
[ "helper", "open", "a", "file", "or", "url", "and", "return", "the", "content", "and", "identifier" ]
7002316fbcd52f2e669f8372bf1338c572e3df4b
https://github.com/Parsely/schemato/blob/7002316fbcd52f2e669f8372bf1338c572e3df4b/schemato/schemato.py#L74-L96
train
kmike/port-for
port_for/api.py
select_random
def select_random(ports=None, exclude_ports=None):
    """
    Returns random unused port number.
    """
    if ports is None:
        ports = available_good_ports()

    if exclude_ports is None:
        exclude_ports = set()

    ports.difference_update(set(exclude_ports))

    for port in random.sample(ports, min(len(ports), 100)):
        if not port_is_used(port):
            return port
    raise PortForException("Can't select a port")
python
def select_random(ports=None, exclude_ports=None):
    """
    Returns random unused port number.
    """
    if ports is None:
        ports = available_good_ports()

    if exclude_ports is None:
        exclude_ports = set()

    ports.difference_update(set(exclude_ports))

    for port in random.sample(ports, min(len(ports), 100)):
        if not port_is_used(port):
            return port
    raise PortForException("Can't select a port")
[ "def", "select_random", "(", "ports", "=", "None", ",", "exclude_ports", "=", "None", ")", ":", "if", "ports", "is", "None", ":", "ports", "=", "available_good_ports", "(", ")", "if", "exclude_ports", "is", "None", ":", "exclude_ports", "=", "set", "(", ")", "ports", ".", "difference_update", "(", "set", "(", "exclude_ports", ")", ")", "for", "port", "in", "random", ".", "sample", "(", "ports", ",", "min", "(", "len", "(", "ports", ")", ",", "100", ")", ")", ":", "if", "not", "port_is_used", "(", "port", ")", ":", "return", "port", "raise", "PortForException", "(", "\"Can't select a port\"", ")" ]
Returns random unused port number.
[ "Returns", "random", "unused", "port", "number", "." ]
f61ebf3c2caf54eabe8233b40ef67b973176a6f5
https://github.com/kmike/port-for/blob/f61ebf3c2caf54eabe8233b40ef67b973176a6f5/port_for/api.py#L15-L30
train
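A usage sketch, assuming the port-for package is installed so the function can be imported from port_for.api (the module path given in this record); the excluded port is arbitrary:

from port_for.api import select_random

# Pick a random unused port from the library's "good" ranges,
# skipping one we know another service wants.
port = select_random(exclude_ports={8080})
print(port)  # e.g. 34567 -- whatever happened to be free
# Raises PortForException if no free port can be found.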
kmike/port-for
port_for/api.py
good_port_ranges
def good_port_ranges(ports=None, min_range_len=20, border=3):
    """
    Returns a list of 'good' port ranges.
    Such ranges are large and don't contain ephemeral or well-known ports.
    Ranges borders are also excluded.
    """
    min_range_len += border*2
    if ports is None:
        ports = available_ports()
    ranges = utils.to_ranges(list(ports))
    lenghts = sorted([(r[1]-r[0], r) for r in ranges], reverse=True)
    long_ranges = [l[1] for l in lenghts if l[0] >= min_range_len]
    without_borders = [(low+border, high-border) for low, high in long_ranges]
    return without_borders
python
def good_port_ranges(ports=None, min_range_len=20, border=3):
    """
    Returns a list of 'good' port ranges.
    Such ranges are large and don't contain ephemeral or well-known ports.
    Ranges borders are also excluded.
    """
    min_range_len += border*2
    if ports is None:
        ports = available_ports()
    ranges = utils.to_ranges(list(ports))
    lenghts = sorted([(r[1]-r[0], r) for r in ranges], reverse=True)
    long_ranges = [l[1] for l in lenghts if l[0] >= min_range_len]
    without_borders = [(low+border, high-border) for low, high in long_ranges]
    return without_borders
[ "def", "good_port_ranges", "(", "ports", "=", "None", ",", "min_range_len", "=", "20", ",", "border", "=", "3", ")", ":", "min_range_len", "+=", "border", "*", "2", "if", "ports", "is", "None", ":", "ports", "=", "available_ports", "(", ")", "ranges", "=", "utils", ".", "to_ranges", "(", "list", "(", "ports", ")", ")", "lenghts", "=", "sorted", "(", "[", "(", "r", "[", "1", "]", "-", "r", "[", "0", "]", ",", "r", ")", "for", "r", "in", "ranges", "]", ",", "reverse", "=", "True", ")", "long_ranges", "=", "[", "l", "[", "1", "]", "for", "l", "in", "lenghts", "if", "l", "[", "0", "]", ">=", "min_range_len", "]", "without_borders", "=", "[", "(", "low", "+", "border", ",", "high", "-", "border", ")", "for", "low", ",", "high", "in", "long_ranges", "]", "return", "without_borders" ]
Returns a list of 'good' port ranges. Such ranges are large and don't contain ephemeral or well-known ports. Ranges borders are also excluded.
[ "Returns", "a", "list", "of", "good", "port", "ranges", ".", "Such", "ranges", "are", "large", "and", "don", "t", "contain", "ephemeral", "or", "well", "-", "known", "ports", ".", "Ranges", "borders", "are", "also", "excluded", "." ]
f61ebf3c2caf54eabe8233b40ef67b973176a6f5
https://github.com/kmike/port-for/blob/f61ebf3c2caf54eabe8233b40ef67b973176a6f5/port_for/api.py#L61-L74
train
kmike/port-for
port_for/api.py
port_is_used
def port_is_used(port, host='127.0.0.1'):
    """
    Returns if port is used. Port is considered used if the current process
    can't bind to it or the port doesn't refuse connections.
    """
    unused = _can_bind(port, host) and _refuses_connection(port, host)
    return not unused
python
def port_is_used(port, host='127.0.0.1'):
    """
    Returns if port is used. Port is considered used if the current process
    can't bind to it or the port doesn't refuse connections.
    """
    unused = _can_bind(port, host) and _refuses_connection(port, host)
    return not unused
[ "def", "port_is_used", "(", "port", ",", "host", "=", "'127.0.0.1'", ")", ":", "unused", "=", "_can_bind", "(", "port", ",", "host", ")", "and", "_refuses_connection", "(", "port", ",", "host", ")", "return", "not", "unused" ]
Returns if port is used. Port is considered used if the current process can't bind to it or the port doesn't refuse connections.
[ "Returns", "if", "port", "is", "used", ".", "Port", "is", "considered", "used", "if", "the", "current", "process", "can", "t", "bind", "to", "it", "or", "the", "port", "doesn", "t", "refuse", "connections", "." ]
f61ebf3c2caf54eabe8233b40ef67b973176a6f5
https://github.com/kmike/port-for/blob/f61ebf3c2caf54eabe8233b40ef67b973176a6f5/port_for/api.py#L83-L89
train
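A companion sketch for the check above, under the same assumption that port-for is installed; the port numbers are only illustrative:

from port_for.api import port_is_used

# A port counts as used if this process can't bind it or something answers on it.
print(port_is_used(80))     # often True on a host running a web server
print(port_is_used(54321))  # usually False unless something happens to hold it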
wimglenn/advent-of-code-data
aocd/get.py
current_day
def current_day():
    """
    Most recent day, if it's during the Advent of Code. Happy Holidays!
    Day 1 is assumed, otherwise.
    """
    aoc_now = datetime.datetime.now(tz=AOC_TZ)
    if aoc_now.month != 12:
        log.warning("current_day is only available in December (EST)")
        return 1
    day = min(aoc_now.day, 25)
    return day
python
def current_day():
    """
    Most recent day, if it's during the Advent of Code. Happy Holidays!
    Day 1 is assumed, otherwise.
    """
    aoc_now = datetime.datetime.now(tz=AOC_TZ)
    if aoc_now.month != 12:
        log.warning("current_day is only available in December (EST)")
        return 1
    day = min(aoc_now.day, 25)
    return day
[ "def", "current_day", "(", ")", ":", "aoc_now", "=", "datetime", ".", "datetime", ".", "now", "(", "tz", "=", "AOC_TZ", ")", "if", "aoc_now", ".", "month", "!=", "12", ":", "log", ".", "warning", "(", "\"current_day is only available in December (EST)\"", ")", "return", "1", "day", "=", "min", "(", "aoc_now", ".", "day", ",", "25", ")", "return", "day" ]
Most recent day, if it's during the Advent of Code. Happy Holidays! Day 1 is assumed, otherwise.
[ "Most", "recent", "day", "if", "it", "s", "during", "the", "Advent", "of", "Code", ".", "Happy", "Holidays!", "Day", "1", "is", "assumed", "otherwise", "." ]
a3856459d225840f2b6919659fc65aa7a6a74533
https://github.com/wimglenn/advent-of-code-data/blob/a3856459d225840f2b6919659fc65aa7a6a74533/aocd/get.py#L57-L67
train
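A standalone sketch of the same date logic, with a fixed UTC-5 offset standing in for the library's AOC_TZ constant (an assumption) and the warning log dropped:

import datetime

# Assumed stand-in for aocd's AOC_TZ (US/Eastern, ignoring DST).
AOC_TZ = datetime.timezone(datetime.timedelta(hours=-5))

def current_day():
    """Most recent AoC day: capped at 25 in December, 1 the rest of the year."""
    aoc_now = datetime.datetime.now(tz=AOC_TZ)
    if aoc_now.month != 12:
        return 1
    return min(aoc_now.day, 25)

print(current_day())  # 1..25 during December (EST), otherwise 1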
astraw38/lint
lint/linters/lint_factory.py
LintFactory.register_linter
def register_linter(linter):
    """
    Register a Linter class for file verification.

    :param linter:
    :return:
    """
    if hasattr(linter, "EXTS") and hasattr(linter, "run"):
        LintFactory.PLUGINS.append(linter)
    else:
        raise LinterException("Linter does not have 'run' method or EXTS variable!")
python
def register_linter(linter):
    """
    Register a Linter class for file verification.

    :param linter:
    :return:
    """
    if hasattr(linter, "EXTS") and hasattr(linter, "run"):
        LintFactory.PLUGINS.append(linter)
    else:
        raise LinterException("Linter does not have 'run' method or EXTS variable!")
[ "def", "register_linter", "(", "linter", ")", ":", "if", "hasattr", "(", "linter", ",", "\"EXTS\"", ")", "and", "hasattr", "(", "linter", ",", "\"run\"", ")", ":", "LintFactory", ".", "PLUGINS", ".", "append", "(", "linter", ")", "else", ":", "raise", "LinterException", "(", "\"Linter does not have 'run' method or EXTS variable!\"", ")" ]
Register a Linter class for file verification. :param linter: :return:
[ "Register", "a", "Linter", "class", "for", "file", "verification", "." ]
162ceefcb812f07d18544aaa887b9ec4f102cfb1
https://github.com/astraw38/lint/blob/162ceefcb812f07d18544aaa887b9ec4f102cfb1/lint/linters/lint_factory.py#L32-L42
train
kavdev/ldap-groups
ldap_groups/utils.py
escape_query
def escape_query(query):
    """Escapes certain filter characters from an LDAP query."""

    return query.replace("\\", r"\5C").replace("*", r"\2A").replace("(", r"\28").replace(")", r"\29")
python
def escape_query(query):
    """Escapes certain filter characters from an LDAP query."""

    return query.replace("\\", r"\5C").replace("*", r"\2A").replace("(", r"\28").replace(")", r"\29")
[ "def", "escape_query", "(", "query", ")", ":", "return", "query", ".", "replace", "(", "\"\\\\\"", ",", "r\"\\5C\"", ")", ".", "replace", "(", "\"*\"", ",", "r\"\\2A\"", ")", ".", "replace", "(", "\"(\"", ",", "r\"\\28\"", ")", ".", "replace", "(", "\")\"", ",", "r\"\\29\"", ")" ]
Escapes certain filter characters from an LDAP query.
[ "Escapes", "certain", "filter", "characters", "from", "an", "LDAP", "query", "." ]
0dd3a7d9eafa3903127364839b12a4b3dd3ca521
https://github.com/kavdev/ldap-groups/blob/0dd3a7d9eafa3903127364839b12a4b3dd3ca521/ldap_groups/utils.py#L23-L26
train
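The escaping is plain string substitution, so it can be exercised standalone; the filter values below are made up:

def escape_query(query):
    """Escapes certain filter characters from an LDAP query."""
    # Backslash is replaced first so escapes added later aren't re-escaped.
    return (query.replace("\\", r"\5C")
                 .replace("*", r"\2A")
                 .replace("(", r"\28")
                 .replace(")", r"\29"))

print(escape_query("Smith (Admin)"))   # Smith \28Admin\29
print(escape_query("*"))               # \2A
print(escape_query(r"DOMAIN\user"))    # DOMAIN\5Cuser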
ambitioninc/kmatch
kmatch/kmatch.py
K._validate_xor_args
def _validate_xor_args(self, p):
    """
    Raises ValueError if 2 arguments are not passed to an XOR
    """
    if len(p[1]) != 2:
        raise ValueError('Invalid syntax: XOR only accepts 2 arguments, got {0}: {1}'.format(len(p[1]), p))
python
def _validate_xor_args(self, p):
    """
    Raises ValueError if 2 arguments are not passed to an XOR
    """
    if len(p[1]) != 2:
        raise ValueError('Invalid syntax: XOR only accepts 2 arguments, got {0}: {1}'.format(len(p[1]), p))
[ "def", "_validate_xor_args", "(", "self", ",", "p", ")", ":", "if", "len", "(", "p", "[", "1", "]", ")", "!=", "2", ":", "raise", "ValueError", "(", "'Invalid syntax: XOR only accepts 2 arguments, got {0}: {1}'", ".", "format", "(", "len", "(", "p", "[", "1", "]", ")", ",", "p", ")", ")" ]
Raises ValueError if 2 arguments are not passed to an XOR
[ "Raises", "ValueError", "if", "2", "arguments", "are", "not", "passed", "to", "an", "XOR" ]
22bb5f0c1d86d0e4a69bdf18f092f095934ebb0d
https://github.com/ambitioninc/kmatch/blob/22bb5f0c1d86d0e4a69bdf18f092f095934ebb0d/kmatch/kmatch.py#L98-L103
train
ambitioninc/kmatch
kmatch/kmatch.py
K._match_value_filter
def _match_value_filter(self, p, value):
    """
    Returns True of False if value in the pattern p matches the filter.
    """
    return self._VALUE_FILTER_MAP[p[0]](value[p[1]], p[2])
python
def _match_value_filter(self, p, value):
    """
    Returns True of False if value in the pattern p matches the filter.
    """
    return self._VALUE_FILTER_MAP[p[0]](value[p[1]], p[2])
[ "def", "_match_value_filter", "(", "self", ",", "p", ",", "value", ")", ":", "return", "self", ".", "_VALUE_FILTER_MAP", "[", "p", "[", "0", "]", "]", "(", "value", "[", "p", "[", "1", "]", "]", ",", "p", "[", "2", "]", ")" ]
Returns True of False if value in the pattern p matches the filter.
[ "Returns", "True", "of", "False", "if", "value", "in", "the", "pattern", "p", "matches", "the", "filter", "." ]
22bb5f0c1d86d0e4a69bdf18f092f095934ebb0d
https://github.com/ambitioninc/kmatch/blob/22bb5f0c1d86d0e4a69bdf18f092f095934ebb0d/kmatch/kmatch.py#L139-L143
train
ambitioninc/kmatch
kmatch/kmatch.py
K.get_field_keys
def get_field_keys(self, pattern=None):
    """
    Builds a set of all field keys used in the pattern including nested fields.

    :param pattern: The kmatch pattern to get field keys from or None to use self.pattern
    :type pattern: list or None
    :returns: A set object of all field keys used in the pattern
    :rtype: set
    """
    # Use own pattern or passed in argument for recursion
    pattern = pattern or self.pattern

    # Validate the pattern so we can make assumptions about the data
    self._validate(pattern)

    keys = set()

    # Valid pattern length can only be 2 or 3
    # With key filters, field key is second item just like 3 item patterns
    if len(pattern) == 2 and pattern[0] not in self._KEY_FILTER_MAP:
        if pattern[0] in ('&', '|', '^'):
            # Pass each nested pattern to get_field_keys
            for filter_item in pattern[1]:
                keys = keys.union(self.get_field_keys(filter_item))
        else:  # pattern[0] == '!'
            keys = keys.union(self.get_field_keys(pattern[1]))
    else:  # Pattern length is 3
        keys.add(pattern[1])

    return keys
python
def get_field_keys(self, pattern=None):
    """
    Builds a set of all field keys used in the pattern including nested fields.

    :param pattern: The kmatch pattern to get field keys from or None to use self.pattern
    :type pattern: list or None
    :returns: A set object of all field keys used in the pattern
    :rtype: set
    """
    # Use own pattern or passed in argument for recursion
    pattern = pattern or self.pattern

    # Validate the pattern so we can make assumptions about the data
    self._validate(pattern)

    keys = set()

    # Valid pattern length can only be 2 or 3
    # With key filters, field key is second item just like 3 item patterns
    if len(pattern) == 2 and pattern[0] not in self._KEY_FILTER_MAP:
        if pattern[0] in ('&', '|', '^'):
            # Pass each nested pattern to get_field_keys
            for filter_item in pattern[1]:
                keys = keys.union(self.get_field_keys(filter_item))
        else:  # pattern[0] == '!'
            keys = keys.union(self.get_field_keys(pattern[1]))
    else:  # Pattern length is 3
        keys.add(pattern[1])

    return keys
[ "def", "get_field_keys", "(", "self", ",", "pattern", "=", "None", ")", ":", "# Use own pattern or passed in argument for recursion", "pattern", "=", "pattern", "or", "self", ".", "pattern", "# Validate the pattern so we can make assumptions about the data", "self", ".", "_validate", "(", "pattern", ")", "keys", "=", "set", "(", ")", "# Valid pattern length can only be 2 or 3", "# With key filters, field key is second item just like 3 item patterns", "if", "len", "(", "pattern", ")", "==", "2", "and", "pattern", "[", "0", "]", "not", "in", "self", ".", "_KEY_FILTER_MAP", ":", "if", "pattern", "[", "0", "]", "in", "(", "'&'", ",", "'|'", ",", "'^'", ")", ":", "# Pass each nested pattern to get_field_keys", "for", "filter_item", "in", "pattern", "[", "1", "]", ":", "keys", "=", "keys", ".", "union", "(", "self", ".", "get_field_keys", "(", "filter_item", ")", ")", "else", ":", "# pattern[0] == '!'", "keys", "=", "keys", ".", "union", "(", "self", ".", "get_field_keys", "(", "pattern", "[", "1", "]", ")", ")", "else", ":", "# Pattern length is 3", "keys", ".", "add", "(", "pattern", "[", "1", "]", ")", "return", "keys" ]
Builds a set of all field keys used in the pattern including nested fields. :param pattern: The kmatch pattern to get field keys from or None to use self.pattern :type pattern: list or None :returns: A set object of all field keys used in the pattern :rtype: set
[ "Builds", "a", "set", "of", "all", "field", "keys", "used", "in", "the", "pattern", "including", "nested", "fields", "." ]
22bb5f0c1d86d0e4a69bdf18f092f095934ebb0d
https://github.com/ambitioninc/kmatch/blob/22bb5f0c1d86d0e4a69bdf18f092f095934ebb0d/kmatch/kmatch.py#L164-L194
train
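A usage sketch against the kmatch K class, assuming the package is installed and that '==', '?' and the boolean operators behave as the code above suggests; the pattern and sample dict are invented:

from kmatch import K

# price >= 10 AND (category == 'book' OR an 'in_stock' key exists)
pattern = ['&', [
    ['>=', 'price', 10],
    ['|', [
        ['==', 'category', 'book'],
        ['?', 'in_stock'],
    ]],
]]

k = K(pattern)
print(k.get_field_keys())  # {'price', 'category', 'in_stock'}
print(k.match({'price': 12, 'category': 'book', 'in_stock': True}))  # True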
jim-easterbrook/pyctools
src/pyctools/core/frame.py
Metadata.to_file
def to_file(self, path):
    """Write metadata to an image, video or XMP sidecar file.

    :param str path: The image/video file path name.

    """
    xmp_path = path + '.xmp'
    # remove any existing XMP file
    if os.path.exists(xmp_path):
        os.unlink(xmp_path)
    # attempt to open image/video file for metadata
    md_path = path
    md = GExiv2.Metadata()
    try:
        md.open_path(md_path)
    except GLib.GError:
        # file type does not support metadata so use XMP sidecar
        md_path = xmp_path
        # create empty XMP file
        with open(md_path, 'w') as of:
            of.write('''<?xpacket begin="" id="W5M0MpCehiHzreSzNTczkc9d"?>
<x:xmpmeta xmlns:x="adobe:ns:meta/" x:xmptk="XMP Core 4.4.0-Exiv2">
 <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
  <rdf:Description rdf:about=""
    xmlns:xmp="http://ns.adobe.com/xap/1.0/"
   xmp:CreatorTool=""/>
 </rdf:RDF>
</x:xmpmeta>
<?xpacket end="w"?>''')
        md = GExiv2.Metadata()
        md.open_path(md_path)
    # add our namespace
    md.register_xmp_namespace(
        'https://github.com/jim-easterbrook/pyctools', 'pyctools')
    # copy metadata
    for tag, value in self.data.items():
        if md.get_tag_type(tag) in ('XmpBag', 'XmpSeq'):
            md.set_tag_multiple(tag, value)
        else:
            md.set_tag_string(tag, value)
    if self.comment is not None:
        md.set_comment(self.comment)
    # save file
    md.save_file(md_path)
python
def to_file(self, path):
    """Write metadata to an image, video or XMP sidecar file.

    :param str path: The image/video file path name.

    """
    xmp_path = path + '.xmp'
    # remove any existing XMP file
    if os.path.exists(xmp_path):
        os.unlink(xmp_path)
    # attempt to open image/video file for metadata
    md_path = path
    md = GExiv2.Metadata()
    try:
        md.open_path(md_path)
    except GLib.GError:
        # file type does not support metadata so use XMP sidecar
        md_path = xmp_path
        # create empty XMP file
        with open(md_path, 'w') as of:
            of.write('''<?xpacket begin="" id="W5M0MpCehiHzreSzNTczkc9d"?>
<x:xmpmeta xmlns:x="adobe:ns:meta/" x:xmptk="XMP Core 4.4.0-Exiv2">
 <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
  <rdf:Description rdf:about=""
    xmlns:xmp="http://ns.adobe.com/xap/1.0/"
   xmp:CreatorTool=""/>
 </rdf:RDF>
</x:xmpmeta>
<?xpacket end="w"?>''')
        md = GExiv2.Metadata()
        md.open_path(md_path)
    # add our namespace
    md.register_xmp_namespace(
        'https://github.com/jim-easterbrook/pyctools', 'pyctools')
    # copy metadata
    for tag, value in self.data.items():
        if md.get_tag_type(tag) in ('XmpBag', 'XmpSeq'):
            md.set_tag_multiple(tag, value)
        else:
            md.set_tag_string(tag, value)
    if self.comment is not None:
        md.set_comment(self.comment)
    # save file
    md.save_file(md_path)
[ "def", "to_file", "(", "self", ",", "path", ")", ":", "xmp_path", "=", "path", "+", "'.xmp'", "# remove any existing XMP file", "if", "os", ".", "path", ".", "exists", "(", "xmp_path", ")", ":", "os", ".", "unlink", "(", "xmp_path", ")", "# attempt to open image/video file for metadata", "md_path", "=", "path", "md", "=", "GExiv2", ".", "Metadata", "(", ")", "try", ":", "md", ".", "open_path", "(", "md_path", ")", "except", "GLib", ".", "GError", ":", "# file type does not support metadata so use XMP sidecar", "md_path", "=", "xmp_path", "# create empty XMP file", "with", "open", "(", "md_path", ",", "'w'", ")", "as", "of", ":", "of", ".", "write", "(", "'''<?xpacket begin=\"\" id=\"W5M0MpCehiHzreSzNTczkc9d\"?>\n<x:xmpmeta xmlns:x=\"adobe:ns:meta/\" x:xmptk=\"XMP Core 4.4.0-Exiv2\">\n <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\">\n <rdf:Description rdf:about=\"\"\n xmlns:xmp=\"http://ns.adobe.com/xap/1.0/\"\n xmp:CreatorTool=\"\"/>\n </rdf:RDF>\n</x:xmpmeta>\n<?xpacket end=\"w\"?>'''", ")", "md", "=", "GExiv2", ".", "Metadata", "(", ")", "md", ".", "open_path", "(", "md_path", ")", "# add our namespace", "md", ".", "register_xmp_namespace", "(", "'https://github.com/jim-easterbrook/pyctools'", ",", "'pyctools'", ")", "# copy metadata", "for", "tag", ",", "value", "in", "self", ".", "data", ".", "items", "(", ")", ":", "if", "md", ".", "get_tag_type", "(", "tag", ")", "in", "(", "'XmpBag'", ",", "'XmpSeq'", ")", ":", "md", ".", "set_tag_multiple", "(", "tag", ",", "value", ")", "else", ":", "md", ".", "set_tag_string", "(", "tag", ",", "value", ")", "if", "self", ".", "comment", "is", "not", "None", ":", "md", ".", "set_comment", "(", "self", ".", "comment", ")", "# save file", "md", ".", "save_file", "(", "md_path", ")" ]
Write metadata to an image, video or XMP sidecar file. :param str path: The image/video file path name.
[ "Write", "metadata", "to", "an", "image", "video", "or", "XMP", "sidecar", "file", "." ]
2a958665326892f45f249bebe62c2c23f306732b
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/frame.py#L231-L274
train
jim-easterbrook/pyctools
src/pyctools/core/frame.py
Metadata.image_size
def image_size(self):
    """Get image dimensions from metadata.

    This is primarily used by the
    :py:class:`~pyctools.components.io.rawfilereader.RawFileReader`
    component, as raw video files don't have a header in which to
    store the dimensions.

    :returns: width, height.

    :rtype: :py:class:`int`, :py:class:`int`

    """
    xlen = None
    ylen = None
    for tag in ('Xmp.pyctools.xlen', 'Exif.Photo.PixelXDimension',
                'Exif.Image.ImageWidth', 'Xmp.tiff.ImageWidth'):
        if tag in self.data:
            xlen = int(self.data[tag])
            break
    for tag in ('Xmp.pyctools.ylen', 'Exif.Photo.PixelYDimension',
                'Exif.Image.ImageLength', 'Xmp.tiff.ImageLength'):
        if tag in self.data:
            ylen = int(self.data[tag])
            break
    if xlen and ylen:
        return xlen, ylen
    raise RuntimeError('Metadata does not have image dimensions')
python
def image_size(self): """Get image dimensions from metadata. This is primarily used by the :py:class:`~pyctools.components.io.rawfilereader.RawFileReader` component, as raw video files don't have a header in which to store the dimensions. :returns: width, height. :rtype: :py:class:`int`, :py:class:`int` """ xlen = None ylen = None for tag in ('Xmp.pyctools.xlen', 'Exif.Photo.PixelXDimension', 'Exif.Image.ImageWidth', 'Xmp.tiff.ImageWidth'): if tag in self.data: xlen = int(self.data[tag]) break for tag in ('Xmp.pyctools.ylen', 'Exif.Photo.PixelYDimension', 'Exif.Image.ImageLength', 'Xmp.tiff.ImageLength'): if tag in self.data: ylen = int(self.data[tag]) break if xlen and ylen: return xlen, ylen raise RuntimeError('Metadata does not have image dimensions')
[ "def", "image_size", "(", "self", ")", ":", "xlen", "=", "None", "ylen", "=", "None", "for", "tag", "in", "(", "'Xmp.pyctools.xlen'", ",", "'Exif.Photo.PixelXDimension'", ",", "'Exif.Image.ImageWidth'", ",", "'Xmp.tiff.ImageWidth'", ")", ":", "if", "tag", "in", "self", ".", "data", ":", "xlen", "=", "int", "(", "self", ".", "data", "[", "tag", "]", ")", "break", "for", "tag", "in", "(", "'Xmp.pyctools.ylen'", ",", "'Exif.Photo.PixelYDimension'", ",", "'Exif.Image.ImageLength'", ",", "'Xmp.tiff.ImageLength'", ")", ":", "if", "tag", "in", "self", ".", "data", ":", "ylen", "=", "int", "(", "self", ".", "data", "[", "tag", "]", ")", "break", "if", "xlen", "and", "ylen", ":", "return", "xlen", ",", "ylen", "raise", "RuntimeError", "(", "'Metadata does not have image dimensions'", ")" ]
Get image dimensions from metadata. This is primarily used by the :py:class:`~pyctools.components.io.rawfilereader.RawFileReader` component, as raw video files don't have a header in which to store the dimensions. :returns: width, height. :rtype: :py:class:`int`, :py:class:`int`
[ "Get", "image", "dimensions", "from", "metadata", "." ]
2a958665326892f45f249bebe62c2c23f306732b
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/frame.py#L295-L322
train
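The two pyctools Metadata records above (to_file and image_size) are easiest to read together. The sketch below is a minimal usage example, not code taken from the repository: the import path follows the recorded file path (src/pyctools/core/frame.py), while the no-argument Metadata() constructor and the file name are assumptions, and writing the sidecar needs GExiv2 available at runtime.

from pyctools.core.frame import Metadata

md = Metadata()                 # assumed no-argument constructor
md.set('xlen', '1920')          # stored as Xmp.pyctools.xlen (see the Metadata.set record below)
md.set('ylen', '1080')
md.to_file('clip.yuv')          # raw video cannot hold metadata, so a clip.yuv.xmp sidecar is written
print(md.image_size())          # (1920, 1080), recovered from the Xmp.pyctools.* tags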
jim-easterbrook/pyctools
src/pyctools/core/frame.py
Metadata.get
def get(self, tag, default=None): """Get a metadata value. Each metadata value is referenced by a ``tag`` -- a short string such as ``'xlen'`` or ``'audit'``. In the sidecar file these tag names are prepended with ``'Xmp.pyctools.'``, which corresponds to a custom namespace in the XML file. :param str tag: The tag name. :returns: The metadata value associated with ``tag``. :rtype: :py:class:`str` """ full_tag = 'Xmp.pyctools.' + tag if full_tag in self.data: return self.data[full_tag] return default
python
def get(self, tag, default=None): """Get a metadata value. Each metadata value is referenced by a ``tag`` -- a short string such as ``'xlen'`` or ``'audit'``. In the sidecar file these tag names are prepended with ``'Xmp.pyctools.'``, which corresponds to a custom namespace in the XML file. :param str tag: The tag name. :returns: The metadata value associated with ``tag``. :rtype: :py:class:`str` """ full_tag = 'Xmp.pyctools.' + tag if full_tag in self.data: return self.data[full_tag] return default
[ "def", "get", "(", "self", ",", "tag", ",", "default", "=", "None", ")", ":", "full_tag", "=", "'Xmp.pyctools.'", "+", "tag", "if", "full_tag", "in", "self", ".", "data", ":", "return", "self", ".", "data", "[", "full_tag", "]", "return", "default" ]
Get a metadata value. Each metadata value is referenced by a ``tag`` -- a short string such as ``'xlen'`` or ``'audit'``. In the sidecar file these tag names are prepended with ``'Xmp.pyctools.'``, which corresponds to a custom namespace in the XML file. :param str tag: The tag name. :returns: The metadata value associated with ``tag``. :rtype: :py:class:`str`
[ "Get", "a", "metadata", "value", "." ]
2a958665326892f45f249bebe62c2c23f306732b
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/frame.py#L324-L342
train
jim-easterbrook/pyctools
src/pyctools/core/frame.py
Metadata.set
def set(self, tag, value): """Set a metadata value. Each metadata value is referenced by a ``tag`` -- a short string such as ``'xlen'`` or ``'audit'``. In the sidecar file these tag names are prepended with ``'Xmp.pyctools.'``, which corresponds to a custom namespace in the XML file. :param str tag: The tag name. :param str value: The metadata value. """ full_tag = 'Xmp.pyctools.' + tag self.data[full_tag] = value
python
def set(self, tag, value): """Set a metadata value. Each metadata value is referenced by a ``tag`` -- a short string such as ``'xlen'`` or ``'audit'``. In the sidecar file these tag names are prepended with ``'Xmp.pyctools.'``, which corresponds to a custom namespace in the XML file. :param str tag: The tag name. :param str value: The metadata value. """ full_tag = 'Xmp.pyctools.' + tag self.data[full_tag] = value
[ "def", "set", "(", "self", ",", "tag", ",", "value", ")", ":", "full_tag", "=", "'Xmp.pyctools.'", "+", "tag", "self", ".", "data", "[", "full_tag", "]", "=", "value" ]
Set a metadata value. Each metadata value is referenced by a ``tag`` -- a short string such as ``'xlen'`` or ``'audit'``. In the sidecar file these tag names are prepended with ``'Xmp.pyctools.'``, which corresponds to a custom namespace in the XML file. :param str tag: The tag name. :param str value: The metadata value.
[ "Set", "a", "metadata", "value", "." ]
2a958665326892f45f249bebe62c2c23f306732b
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/frame.py#L344-L358
train
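As a small sketch of the get/set pair above, the snippet below uses the 'audit' tag that other records in this file (for example GaussianFilterCore) append to. Only the default-value behaviour shown in the get record is relied on; the no-argument Metadata() constructor is an assumption.

from pyctools.core.frame import Metadata

md = Metadata()
audit = md.get('audit', '')                        # falls back to '' if the tag has never been set
md.set('audit', audit + 'data = resize(data)\n')   # the Xmp.pyctools. prefix is added internally
print(md.get('audit'))                             # ends with the line just appended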
biplap-sarkar/pylimit
pylimit/redis_helper.py
RedisHelper.get_connection
def get_connection(self, is_read_only=False) -> redis.StrictRedis: """ Gets a StrictRedis connection for normal redis or for redis sentinel based upon redis mode in configuration. :type is_read_only: bool :param is_read_only: In case of redis sentinel, it returns connection to slave :return: Returns a StrictRedis connection """ if self.connection is not None: return self.connection if self.is_sentinel: kwargs = dict() if self.password: kwargs["password"] = self.password sentinel = Sentinel([(self.host, self.port)], **kwargs) if is_read_only: connection = sentinel.slave_for(self.sentinel_service, decode_responses=True) else: connection = sentinel.master_for(self.sentinel_service, decode_responses=True) else: connection = redis.StrictRedis(host=self.host, port=self.port, decode_responses=True, password=self.password) self.connection = connection return connection
python
def get_connection(self, is_read_only=False) -> redis.StrictRedis: """ Gets a StrictRedis connection for normal redis or for redis sentinel based upon redis mode in configuration. :type is_read_only: bool :param is_read_only: In case of redis sentinel, it returns connection to slave :return: Returns a StrictRedis connection """ if self.connection is not None: return self.connection if self.is_sentinel: kwargs = dict() if self.password: kwargs["password"] = self.password sentinel = Sentinel([(self.host, self.port)], **kwargs) if is_read_only: connection = sentinel.slave_for(self.sentinel_service, decode_responses=True) else: connection = sentinel.master_for(self.sentinel_service, decode_responses=True) else: connection = redis.StrictRedis(host=self.host, port=self.port, decode_responses=True, password=self.password) self.connection = connection return connection
[ "def", "get_connection", "(", "self", ",", "is_read_only", "=", "False", ")", "->", "redis", ".", "StrictRedis", ":", "if", "self", ".", "connection", "is", "not", "None", ":", "return", "self", ".", "connection", "if", "self", ".", "is_sentinel", ":", "kwargs", "=", "dict", "(", ")", "if", "self", ".", "password", ":", "kwargs", "[", "\"password\"", "]", "=", "self", ".", "password", "sentinel", "=", "Sentinel", "(", "[", "(", "self", ".", "host", ",", "self", ".", "port", ")", "]", ",", "*", "*", "kwargs", ")", "if", "is_read_only", ":", "connection", "=", "sentinel", ".", "slave_for", "(", "self", ".", "sentinel_service", ",", "decode_responses", "=", "True", ")", "else", ":", "connection", "=", "sentinel", ".", "master_for", "(", "self", ".", "sentinel_service", ",", "decode_responses", "=", "True", ")", "else", ":", "connection", "=", "redis", ".", "StrictRedis", "(", "host", "=", "self", ".", "host", ",", "port", "=", "self", ".", "port", ",", "decode_responses", "=", "True", ",", "password", "=", "self", ".", "password", ")", "self", ".", "connection", "=", "connection", "return", "connection" ]
Gets a StrictRedis connection for normal redis or for redis sentinel based upon redis mode in configuration. :type is_read_only: bool :param is_read_only: In case of redis sentinel, it returns connection to slave :return: Returns a StrictRedis connection
[ "Gets", "a", "StrictRedis", "connection", "for", "normal", "redis", "or", "for", "redis", "sentinel", "based", "upon", "redis", "mode", "in", "configuration", "." ]
d2170a8c02a9be083f37c9e4ec1e28700a33d64e
https://github.com/biplap-sarkar/pylimit/blob/d2170a8c02a9be083f37c9e4ec1e28700a33d64e/pylimit/redis_helper.py#L18-L43
train
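The RedisHelper.get_connection record above leaves the constructor out of view, so the sketch below guesses its arguments from the attributes the method reads (host, port, password, is_sentinel, sentinel_service); treat that signature as an assumption. The import path follows the recorded file path.

from pylimit.redis_helper import RedisHelper

helper = RedisHelper(host='localhost', port=6379, password=None,
                     is_sentinel=False, sentinel_service=None)   # assumed signature
conn = helper.get_connection()
conn.set('greeting', 'hello')
print(conn.get('greeting'))   # 'hello' as str, because decode_responses=True
# Note: the method caches the first connection, so a later
# helper.get_connection(is_read_only=True) returns that same cached object.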
nicoddemus/ss
ss.py
calculate_hash_for_file
def calculate_hash_for_file(name): ''' Calculates the hash for the given filename. Algorithm from: http://trac.opensubtitles.org/projects/opensubtitles/wiki/HashSourceCodes @param name: str Path to the file @return: str The calculated hash code, as an hex string. ''' longlongformat = 'q' # long long bytesize = struct.calcsize(longlongformat) f = open(name, "rb") filesize = os.path.getsize(name) hash = filesize minimum_size = 65536 * 2 assert filesize >= minimum_size, \ 'Movie {name} must have at least {min} bytes'.format(min=minimum_size, name=name) for x in range(65536//bytesize): buffer = f.read(bytesize) (l_value,)= struct.unpack(longlongformat, buffer) hash += l_value hash = hash & 0xFFFFFFFFFFFFFFFF #to remain as 64bit number f.seek(max(0,filesize-65536),0) for x in range(65536//bytesize): buffer = f.read(bytesize) (l_value,)= struct.unpack(longlongformat, buffer) hash += l_value hash = hash & 0xFFFFFFFFFFFFFFFF f.close() returnedhash = "%016x" % hash return returnedhash
python
def calculate_hash_for_file(name): ''' Calculates the hash for the given filename. Algorithm from: http://trac.opensubtitles.org/projects/opensubtitles/wiki/HashSourceCodes @param name: str Path to the file @return: str The calculated hash code, as an hex string. ''' longlongformat = 'q' # long long bytesize = struct.calcsize(longlongformat) f = open(name, "rb") filesize = os.path.getsize(name) hash = filesize minimum_size = 65536 * 2 assert filesize >= minimum_size, \ 'Movie {name} must have at least {min} bytes'.format(min=minimum_size, name=name) for x in range(65536//bytesize): buffer = f.read(bytesize) (l_value,)= struct.unpack(longlongformat, buffer) hash += l_value hash = hash & 0xFFFFFFFFFFFFFFFF #to remain as 64bit number f.seek(max(0,filesize-65536),0) for x in range(65536//bytesize): buffer = f.read(bytesize) (l_value,)= struct.unpack(longlongformat, buffer) hash += l_value hash = hash & 0xFFFFFFFFFFFFFFFF f.close() returnedhash = "%016x" % hash return returnedhash
[ "def", "calculate_hash_for_file", "(", "name", ")", ":", "longlongformat", "=", "'q'", "# long long", "bytesize", "=", "struct", ".", "calcsize", "(", "longlongformat", ")", "f", "=", "open", "(", "name", ",", "\"rb\"", ")", "filesize", "=", "os", ".", "path", ".", "getsize", "(", "name", ")", "hash", "=", "filesize", "minimum_size", "=", "65536", "*", "2", "assert", "filesize", ">=", "minimum_size", ",", "'Movie {name} must have at least {min} bytes'", ".", "format", "(", "min", "=", "minimum_size", ",", "name", "=", "name", ")", "for", "x", "in", "range", "(", "65536", "//", "bytesize", ")", ":", "buffer", "=", "f", ".", "read", "(", "bytesize", ")", "(", "l_value", ",", ")", "=", "struct", ".", "unpack", "(", "longlongformat", ",", "buffer", ")", "hash", "+=", "l_value", "hash", "=", "hash", "&", "0xFFFFFFFFFFFFFFFF", "#to remain as 64bit number", "f", ".", "seek", "(", "max", "(", "0", ",", "filesize", "-", "65536", ")", ",", "0", ")", "for", "x", "in", "range", "(", "65536", "//", "bytesize", ")", ":", "buffer", "=", "f", ".", "read", "(", "bytesize", ")", "(", "l_value", ",", ")", "=", "struct", ".", "unpack", "(", "longlongformat", ",", "buffer", ")", "hash", "+=", "l_value", "hash", "=", "hash", "&", "0xFFFFFFFFFFFFFFFF", "f", ".", "close", "(", ")", "returnedhash", "=", "\"%016x\"", "%", "hash", "return", "returnedhash" ]
Calculates the hash for the given filename. Algorithm from: http://trac.opensubtitles.org/projects/opensubtitles/wiki/HashSourceCodes @param name: str Path to the file @return: str The calculated hash code, as an hex string.
[ "Calculates", "the", "hash", "for", "the", "given", "filename", "." ]
df77c745e511f542c456450ed94adff1b969fc92
https://github.com/nicoddemus/ss/blob/df77c745e511f542c456450ed94adff1b969fc92/ss.py#L218-L259
train
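A one-line usage sketch for the OpenSubtitles hash above; the file name is a placeholder, and the assert in the record means the file must be at least 131072 bytes (65536 * 2).

from ss import calculate_hash_for_file

print(calculate_hash_for_file('some_movie.mkv'))   # a 16-character hex digest such as '8e245d9679d31e12'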
jim-easterbrook/pyctools
src/pyctools/core/config.py
ConfigMixin.set_config
def set_config(self, config): """Update the component's configuration. Use the :py:meth:`get_config` method to get a copy of the component's configuration, update that copy then call :py:meth:`set_config` to update the component. This enables the configuration to be changed in a threadsafe manner while the component is running, and allows several values to be changed at once. :param ConfigParent config: New configuration. """ # put copy of config on queue for running component self._configmixin_queue.append(copy.deepcopy(config)) # notify component, using thread safe method self.new_config()
python
def set_config(self, config): """Update the component's configuration. Use the :py:meth:`get_config` method to get a copy of the component's configuration, update that copy then call :py:meth:`set_config` to update the component. This enables the configuration to be changed in a threadsafe manner while the component is running, and allows several values to be changed at once. :param ConfigParent config: New configuration. """ # put copy of config on queue for running component self._configmixin_queue.append(copy.deepcopy(config)) # notify component, using thread safe method self.new_config()
[ "def", "set_config", "(", "self", ",", "config", ")", ":", "# put copy of config on queue for running component", "self", ".", "_configmixin_queue", ".", "append", "(", "copy", ".", "deepcopy", "(", "config", ")", ")", "# notify component, using thread safe method", "self", ".", "new_config", "(", ")" ]
Update the component's configuration. Use the :py:meth:`get_config` method to get a copy of the component's configuration, update that copy then call :py:meth:`set_config` to update the component. This enables the configuration to be changed in a threadsafe manner while the component is running, and allows several values to be changed at once. :param ConfigParent config: New configuration.
[ "Update", "the", "component", "s", "configuration", "." ]
2a958665326892f45f249bebe62c2c23f306732b
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/config.py#L381-L397
train
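The set_config docstring above describes a get/modify/set round trip; the sketch below illustrates that pattern with a component taken from elsewhere in this file. The configuration key name is a hypothetical stand-in, since the record lists no real keys, and updating the copied config by item assignment is assumed to be supported.

from pyctools.components.interp.gaussianfilter import GaussianFilter

comp = GaussianFilter()
cfg = comp.get_config()       # copy of the current configuration
cfg['xsigma'] = 1.5           # hypothetical key name
comp.set_config(cfg)          # queued and applied thread-safely, even while the component runs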
jim-easterbrook/pyctools
src/pyctools/components/interp/gaussianfilter.py
GaussianFilterCore
def GaussianFilterCore(x_sigma=0.0, y_sigma=0.0): """Gaussian filter generator core. Alternative to the :py:class:`GaussianFilter` component that can be used to make a non-reconfigurable resizer:: resize = Resize() resize.filter(GaussianFilterCore(x_sigma=1.5)) ... start(..., resize, ...) ... :keyword float x_sigma: Horizontal standard deviation parameter. :keyword float y_sigma: Vertical standard deviation parameter. :return: A :py:class:`~pyctools.core.frame.Frame` object containing the filter. """ def filter_1D(sigma): alpha = 1.0 / (2.0 * (max(sigma, 0.0001) ** 2.0)) coefs = [] coef = 1.0 while coef > 0.0001: coefs.append(coef) coef = math.exp(-(alpha * (float(len(coefs) ** 2)))) fil_dim = len(coefs) - 1 result = numpy.zeros(1 + (fil_dim * 2), dtype=numpy.float32) for n, coef in enumerate(coefs): result[fil_dim - n] = coef result[fil_dim + n] = coef # normalise result result /= result.sum() return result x_sigma = max(x_sigma, 0.0) y_sigma = max(y_sigma, 0.0) x_fil = filter_1D(x_sigma) y_fil = filter_1D(y_sigma) result = numpy.empty( [y_fil.shape[0], x_fil.shape[0], 1], dtype=numpy.float32) for y in range(y_fil.shape[0]): for x in range(x_fil.shape[0]): result[y, x, 0] = x_fil[x] * y_fil[y] out_frame = Frame() out_frame.data = result out_frame.type = 'fil' audit = out_frame.metadata.get('audit') audit += 'data = GaussianFilter()\n' if x_sigma != 0.0: audit += ' x_sigma: %g\n' % (x_sigma) if y_sigma != 0.0: audit += ' y_sigma: %g\n' % (y_sigma) out_frame.metadata.set('audit', audit) return out_frame
python
def GaussianFilterCore(x_sigma=0.0, y_sigma=0.0): """Gaussian filter generator core. Alternative to the :py:class:`GaussianFilter` component that can be used to make a non-reconfigurable resizer:: resize = Resize() resize.filter(GaussianFilterCore(x_sigma=1.5)) ... start(..., resize, ...) ... :keyword float x_sigma: Horizontal standard deviation parameter. :keyword float y_sigma: Vertical standard deviation parameter. :return: A :py:class:`~pyctools.core.frame.Frame` object containing the filter. """ def filter_1D(sigma): alpha = 1.0 / (2.0 * (max(sigma, 0.0001) ** 2.0)) coefs = [] coef = 1.0 while coef > 0.0001: coefs.append(coef) coef = math.exp(-(alpha * (float(len(coefs) ** 2)))) fil_dim = len(coefs) - 1 result = numpy.zeros(1 + (fil_dim * 2), dtype=numpy.float32) for n, coef in enumerate(coefs): result[fil_dim - n] = coef result[fil_dim + n] = coef # normalise result result /= result.sum() return result x_sigma = max(x_sigma, 0.0) y_sigma = max(y_sigma, 0.0) x_fil = filter_1D(x_sigma) y_fil = filter_1D(y_sigma) result = numpy.empty( [y_fil.shape[0], x_fil.shape[0], 1], dtype=numpy.float32) for y in range(y_fil.shape[0]): for x in range(x_fil.shape[0]): result[y, x, 0] = x_fil[x] * y_fil[y] out_frame = Frame() out_frame.data = result out_frame.type = 'fil' audit = out_frame.metadata.get('audit') audit += 'data = GaussianFilter()\n' if x_sigma != 0.0: audit += ' x_sigma: %g\n' % (x_sigma) if y_sigma != 0.0: audit += ' y_sigma: %g\n' % (y_sigma) out_frame.metadata.set('audit', audit) return out_frame
[ "def", "GaussianFilterCore", "(", "x_sigma", "=", "0.0", ",", "y_sigma", "=", "0.0", ")", ":", "def", "filter_1D", "(", "sigma", ")", ":", "alpha", "=", "1.0", "/", "(", "2.0", "*", "(", "max", "(", "sigma", ",", "0.0001", ")", "**", "2.0", ")", ")", "coefs", "=", "[", "]", "coef", "=", "1.0", "while", "coef", ">", "0.0001", ":", "coefs", ".", "append", "(", "coef", ")", "coef", "=", "math", ".", "exp", "(", "-", "(", "alpha", "*", "(", "float", "(", "len", "(", "coefs", ")", "**", "2", ")", ")", ")", ")", "fil_dim", "=", "len", "(", "coefs", ")", "-", "1", "result", "=", "numpy", ".", "zeros", "(", "1", "+", "(", "fil_dim", "*", "2", ")", ",", "dtype", "=", "numpy", ".", "float32", ")", "for", "n", ",", "coef", "in", "enumerate", "(", "coefs", ")", ":", "result", "[", "fil_dim", "-", "n", "]", "=", "coef", "result", "[", "fil_dim", "+", "n", "]", "=", "coef", "# normalise result", "result", "/=", "result", ".", "sum", "(", ")", "return", "result", "x_sigma", "=", "max", "(", "x_sigma", ",", "0.0", ")", "y_sigma", "=", "max", "(", "y_sigma", ",", "0.0", ")", "x_fil", "=", "filter_1D", "(", "x_sigma", ")", "y_fil", "=", "filter_1D", "(", "y_sigma", ")", "result", "=", "numpy", ".", "empty", "(", "[", "y_fil", ".", "shape", "[", "0", "]", ",", "x_fil", ".", "shape", "[", "0", "]", ",", "1", "]", ",", "dtype", "=", "numpy", ".", "float32", ")", "for", "y", "in", "range", "(", "y_fil", ".", "shape", "[", "0", "]", ")", ":", "for", "x", "in", "range", "(", "x_fil", ".", "shape", "[", "0", "]", ")", ":", "result", "[", "y", ",", "x", ",", "0", "]", "=", "x_fil", "[", "x", "]", "*", "y_fil", "[", "y", "]", "out_frame", "=", "Frame", "(", ")", "out_frame", ".", "data", "=", "result", "out_frame", ".", "type", "=", "'fil'", "audit", "=", "out_frame", ".", "metadata", ".", "get", "(", "'audit'", ")", "audit", "+=", "'data = GaussianFilter()\\n'", "if", "x_sigma", "!=", "0.0", ":", "audit", "+=", "' x_sigma: %g\\n'", "%", "(", "x_sigma", ")", "if", "y_sigma", "!=", "0.0", ":", "audit", "+=", "' y_sigma: %g\\n'", "%", "(", "y_sigma", ")", "out_frame", ".", "metadata", ".", "set", "(", "'audit'", ",", "audit", ")", "return", "out_frame" ]
Gaussian filter generator core. Alternative to the :py:class:`GaussianFilter` component that can be used to make a non-reconfigurable resizer:: resize = Resize() resize.filter(GaussianFilterCore(x_sigma=1.5)) ... start(..., resize, ...) ... :keyword float x_sigma: Horizontal standard deviation parameter. :keyword float y_sigma: Vertical standard deviation parameter. :return: A :py:class:`~pyctools.core.frame.Frame` object containing the filter.
[ "Gaussian", "filter", "generator", "core", "." ]
2a958665326892f45f249bebe62c2c23f306732b
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/components/interp/gaussianfilter.py#L92-L147
train
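A quick inspection of what GaussianFilterCore returns. The shape and sum in the comments follow from the aperture and normalisation logic in the record itself (for x_sigma=1.5 the 1-D filter comes out 13 taps wide), not from captured output.

from pyctools.components.interp.gaussianfilter import GaussianFilterCore

fil = GaussianFilterCore(x_sigma=1.5)
print(fil.type)              # 'fil'
print(fil.data.shape)        # (1, 13, 1): one line vertically, a 13-tap horizontal aperture
print(fil.data.sum())        # ~1.0, since each 1-D filter is normalised before the outer product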
jim-easterbrook/pyctools
src/pyctools/core/qt.py
QtEventLoop.queue_command
def queue_command(self, command): """Put a command on the queue to be called in the component's thread. :param callable command: the method to be invoked, e.g. :py:meth:`~Component.new_frame_event`. """ if self._running: # queue event normally QtCore.QCoreApplication.postEvent( self, ActionEvent(command), QtCore.Qt.LowEventPriority) else: # save event until we are started self._incoming.append(command)
python
def queue_command(self, command): """Put a command on the queue to be called in the component's thread. :param callable command: the method to be invoked, e.g. :py:meth:`~Component.new_frame_event`. """ if self._running: # queue event normally QtCore.QCoreApplication.postEvent( self, ActionEvent(command), QtCore.Qt.LowEventPriority) else: # save event until we are started self._incoming.append(command)
[ "def", "queue_command", "(", "self", ",", "command", ")", ":", "if", "self", ".", "_running", ":", "# queue event normally", "QtCore", ".", "QCoreApplication", ".", "postEvent", "(", "self", ",", "ActionEvent", "(", "command", ")", ",", "QtCore", ".", "Qt", ".", "LowEventPriority", ")", "else", ":", "# save event until we are started", "self", ".", "_incoming", ".", "append", "(", "command", ")" ]
Put a command on the queue to be called in the component's thread. :param callable command: the method to be invoked, e.g. :py:meth:`~Component.new_frame_event`.
[ "Put", "a", "command", "on", "the", "queue", "to", "be", "called", "in", "the", "component", "s", "thread", "." ]
2a958665326892f45f249bebe62c2c23f306732b
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/qt.py#L72-L86
train
jim-easterbrook/pyctools
src/pyctools/core/qt.py
QtEventLoop.join
def join(self, timeout=3600): """Wait until the event loop terminates or ``timeout`` is reached. This method is not meaningful unless called from the Qt "main thread", which is almost certainly the thread in which the component was created. :keyword float timeout: timeout in seconds. """ start = time.time() while self._running: now = time.time() maxtime = timeout + start - now if maxtime <= 0: return QCoreApplication.processEvents( QEventLoop.AllEvents, int(maxtime * 1000))
python
def join(self, timeout=3600): """Wait until the event loop terminates or ``timeout`` is reached. This method is not meaningful unless called from the Qt "main thread", which is almost certainly the thread in which the component was created. :keyword float timeout: timeout in seconds. """ start = time.time() while self._running: now = time.time() maxtime = timeout + start - now if maxtime <= 0: return QCoreApplication.processEvents( QEventLoop.AllEvents, int(maxtime * 1000))
[ "def", "join", "(", "self", ",", "timeout", "=", "3600", ")", ":", "start", "=", "time", ".", "time", "(", ")", "while", "self", ".", "_running", ":", "now", "=", "time", ".", "time", "(", ")", "maxtime", "=", "timeout", "+", "start", "-", "now", "if", "maxtime", "<=", "0", ":", "return", "QCoreApplication", ".", "processEvents", "(", "QEventLoop", ".", "AllEvents", ",", "int", "(", "maxtime", "*", "1000", ")", ")" ]
Wait until the event loop terminates or ``timeout`` is reached. This method is not meaningful unless called from the Qt "main thread", which is almost certainly the thread in which the component was created. :keyword float timeout: timeout in seconds.
[ "Wait", "until", "the", "event", "loop", "terminates", "or", "timeout", "is", "reached", "." ]
2a958665326892f45f249bebe62c2c23f306732b
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/qt.py#L112-L130
train
jim-easterbrook/pyctools
src/pyctools/components/deinterlace/intrafield.py
IntraField
def IntraField(config={}): """Intra field interlace to sequential converter. This uses a vertical filter with an aperture of 8 lines, generated by :py:class:`~pyctools.components.interp.filtergenerator.FilterGenerator`. The aperture (and other parameters) can be adjusted after the :py:class:`IntraField` component is created. """ return Compound( config = config, deint = SimpleDeinterlace(), interp = Resize(), filgen = FilterGenerator(yaperture=8, ycut=50), gain = Arithmetic(func='data * pt_float(2)'), linkages = { ('self', 'input') : [('deint', 'input')], ('deint', 'output') : [('interp', 'input')], ('interp', 'output') : [('self', 'output')], ('filgen', 'output') : [('gain', 'input')], ('gain', 'output') : [('interp', 'filter')], } )
python
def IntraField(config={}): """Intra field interlace to sequential converter. This uses a vertical filter with an aperture of 8 lines, generated by :py:class:`~pyctools.components.interp.filtergenerator.FilterGenerator`. The aperture (and other parameters) can be adjusted after the :py:class:`IntraField` component is created. """ return Compound( config = config, deint = SimpleDeinterlace(), interp = Resize(), filgen = FilterGenerator(yaperture=8, ycut=50), gain = Arithmetic(func='data * pt_float(2)'), linkages = { ('self', 'input') : [('deint', 'input')], ('deint', 'output') : [('interp', 'input')], ('interp', 'output') : [('self', 'output')], ('filgen', 'output') : [('gain', 'input')], ('gain', 'output') : [('interp', 'filter')], } )
[ "def", "IntraField", "(", "config", "=", "{", "}", ")", ":", "return", "Compound", "(", "config", "=", "config", ",", "deint", "=", "SimpleDeinterlace", "(", ")", ",", "interp", "=", "Resize", "(", ")", ",", "filgen", "=", "FilterGenerator", "(", "yaperture", "=", "8", ",", "ycut", "=", "50", ")", ",", "gain", "=", "Arithmetic", "(", "func", "=", "'data * pt_float(2)'", ")", ",", "linkages", "=", "{", "(", "'self'", ",", "'input'", ")", ":", "[", "(", "'deint'", ",", "'input'", ")", "]", ",", "(", "'deint'", ",", "'output'", ")", ":", "[", "(", "'interp'", ",", "'input'", ")", "]", ",", "(", "'interp'", ",", "'output'", ")", ":", "[", "(", "'self'", ",", "'output'", ")", "]", ",", "(", "'filgen'", ",", "'output'", ")", ":", "[", "(", "'gain'", ",", "'input'", ")", "]", ",", "(", "'gain'", ",", "'output'", ")", ":", "[", "(", "'interp'", ",", "'filter'", ")", "]", ",", "}", ")" ]
Intra field interlace to sequential converter. This uses a vertical filter with an aperture of 8 lines, generated by :py:class:`~pyctools.components.interp.filtergenerator.FilterGenerator`. The aperture (and other parameters) can be adjusted after the :py:class:`IntraField` component is created.
[ "Intra", "field", "interlace", "to", "sequential", "converter", "." ]
2a958665326892f45f249bebe62c2c23f306732b
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/components/deinterlace/intrafield.py#L28-L52
train
biplap-sarkar/pylimit
pylimit/pyratelimit.py
PyRateLimit.create
def create(self, period: int, limit: int): """ Creates a rate limiting rule with rate limiting period and attempt limit :param period: Rate limiting period in seconds :type period: int :param limit: Number of attempts permitted by rate limiting within a given period :type limit: int """ self.period = period self.limit = limit
python
def create(self, period: int, limit: int): """ Creates a rate limiting rule with rate limiting period and attempt limit :param period: Rate limiting period in seconds :type period: int :param limit: Number of attempts permitted by rate limiting within a given period :type limit: int """ self.period = period self.limit = limit
[ "def", "create", "(", "self", ",", "period", ":", "int", ",", "limit", ":", "int", ")", ":", "self", ".", "period", "=", "period", "self", ".", "limit", "=", "limit" ]
Creates a rate limiting rule with rate limiting period and attempt limit :param period: Rate limiting period in seconds :type period: int :param limit: Number of attempts permitted by rate limiting within a given period :type limit: int
[ "Creates", "a", "rate", "limiting", "rule", "with", "rate", "limiting", "period", "and", "attempt", "limit" ]
d2170a8c02a9be083f37c9e4ec1e28700a33d64e
https://github.com/biplap-sarkar/pylimit/blob/d2170a8c02a9be083f37c9e4ec1e28700a33d64e/pylimit/pyratelimit.py#L39-L51
train
biplap-sarkar/pylimit
pylimit/pyratelimit.py
PyRateLimit.is_rate_limited
def is_rate_limited(self, namespace: str) -> bool: """ Checks if a namespace is already rate limited or not without making any additional attempts :param namespace: Rate limiting namespace :type namespace: str :return: Returns true if attempt can go ahead under current rate limiting rules, false otherwise """ return not self.__can_attempt(namespace=namespace, add_attempt=False)
python
def is_rate_limited(self, namespace: str) -> bool: """ Checks if a namespace is already rate limited or not without making any additional attempts :param namespace: Rate limiting namespace :type namespace: str :return: Returns true if attempt can go ahead under current rate limiting rules, false otherwise """ return not self.__can_attempt(namespace=namespace, add_attempt=False)
[ "def", "is_rate_limited", "(", "self", ",", "namespace", ":", "str", ")", "->", "bool", ":", "return", "not", "self", ".", "__can_attempt", "(", "namespace", "=", "namespace", ",", "add_attempt", "=", "False", ")" ]
Checks if a namespace is already rate limited or not without making any additional attempts :param namespace: Rate limiting namespace :type namespace: str :return: Returns true if attempt can go ahead under current rate limiting rules, false otherwise
[ "Checks", "if", "a", "namespace", "is", "already", "rate", "limited", "or", "not", "without", "making", "any", "additional", "attempts" ]
d2170a8c02a9be083f37c9e4ec1e28700a33d64e
https://github.com/biplap-sarkar/pylimit/blob/d2170a8c02a9be083f37c9e4ec1e28700a33d64e/pylimit/pyratelimit.py#L96-L105
train
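Combining the two PyRateLimit records above into one hedged sketch: how the limiter is pointed at Redis, and how successful attempts are registered, are not shown in this file, so those parts stay out of the example. The import path follows the recorded file path; the no-argument constructor is an assumption.

from pylimit.pyratelimit import PyRateLimit

limiter = PyRateLimit()                        # assumed no-argument constructor
limiter.create(period=60, limit=10)            # at most 10 attempts per 60-second period
if not limiter.is_rate_limited('login:alice'):
    pass  # safe to proceed; recording the attempt itself is outside these records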
NUAA-Open-Source/NUAA-iCal-Python
NUAAiCal/AddToGCal.py
main
def main(): """Shows basic usage of the Google Calendar API. Creates a Google Calendar API service object and outputs a list of the next 10 events on the user's calendar. """ credentials = get_credentials() http = credentials.authorize(httplib2.Http()) service = discovery.build('calendar', 'v3', http=http) now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time print('Getting the upcoming 10 events') eventsResult = service.events().list( calendarId='primary', timeMin=now, maxResults=10, singleEvents=True, orderBy='startTime').execute() events = eventsResult.get('items', []) if not events: print('No upcoming events found.') for event in events: start = event['start'].get('dateTime', event['start'].get('date')) print(start, event['summary'])
python
def main(): """Shows basic usage of the Google Calendar API. Creates a Google Calendar API service object and outputs a list of the next 10 events on the user's calendar. """ credentials = get_credentials() http = credentials.authorize(httplib2.Http()) service = discovery.build('calendar', 'v3', http=http) now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time print('Getting the upcoming 10 events') eventsResult = service.events().list( calendarId='primary', timeMin=now, maxResults=10, singleEvents=True, orderBy='startTime').execute() events = eventsResult.get('items', []) if not events: print('No upcoming events found.') for event in events: start = event['start'].get('dateTime', event['start'].get('date')) print(start, event['summary'])
[ "def", "main", "(", ")", ":", "credentials", "=", "get_credentials", "(", ")", "http", "=", "credentials", ".", "authorize", "(", "httplib2", ".", "Http", "(", ")", ")", "service", "=", "discovery", ".", "build", "(", "'calendar'", ",", "'v3'", ",", "http", "=", "http", ")", "now", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "isoformat", "(", ")", "+", "'Z'", "# 'Z' indicates UTC time", "print", "(", "'Getting the upcoming 10 events'", ")", "eventsResult", "=", "service", ".", "events", "(", ")", ".", "list", "(", "calendarId", "=", "'primary'", ",", "timeMin", "=", "now", ",", "maxResults", "=", "10", ",", "singleEvents", "=", "True", ",", "orderBy", "=", "'startTime'", ")", ".", "execute", "(", ")", "events", "=", "eventsResult", ".", "get", "(", "'items'", ",", "[", "]", ")", "if", "not", "events", ":", "print", "(", "'No upcoming events found.'", ")", "for", "event", "in", "events", ":", "start", "=", "event", "[", "'start'", "]", ".", "get", "(", "'dateTime'", ",", "event", "[", "'start'", "]", ".", "get", "(", "'date'", ")", ")", "print", "(", "start", ",", "event", "[", "'summary'", "]", ")" ]
Shows basic usage of the Google Calendar API. Creates a Google Calendar API service object and outputs a list of the next 10 events on the user's calendar.
[ "Shows", "basic", "usage", "of", "the", "Google", "Calendar", "API", "." ]
1bdc4016e4d8b236a12bba5047a5150f889bc880
https://github.com/NUAA-Open-Source/NUAA-iCal-Python/blob/1bdc4016e4d8b236a12bba5047a5150f889bc880/NUAAiCal/AddToGCal.py#L54-L75
train
kmike/port-for
port_for/docopt.py
Pattern.fix_list_arguments
def fix_list_arguments(self): """Find arguments that should accumulate values and fix them.""" either = [list(c.children) for c in self.either.children] for case in either: case = [c for c in case if case.count(c) > 1] for a in [e for e in case if type(e) == Argument]: a.value = [] return self
python
def fix_list_arguments(self): """Find arguments that should accumulate values and fix them.""" either = [list(c.children) for c in self.either.children] for case in either: case = [c for c in case if case.count(c) > 1] for a in [e for e in case if type(e) == Argument]: a.value = [] return self
[ "def", "fix_list_arguments", "(", "self", ")", ":", "either", "=", "[", "list", "(", "c", ".", "children", ")", "for", "c", "in", "self", ".", "either", ".", "children", "]", "for", "case", "in", "either", ":", "case", "=", "[", "c", "for", "c", "in", "case", "if", "case", ".", "count", "(", "c", ")", ">", "1", "]", "for", "a", "in", "[", "e", "for", "e", "in", "case", "if", "type", "(", "e", ")", "==", "Argument", "]", ":", "a", ".", "value", "=", "[", "]", "return", "self" ]
Find arguments that should accumulate values and fix them.
[ "Find", "arguments", "that", "should", "accumulate", "values", "and", "fix", "them", "." ]
f61ebf3c2caf54eabe8233b40ef67b973176a6f5
https://github.com/kmike/port-for/blob/f61ebf3c2caf54eabe8233b40ef67b973176a6f5/port_for/docopt.py#L59-L66
train
kmike/port-for
port_for/docopt.py
Pattern.either
def either(self): """Transform pattern into an equivalent, with only top-level Either.""" # Currently the pattern will not be equivalent, but more "narrow", # although good enough to reason about list arguments. if not hasattr(self, 'children'): return Either(Required(self)) else: ret = [] groups = [[self]] while groups: children = groups.pop(0) types = [type(c) for c in children] if Either in types: either = [c for c in children if type(c) is Either][0] children.pop(children.index(either)) for c in either.children: groups.append([c] + children) elif Required in types: required = [c for c in children if type(c) is Required][0] children.pop(children.index(required)) groups.append(list(required.children) + children) elif Optional in types: optional = [c for c in children if type(c) is Optional][0] children.pop(children.index(optional)) groups.append(list(optional.children) + children) elif OneOrMore in types: oneormore = [c for c in children if type(c) is OneOrMore][0] children.pop(children.index(oneormore)) groups.append(list(oneormore.children) * 2 + children) else: ret.append(children) return Either(*[Required(*e) for e in ret])
python
def either(self): """Transform pattern into an equivalent, with only top-level Either.""" # Currently the pattern will not be equivalent, but more "narrow", # although good enough to reason about list arguments. if not hasattr(self, 'children'): return Either(Required(self)) else: ret = [] groups = [[self]] while groups: children = groups.pop(0) types = [type(c) for c in children] if Either in types: either = [c for c in children if type(c) is Either][0] children.pop(children.index(either)) for c in either.children: groups.append([c] + children) elif Required in types: required = [c for c in children if type(c) is Required][0] children.pop(children.index(required)) groups.append(list(required.children) + children) elif Optional in types: optional = [c for c in children if type(c) is Optional][0] children.pop(children.index(optional)) groups.append(list(optional.children) + children) elif OneOrMore in types: oneormore = [c for c in children if type(c) is OneOrMore][0] children.pop(children.index(oneormore)) groups.append(list(oneormore.children) * 2 + children) else: ret.append(children) return Either(*[Required(*e) for e in ret])
[ "def", "either", "(", "self", ")", ":", "# Currently the pattern will not be equivalent, but more \"narrow\",", "# although good enough to reason about list arguments.", "if", "not", "hasattr", "(", "self", ",", "'children'", ")", ":", "return", "Either", "(", "Required", "(", "self", ")", ")", "else", ":", "ret", "=", "[", "]", "groups", "=", "[", "[", "self", "]", "]", "while", "groups", ":", "children", "=", "groups", ".", "pop", "(", "0", ")", "types", "=", "[", "type", "(", "c", ")", "for", "c", "in", "children", "]", "if", "Either", "in", "types", ":", "either", "=", "[", "c", "for", "c", "in", "children", "if", "type", "(", "c", ")", "is", "Either", "]", "[", "0", "]", "children", ".", "pop", "(", "children", ".", "index", "(", "either", ")", ")", "for", "c", "in", "either", ".", "children", ":", "groups", ".", "append", "(", "[", "c", "]", "+", "children", ")", "elif", "Required", "in", "types", ":", "required", "=", "[", "c", "for", "c", "in", "children", "if", "type", "(", "c", ")", "is", "Required", "]", "[", "0", "]", "children", ".", "pop", "(", "children", ".", "index", "(", "required", ")", ")", "groups", ".", "append", "(", "list", "(", "required", ".", "children", ")", "+", "children", ")", "elif", "Optional", "in", "types", ":", "optional", "=", "[", "c", "for", "c", "in", "children", "if", "type", "(", "c", ")", "is", "Optional", "]", "[", "0", "]", "children", ".", "pop", "(", "children", ".", "index", "(", "optional", ")", ")", "groups", ".", "append", "(", "list", "(", "optional", ".", "children", ")", "+", "children", ")", "elif", "OneOrMore", "in", "types", ":", "oneormore", "=", "[", "c", "for", "c", "in", "children", "if", "type", "(", "c", ")", "is", "OneOrMore", "]", "[", "0", "]", "children", ".", "pop", "(", "children", ".", "index", "(", "oneormore", ")", ")", "groups", ".", "append", "(", "list", "(", "oneormore", ".", "children", ")", "*", "2", "+", "children", ")", "else", ":", "ret", ".", "append", "(", "children", ")", "return", "Either", "(", "*", "[", "Required", "(", "*", "e", ")", "for", "e", "in", "ret", "]", ")" ]
Transform pattern into an equivalent, with only top-level Either.
[ "Transform", "pattern", "into", "an", "equivalent", "with", "only", "top", "-", "level", "Either", "." ]
f61ebf3c2caf54eabe8233b40ef67b973176a6f5
https://github.com/kmike/port-for/blob/f61ebf3c2caf54eabe8233b40ef67b973176a6f5/port_for/docopt.py#L69-L100
train
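A worked illustration of the either property above, using classes defined in the same vendored docopt module. Two caveats: Argument('N') assumes the conventional docopt constructor (name, value=None), which this file does not show, and the repr in the comment is an expectation rather than captured output. Because fix_list_arguments accesses self.either without calling it, it is treated as a property here.

from port_for.docopt import Optional, Argument

pattern = Optional(Argument('N'))
print(pattern.either)   # expected: Either(Required(Argument('N', None)))
# Trace: the Optional is unwrapped, its child list [Argument('N')] is re-queued,
# and the plain child list is finally wrapped as Either(Required(...)).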
nicfit/MishMash
mishmash/commands/sync/utils.py
syncImage
def syncImage(img, current, session): """Add or updated the Image.""" def _img_str(i): return "%s - %s" % (i.type, i.description) for db_img in current.images: img_info = (img.type, img.md5, img.size) db_img_info = (db_img.type, db_img.md5, db_img.size) if db_img_info == img_info: img = None break elif (db_img.type == img.type and db_img.description == img.description): if img.md5 != db_img.md5: # Update image current.images.remove(db_img) current.images.append(img) session.add(current) pout(Fg.green("Updating image") + ": " + _img_str(img)) img = None break if img: # Add image current.images.append(img) session.add(current) pout(Fg.green("Adding image") + ": " + _img_str(img))
python
def syncImage(img, current, session): """Add or updated the Image.""" def _img_str(i): return "%s - %s" % (i.type, i.description) for db_img in current.images: img_info = (img.type, img.md5, img.size) db_img_info = (db_img.type, db_img.md5, db_img.size) if db_img_info == img_info: img = None break elif (db_img.type == img.type and db_img.description == img.description): if img.md5 != db_img.md5: # Update image current.images.remove(db_img) current.images.append(img) session.add(current) pout(Fg.green("Updating image") + ": " + _img_str(img)) img = None break if img: # Add image current.images.append(img) session.add(current) pout(Fg.green("Adding image") + ": " + _img_str(img))
[ "def", "syncImage", "(", "img", ",", "current", ",", "session", ")", ":", "def", "_img_str", "(", "i", ")", ":", "return", "\"%s - %s\"", "%", "(", "i", ".", "type", ",", "i", ".", "description", ")", "for", "db_img", "in", "current", ".", "images", ":", "img_info", "=", "(", "img", ".", "type", ",", "img", ".", "md5", ",", "img", ".", "size", ")", "db_img_info", "=", "(", "db_img", ".", "type", ",", "db_img", ".", "md5", ",", "db_img", ".", "size", ")", "if", "db_img_info", "==", "img_info", ":", "img", "=", "None", "break", "elif", "(", "db_img", ".", "type", "==", "img", ".", "type", "and", "db_img", ".", "description", "==", "img", ".", "description", ")", ":", "if", "img", ".", "md5", "!=", "db_img", ".", "md5", ":", "# Update image", "current", ".", "images", ".", "remove", "(", "db_img", ")", "current", ".", "images", ".", "append", "(", "img", ")", "session", ".", "add", "(", "current", ")", "pout", "(", "Fg", ".", "green", "(", "\"Updating image\"", ")", "+", "\": \"", "+", "_img_str", "(", "img", ")", ")", "img", "=", "None", "break", "if", "img", ":", "# Add image", "current", ".", "images", ".", "append", "(", "img", ")", "session", ".", "add", "(", "current", ")", "pout", "(", "Fg", ".", "green", "(", "\"Adding image\"", ")", "+", "\": \"", "+", "_img_str", "(", "img", ")", ")" ]
Add or update the Image.
[ "Add", "or", "updated", "the", "Image", "." ]
8f988936340bf0ffb83ea90ea124efb3c36a1174
https://github.com/nicfit/MishMash/blob/8f988936340bf0ffb83ea90ea124efb3c36a1174/mishmash/commands/sync/utils.py#L64-L92
train
magopian/django-data-exports
data_exports/forms.py
ColumnFormSet.add_fields
def add_fields(self, form, index): """Filter the form's column choices This is done at the formset level as there's no other way i could find to get the parent object (stored in self.instance), and the form at the same time. """ super(ColumnFormSet, self).add_fields(form, index) form.fields['column'].choices = self.get_choices()
python
def add_fields(self, form, index): """Filter the form's column choices This is done at the formset level as there's no other way i could find to get the parent object (stored in self.instance), and the form at the same time. """ super(ColumnFormSet, self).add_fields(form, index) form.fields['column'].choices = self.get_choices()
[ "def", "add_fields", "(", "self", ",", "form", ",", "index", ")", ":", "super", "(", "ColumnFormSet", ",", "self", ")", ".", "add_fields", "(", "form", ",", "index", ")", "form", ".", "fields", "[", "'column'", "]", ".", "choices", "=", "self", ".", "get_choices", "(", ")" ]
Filter the form's column choices This is done at the formset level as there's no other way I could find to get the parent object (stored in self.instance), and the form at the same time.
[ "Filter", "the", "form", "s", "column", "choices" ]
a73db486779d93046ad89c5bf582ff8ae869120f
https://github.com/magopian/django-data-exports/blob/a73db486779d93046ad89c5bf582ff8ae869120f/data_exports/forms.py#L39-L48
train
jim-easterbrook/pyctools
src/pyctools/setup.py
find_packages
def find_packages(): """Walk source directory tree and convert each sub directory to a package name. """ packages = ['pyctools'] for root, dirs, files in os.walk(os.path.join('src', 'pyctools')): package = '.'.join(root.split(os.sep)[1:]) for name in dirs: packages.append(package + '.' + name) return packages
python
def find_packages(): """Walk source directory tree and convert each sub directory to a package name. """ packages = ['pyctools'] for root, dirs, files in os.walk(os.path.join('src', 'pyctools')): package = '.'.join(root.split(os.sep)[1:]) for name in dirs: packages.append(package + '.' + name) return packages
[ "def", "find_packages", "(", ")", ":", "packages", "=", "[", "'pyctools'", "]", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "os", ".", "path", ".", "join", "(", "'src'", ",", "'pyctools'", ")", ")", ":", "package", "=", "'.'", ".", "join", "(", "root", ".", "split", "(", "os", ".", "sep", ")", "[", "1", ":", "]", ")", "for", "name", "in", "dirs", ":", "packages", ".", "append", "(", "package", "+", "'.'", "+", "name", ")", "return", "packages" ]
Walk source directory tree and convert each sub directory to a package name.
[ "Walk", "source", "directory", "tree", "and", "convert", "each", "sub", "directory", "to", "a", "package", "name", "." ]
2a958665326892f45f249bebe62c2c23f306732b
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/setup.py#L41-L51
train
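A tiny sketch of the find_packages helper above. It only makes sense when run from a checkout whose working directory contains the src/pyctools tree, and the package list in the comment is illustrative.

from pyctools.setup import find_packages

print(find_packages())
# e.g. ['pyctools', 'pyctools.core', 'pyctools.components', 'pyctools.components.io', ...]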
jaraco/jaraco.mongodb
jaraco/mongodb/cli.py
extract_param
def extract_param(param, args, type=None): """ From a list of args, extract the one param if supplied, returning the value and unused args. >>> extract_param('port', ['foo', '--port=999', 'bar'], type=int) (999, ['foo', 'bar']) >>> extract_param('port', ['foo', '--port', '999', 'bar'], type=int) (999, ['foo', 'bar']) >>> extract_param('port', ['foo', 'bar']) (None, ['foo', 'bar']) """ parser = argparse.ArgumentParser() parser.add_argument('--' + param, type=type) res, unused = parser.parse_known_args(args) return getattr(res, param), unused
python
def extract_param(param, args, type=None): """ From a list of args, extract the one param if supplied, returning the value and unused args. >>> extract_param('port', ['foo', '--port=999', 'bar'], type=int) (999, ['foo', 'bar']) >>> extract_param('port', ['foo', '--port', '999', 'bar'], type=int) (999, ['foo', 'bar']) >>> extract_param('port', ['foo', 'bar']) (None, ['foo', 'bar']) """ parser = argparse.ArgumentParser() parser.add_argument('--' + param, type=type) res, unused = parser.parse_known_args(args) return getattr(res, param), unused
[ "def", "extract_param", "(", "param", ",", "args", ",", "type", "=", "None", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'--'", "+", "param", ",", "type", "=", "type", ")", "res", ",", "unused", "=", "parser", ".", "parse_known_args", "(", "args", ")", "return", "getattr", "(", "res", ",", "param", ")", ",", "unused" ]
From a list of args, extract the one param if supplied, returning the value and unused args. >>> extract_param('port', ['foo', '--port=999', 'bar'], type=int) (999, ['foo', 'bar']) >>> extract_param('port', ['foo', '--port', '999', 'bar'], type=int) (999, ['foo', 'bar']) >>> extract_param('port', ['foo', 'bar']) (None, ['foo', 'bar'])
[ "From", "a", "list", "of", "args", "extract", "the", "one", "param", "if", "supplied", "returning", "the", "value", "and", "unused", "args", "." ]
280f17894941f4babf2e97db033dbb1fd2b9f705
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/cli.py#L4-L19
train
yeraydiazdiaz/lunr.py
lunr/languages/__init__.py
get_nltk_builder
def get_nltk_builder(languages): """Returns a builder with stemmers for all languages added to it. Args: languages (list): A list of supported languages. """ all_stemmers = [] all_stopwords_filters = [] all_word_characters = set() for language in languages: if language == "en": # use Lunr's defaults all_stemmers.append(lunr.stemmer.stemmer) all_stopwords_filters.append(stop_word_filter) all_word_characters.update({r"\w"}) else: stopwords, word_characters = _get_stopwords_and_word_characters(language) all_stemmers.append( Pipeline.registered_functions["stemmer-{}".format(language)] ) all_stopwords_filters.append( generate_stop_word_filter(stopwords, language=language) ) all_word_characters.update(word_characters) builder = Builder() multi_trimmer = generate_trimmer("".join(sorted(all_word_characters))) Pipeline.register_function( multi_trimmer, "lunr-multi-trimmer-{}".format("-".join(languages)) ) builder.pipeline.reset() for fn in chain([multi_trimmer], all_stopwords_filters, all_stemmers): builder.pipeline.add(fn) for fn in all_stemmers: builder.search_pipeline.add(fn) return builder
python
def get_nltk_builder(languages): """Returns a builder with stemmers for all languages added to it. Args: languages (list): A list of supported languages. """ all_stemmers = [] all_stopwords_filters = [] all_word_characters = set() for language in languages: if language == "en": # use Lunr's defaults all_stemmers.append(lunr.stemmer.stemmer) all_stopwords_filters.append(stop_word_filter) all_word_characters.update({r"\w"}) else: stopwords, word_characters = _get_stopwords_and_word_characters(language) all_stemmers.append( Pipeline.registered_functions["stemmer-{}".format(language)] ) all_stopwords_filters.append( generate_stop_word_filter(stopwords, language=language) ) all_word_characters.update(word_characters) builder = Builder() multi_trimmer = generate_trimmer("".join(sorted(all_word_characters))) Pipeline.register_function( multi_trimmer, "lunr-multi-trimmer-{}".format("-".join(languages)) ) builder.pipeline.reset() for fn in chain([multi_trimmer], all_stopwords_filters, all_stemmers): builder.pipeline.add(fn) for fn in all_stemmers: builder.search_pipeline.add(fn) return builder
[ "def", "get_nltk_builder", "(", "languages", ")", ":", "all_stemmers", "=", "[", "]", "all_stopwords_filters", "=", "[", "]", "all_word_characters", "=", "set", "(", ")", "for", "language", "in", "languages", ":", "if", "language", "==", "\"en\"", ":", "# use Lunr's defaults", "all_stemmers", ".", "append", "(", "lunr", ".", "stemmer", ".", "stemmer", ")", "all_stopwords_filters", ".", "append", "(", "stop_word_filter", ")", "all_word_characters", ".", "update", "(", "{", "r\"\\w\"", "}", ")", "else", ":", "stopwords", ",", "word_characters", "=", "_get_stopwords_and_word_characters", "(", "language", ")", "all_stemmers", ".", "append", "(", "Pipeline", ".", "registered_functions", "[", "\"stemmer-{}\"", ".", "format", "(", "language", ")", "]", ")", "all_stopwords_filters", ".", "append", "(", "generate_stop_word_filter", "(", "stopwords", ",", "language", "=", "language", ")", ")", "all_word_characters", ".", "update", "(", "word_characters", ")", "builder", "=", "Builder", "(", ")", "multi_trimmer", "=", "generate_trimmer", "(", "\"\"", ".", "join", "(", "sorted", "(", "all_word_characters", ")", ")", ")", "Pipeline", ".", "register_function", "(", "multi_trimmer", ",", "\"lunr-multi-trimmer-{}\"", ".", "format", "(", "\"-\"", ".", "join", "(", "languages", ")", ")", ")", "builder", ".", "pipeline", ".", "reset", "(", ")", "for", "fn", "in", "chain", "(", "[", "multi_trimmer", "]", ",", "all_stopwords_filters", ",", "all_stemmers", ")", ":", "builder", ".", "pipeline", ".", "add", "(", "fn", ")", "for", "fn", "in", "all_stemmers", ":", "builder", ".", "search_pipeline", ".", "add", "(", "fn", ")", "return", "builder" ]
Returns a builder with stemmers for all languages added to it. Args: languages (list): A list of supported languages.
[ "Returns", "a", "builder", "with", "stemmers", "for", "all", "languages", "added", "to", "it", "." ]
28ec3f6d4888295eed730211ee9617aa488d6ba3
https://github.com/yeraydiazdiaz/lunr.py/blob/28ec3f6d4888295eed730211ee9617aa488d6ba3/lunr/languages/__init__.py#L51-L89
train
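A hedged sketch of using the builder returned by get_nltk_builder. The Builder calls below (ref, field, add, build) and Index.search mirror the usual lunr.py interface but are not confirmed by this record, and the non-English languages need the optional NLTK data installed.

from lunr.languages import get_nltk_builder

builder = get_nltk_builder(["en", "es"])   # English keeps lunr's defaults, Spanish gets the NLTK stemmer/stopwords
builder.ref("id")                          # assumed Builder interface from here on
builder.field("texto")
builder.add({"id": "1", "texto": "hola mundo"})
index = builder.build()
print(index.search("hola"))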
yeraydiazdiaz/lunr.py
lunr/languages/__init__.py
register_languages
def register_languages(): """Register all supported languages to ensure compatibility.""" for language in set(SUPPORTED_LANGUAGES) - {"en"}: language_stemmer = partial(nltk_stemmer, get_language_stemmer(language)) Pipeline.register_function(language_stemmer, "stemmer-{}".format(language))
python
def register_languages(): """Register all supported languages to ensure compatibility.""" for language in set(SUPPORTED_LANGUAGES) - {"en"}: language_stemmer = partial(nltk_stemmer, get_language_stemmer(language)) Pipeline.register_function(language_stemmer, "stemmer-{}".format(language))
[ "def", "register_languages", "(", ")", ":", "for", "language", "in", "set", "(", "SUPPORTED_LANGUAGES", ")", "-", "{", "\"en\"", "}", ":", "language_stemmer", "=", "partial", "(", "nltk_stemmer", ",", "get_language_stemmer", "(", "language", ")", ")", "Pipeline", ".", "register_function", "(", "language_stemmer", ",", "\"stemmer-{}\"", ".", "format", "(", "language", ")", ")" ]
Register all supported languages to ensure compatibility.
[ "Register", "all", "supported", "languages", "to", "ensure", "compatibility", "." ]
28ec3f6d4888295eed730211ee9617aa488d6ba3
https://github.com/yeraydiazdiaz/lunr.py/blob/28ec3f6d4888295eed730211ee9617aa488d6ba3/lunr/languages/__init__.py#L92-L96
train
tehmaze/natural
natural/number.py
ordinal
def ordinal(value): ''' Converts a number to its ordinal representation. :param value: number >>> print(ordinal(1)) 1st >>> print(ordinal(11)) 11th >>> print(ordinal(101)) 101st >>> print(ordinal(104)) 104th >>> print(ordinal(113)) 113th >>> print(ordinal(123)) 123rd ''' try: value = int(value) except (TypeError, ValueError): raise ValueError if value % 100 in (11, 12, 13): return '%d%s' % (value, ORDINAL_SUFFIX[0]) else: return '%d%s' % (value, ORDINAL_SUFFIX[value % 10])
python
def ordinal(value): ''' Converts a number to its ordinal representation. :param value: number >>> print(ordinal(1)) 1st >>> print(ordinal(11)) 11th >>> print(ordinal(101)) 101st >>> print(ordinal(104)) 104th >>> print(ordinal(113)) 113th >>> print(ordinal(123)) 123rd ''' try: value = int(value) except (TypeError, ValueError): raise ValueError if value % 100 in (11, 12, 13): return '%d%s' % (value, ORDINAL_SUFFIX[0]) else: return '%d%s' % (value, ORDINAL_SUFFIX[value % 10])
[ "def", "ordinal", "(", "value", ")", ":", "try", ":", "value", "=", "int", "(", "value", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "if", "value", "%", "100", "in", "(", "11", ",", "12", ",", "13", ")", ":", "return", "'%d%s'", "%", "(", "value", ",", "ORDINAL_SUFFIX", "[", "0", "]", ")", "else", ":", "return", "'%d%s'", "%", "(", "value", ",", "ORDINAL_SUFFIX", "[", "value", "%", "10", "]", ")" ]
Converts a number to its ordinal representation. :param value: number >>> print(ordinal(1)) 1st >>> print(ordinal(11)) 11th >>> print(ordinal(101)) 101st >>> print(ordinal(104)) 104th >>> print(ordinal(113)) 113th >>> print(ordinal(123)) 123rd
[ "Converts", "a", "number", "to", "its", "ordinal", "representation", "." ]
d7a1fc9de712f9bcf68884a80826a7977df356fb
https://github.com/tehmaze/natural/blob/d7a1fc9de712f9bcf68884a80826a7977df356fb/natural/number.py#L45-L73
train
tehmaze/natural
natural/number.py
percentage
def percentage(value, digits=2): ''' Converts a fraction to a formatted percentage. :param value: number :param digits: default ``2`` >>> print(percentage(1)) 100.00 % >>> print(percentage(0.23, digits=0)) 23 % >>> print(percentage(23.421)) 2,342.10 % ''' value = float(value) * 100.0 return u'' + '%s %%' % (_format(value, digits),)
python
def percentage(value, digits=2): ''' Converts a fraction to a formatted percentage. :param value: number :param digits: default ``2`` >>> print(percentage(1)) 100.00 % >>> print(percentage(0.23, digits=0)) 23 % >>> print(percentage(23.421)) 2,342.10 % ''' value = float(value) * 100.0 return u'' + '%s %%' % (_format(value, digits),)
[ "def", "percentage", "(", "value", ",", "digits", "=", "2", ")", ":", "value", "=", "float", "(", "value", ")", "*", "100.0", "return", "u''", "+", "'%s %%'", "%", "(", "_format", "(", "value", ",", "digits", ")", ",", ")" ]
Converts a fraction to a formatted percentage. :param value: number :param digits: default ``2`` >>> print(percentage(1)) 100.00 % >>> print(percentage(0.23, digits=0)) 23 % >>> print(percentage(23.421)) 2,342.10 %
[ "Converts", "a", "fraction", "to", "a", "formatted", "percentage", "." ]
d7a1fc9de712f9bcf68884a80826a7977df356fb
https://github.com/tehmaze/natural/blob/d7a1fc9de712f9bcf68884a80826a7977df356fb/natural/number.py#L117-L134
train
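Assuming the natural package is installed and the active locale groups thousands with commas (as the doctest above does), typical use looks like:

    >>> from natural.number import percentage
    >>> print(percentage(0.23, digits=0))
    23 %
    >>> print(percentage(23.421))
    2,342.10 %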
tehmaze/natural
natural/number.py
word
def word(value, digits=2): ''' Converts a large number to a formatted number containing the textual suffix for that number. :param value: number >>> print(word(1)) 1 >>> print(word(123456789)) 123.46 million ''' convention = locale.localeconv() decimal_point = convention['decimal_point'] decimal_zero = re.compile(r'%s0+' % re.escape(decimal_point)) prefix = value < 0 and '-' or '' value = abs(int(value)) if value < 1000: return u''.join([ prefix, decimal_zero.sub('', _format(value, digits)), ]) for base, suffix in enumerate(LARGE_NUMBER_SUFFIX): exp = (base + 2) * 3 power = 10 ** exp if value < power: value = value / float(10 ** (exp - 3)) return ''.join([ prefix, decimal_zero.sub('', _format(value, digits)), ' ', suffix, ]) raise OverflowError
python
def word(value, digits=2): ''' Converts a large number to a formatted number containing the textual suffix for that number. :param value: number >>> print(word(1)) 1 >>> print(word(123456789)) 123.46 million ''' convention = locale.localeconv() decimal_point = convention['decimal_point'] decimal_zero = re.compile(r'%s0+' % re.escape(decimal_point)) prefix = value < 0 and '-' or '' value = abs(int(value)) if value < 1000: return u''.join([ prefix, decimal_zero.sub('', _format(value, digits)), ]) for base, suffix in enumerate(LARGE_NUMBER_SUFFIX): exp = (base + 2) * 3 power = 10 ** exp if value < power: value = value / float(10 ** (exp - 3)) return ''.join([ prefix, decimal_zero.sub('', _format(value, digits)), ' ', suffix, ]) raise OverflowError
[ "def", "word", "(", "value", ",", "digits", "=", "2", ")", ":", "convention", "=", "locale", ".", "localeconv", "(", ")", "decimal_point", "=", "convention", "[", "'decimal_point'", "]", "decimal_zero", "=", "re", ".", "compile", "(", "r'%s0+'", "%", "re", ".", "escape", "(", "decimal_point", ")", ")", "prefix", "=", "value", "<", "0", "and", "'-'", "or", "''", "value", "=", "abs", "(", "int", "(", "value", ")", ")", "if", "value", "<", "1000", ":", "return", "u''", ".", "join", "(", "[", "prefix", ",", "decimal_zero", ".", "sub", "(", "''", ",", "_format", "(", "value", ",", "digits", ")", ")", ",", "]", ")", "for", "base", ",", "suffix", "in", "enumerate", "(", "LARGE_NUMBER_SUFFIX", ")", ":", "exp", "=", "(", "base", "+", "2", ")", "*", "3", "power", "=", "10", "**", "exp", "if", "value", "<", "power", ":", "value", "=", "value", "/", "float", "(", "10", "**", "(", "exp", "-", "3", ")", ")", "return", "''", ".", "join", "(", "[", "prefix", ",", "decimal_zero", ".", "sub", "(", "''", ",", "_format", "(", "value", ",", "digits", ")", ")", ",", "' '", ",", "suffix", ",", "]", ")", "raise", "OverflowError" ]
Converts a large number to a formatted number containing the textual suffix for that number. :param value: number >>> print(word(1)) 1 >>> print(word(123456789)) 123.46 million
[ "Converts", "a", "large", "number", "to", "a", "formatted", "number", "containing", "the", "textual", "suffix", "for", "that", "number", "." ]
d7a1fc9de712f9bcf68884a80826a7977df356fb
https://github.com/tehmaze/natural/blob/d7a1fc9de712f9bcf68884a80826a7977df356fb/natural/number.py#L137-L174
train
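For the doctest value above, the loop stops at the second suffix because 123456789 is below 10**9, and the scaling by 10**(9-3) is what produces the 123.46 figure; a quick check of that arithmetic:

    >>> 123456789 / float(10 ** 6)
    123.456789
    >>> round(123456789 / float(10 ** 6), 2)
    123.46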
jaraco/jaraco.mongodb
jaraco/mongodb/oplog.py
_full_rename
def _full_rename(args): """ Return True only if the arguments passed specify exact namespaces and to conduct a rename of every namespace. """ return ( args.ns and all(map(args.rename.affects, args.ns)) )
python
def _full_rename(args): """ Return True only if the arguments passed specify exact namespaces and to conduct a rename of every namespace. """ return ( args.ns and all(map(args.rename.affects, args.ns)) )
[ "def", "_full_rename", "(", "args", ")", ":", "return", "(", "args", ".", "ns", "and", "all", "(", "map", "(", "args", ".", "rename", ".", "affects", ",", "args", ".", "ns", ")", ")", ")" ]
Return True only if the arguments passed specify exact namespaces and the rename affects every one of those namespaces.
[ "Return", "True", "only", "if", "the", "arguments", "passed", "specify", "exact", "namespaces", "and", "the", "rename", "affects", "every", "one", "of", "those", "namespaces", "." ]
280f17894941f4babf2e97db033dbb1fd2b9f705
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L297-L305
train
jaraco/jaraco.mongodb
jaraco/mongodb/oplog.py
apply
def apply(db, op): """ Apply operation in db """ dbname = op['ns'].split('.')[0] or "admin" opts = bson.CodecOptions(uuid_representation=bson.binary.STANDARD) db[dbname].command("applyOps", [op], codec_options=opts)
python
def apply(db, op): """ Apply operation in db """ dbname = op['ns'].split('.')[0] or "admin" opts = bson.CodecOptions(uuid_representation=bson.binary.STANDARD) db[dbname].command("applyOps", [op], codec_options=opts)
[ "def", "apply", "(", "db", ",", "op", ")", ":", "dbname", "=", "op", "[", "'ns'", "]", ".", "split", "(", "'.'", ")", "[", "0", "]", "or", "\"admin\"", "opts", "=", "bson", ".", "CodecOptions", "(", "uuid_representation", "=", "bson", ".", "binary", ".", "STANDARD", ")", "db", "[", "dbname", "]", ".", "command", "(", "\"applyOps\"", ",", "[", "op", "]", ",", "codec_options", "=", "opts", ")" ]
Apply operation in db
[ "Apply", "operation", "in", "db" ]
280f17894941f4babf2e97db033dbb1fd2b9f705
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L445-L451
train
jaraco/jaraco.mongodb
jaraco/mongodb/oplog.py
Oplog.since
def since(self, ts): """ Query the oplog for items since ts and then return """ spec = {'ts': {'$gt': ts}} cursor = self.query(spec) while True: # todo: trap InvalidDocument errors: # except bson.errors.InvalidDocument as e: # logging.info(repr(e)) for doc in cursor: yield doc if not cursor.alive: break time.sleep(1)
python
def since(self, ts): """ Query the oplog for items since ts and then return """ spec = {'ts': {'$gt': ts}} cursor = self.query(spec) while True: # todo: trap InvalidDocument errors: # except bson.errors.InvalidDocument as e: # logging.info(repr(e)) for doc in cursor: yield doc if not cursor.alive: break time.sleep(1)
[ "def", "since", "(", "self", ",", "ts", ")", ":", "spec", "=", "{", "'ts'", ":", "{", "'$gt'", ":", "ts", "}", "}", "cursor", "=", "self", ".", "query", "(", "spec", ")", "while", "True", ":", "# todo: trap InvalidDocument errors:", "# except bson.errors.InvalidDocument as e:", "# logging.info(repr(e))", "for", "doc", "in", "cursor", ":", "yield", "doc", "if", "not", "cursor", ".", "alive", ":", "break", "time", ".", "sleep", "(", "1", ")" ]
Query the oplog for items since ts and then return
[ "Query", "the", "oplog", "for", "items", "since", "ts", "and", "then", "return" ]
280f17894941f4babf2e97db033dbb1fd2b9f705
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L472-L486
train
jaraco/jaraco.mongodb
jaraco/mongodb/oplog.py
Oplog.has_ops_before
def has_ops_before(self, ts): """ Determine if there are any ops before ts """ spec = {'ts': {'$lt': ts}} return bool(self.coll.find_one(spec))
python
def has_ops_before(self, ts): """ Determine if there are any ops before ts """ spec = {'ts': {'$lt': ts}} return bool(self.coll.find_one(spec))
[ "def", "has_ops_before", "(", "self", ",", "ts", ")", ":", "spec", "=", "{", "'ts'", ":", "{", "'$lt'", ":", "ts", "}", "}", "return", "bool", "(", "self", ".", "coll", ".", "find_one", "(", "spec", ")", ")" ]
Determine if there are any ops before ts
[ "Determine", "if", "there", "are", "any", "ops", "before", "ts" ]
280f17894941f4babf2e97db033dbb1fd2b9f705
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L488-L493
train
jaraco/jaraco.mongodb
jaraco/mongodb/oplog.py
TailingOplog.since
def since(self, ts): """ Tail the oplog, starting from ts. """ while True: items = super(TailingOplog, self).since(ts) for doc in items: yield doc ts = doc['ts']
python
def since(self, ts): """ Tail the oplog, starting from ts. """ while True: items = super(TailingOplog, self).since(ts) for doc in items: yield doc ts = doc['ts']
[ "def", "since", "(", "self", ",", "ts", ")", ":", "while", "True", ":", "items", "=", "super", "(", "TailingOplog", ",", "self", ")", ".", "since", "(", "ts", ")", "for", "doc", "in", "items", ":", "yield", "doc", "ts", "=", "doc", "[", "'ts'", "]" ]
Tail the oplog, starting from ts.
[ "Tail", "the", "oplog", "starting", "from", "ts", "." ]
280f17894941f4babf2e97db033dbb1fd2b9f705
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L502-L510
train
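The two generators above amount to a poll-and-resume loop on the ts field. A hedged sketch of the same pattern written directly against pymongo rather than the library's own classes (the connection and collection names are illustrative, and it needs a replica set, since standalone servers have no oplog):

    import time
    import pymongo

    client = pymongo.MongoClient()            # illustrative local connection
    oplog = client.local['oplog.rs']

    def tail_since(ts):
        while True:
            cursor = oplog.find({'ts': {'$gt': ts}}).sort('$natural', 1)
            for doc in cursor:
                yield doc
                ts = doc['ts']                # resume point, as in TailingOplog.since
            time.sleep(1)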
jaraco/jaraco.mongodb
jaraco/mongodb/oplog.py
Timestamp.dump
def dump(self, stream): """Serialize self to text stream. Matches convention of mongooplog. """ items = ( ('time', self.time), ('inc', self.inc), ) # use ordered dict to retain order ts = collections.OrderedDict(items) json.dump(dict(ts=ts), stream)
python
def dump(self, stream): """Serialize self to text stream. Matches convention of mongooplog. """ items = ( ('time', self.time), ('inc', self.inc), ) # use ordered dict to retain order ts = collections.OrderedDict(items) json.dump(dict(ts=ts), stream)
[ "def", "dump", "(", "self", ",", "stream", ")", ":", "items", "=", "(", "(", "'time'", ",", "self", ".", "time", ")", ",", "(", "'inc'", ",", "self", ".", "inc", ")", ",", ")", "# use ordered dict to retain order", "ts", "=", "collections", ".", "OrderedDict", "(", "items", ")", "json", ".", "dump", "(", "dict", "(", "ts", "=", "ts", ")", ",", "stream", ")" ]
Serialize self to text stream. Matches convention of mongooplog.
[ "Serialize", "self", "to", "text", "stream", "." ]
280f17894941f4babf2e97db033dbb1fd2b9f705
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L524-L535
train
jaraco/jaraco.mongodb
jaraco/mongodb/oplog.py
Timestamp.load
def load(cls, stream): """Load a serialized version of self from text stream. Expects the format used by mongooplog. """ data = json.load(stream)['ts'] return cls(data['time'], data['inc'])
python
def load(cls, stream): """Load a serialized version of self from text stream. Expects the format used by mongooplog. """ data = json.load(stream)['ts'] return cls(data['time'], data['inc'])
[ "def", "load", "(", "cls", ",", "stream", ")", ":", "data", "=", "json", ".", "load", "(", "stream", ")", "[", "'ts'", "]", "return", "cls", "(", "data", "[", "'time'", "]", ",", "data", "[", "'inc'", "]", ")" ]
Load a serialized version of self from text stream. Expects the format used by mongooplog.
[ "Load", "a", "serialized", "version", "of", "self", "from", "text", "stream", "." ]
280f17894941f4babf2e97db033dbb1fd2b9f705
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L538-L544
train
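dump and load above are symmetric, so a round trip through an in-memory stream reproduces the time/inc pair; a small self-contained check (the numbers are illustrative):

    import collections
    import io
    import json

    buf = io.StringIO()
    json.dump(dict(ts=collections.OrderedDict([('time', 1514764800), ('inc', 0)])), buf)
    print(buf.getvalue())                 # {"ts": {"time": 1514764800, "inc": 0}}

    buf.seek(0)
    data = json.load(buf)['ts']
    assert (data['time'], data['inc']) == (1514764800, 0)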
jaraco/jaraco.mongodb
jaraco/mongodb/oplog.py
Timestamp.for_window
def for_window(cls, window): """ Given a timedelta window, return a timestamp representing that time. """ utcnow = datetime.datetime.utcnow() return cls(utcnow - window, 0)
python
def for_window(cls, window): """ Given a timedelta window, return a timestamp representing that time. """ utcnow = datetime.datetime.utcnow() return cls(utcnow - window, 0)
[ "def", "for_window", "(", "cls", ",", "window", ")", ":", "utcnow", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", "return", "cls", "(", "utcnow", "-", "window", ",", "0", ")" ]
Given a timedelta window, return a timestamp representing that time.
[ "Given", "a", "timedelta", "window", "return", "a", "timestamp", "representing", "that", "time", "." ]
280f17894941f4babf2e97db033dbb1fd2b9f705
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L547-L553
train
jaraco/jaraco.mongodb
jaraco/mongodb/oplog.py
ResumeFile.save
def save(self, ts): """ Save timestamp to file. """ with open(self, 'w') as f: Timestamp.wrap(ts).dump(f)
python
def save(self, ts): """ Save timestamp to file. """ with open(self, 'w') as f: Timestamp.wrap(ts).dump(f)
[ "def", "save", "(", "self", ",", "ts", ")", ":", "with", "open", "(", "self", ",", "'w'", ")", "as", "f", ":", "Timestamp", ".", "wrap", "(", "ts", ")", ".", "dump", "(", "f", ")" ]
Save timestamp to file.
[ "Save", "timestamp", "to", "file", "." ]
280f17894941f4babf2e97db033dbb1fd2b9f705
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L557-L562
train
yeraydiazdiaz/lunr.py
lunr/tokenizer.py
Tokenizer
def Tokenizer(obj, metadata=None, separator=SEPARATOR): """Splits a string into tokens ready to be inserted into the search index. This tokenizer will convert its parameter to a string by calling `str` and then will split this string on characters matching `separator`. Lists will have their elements converted to strings and wrapped in a lunr `Token`. Optional metadata can be passed to the tokenizer, this metadata will be cloned and added as metadata to every token that is created from the object to be tokenized. """ if obj is None: return [] metadata = metadata or {} if isinstance(obj, (list, tuple)): return [ Token(as_string(element).lower(), deepcopy(metadata)) for element in obj ] string = str(obj).strip().lower() length = len(string) tokens = [] slice_start = 0 for slice_end in range(length): char = string[slice_end] slice_length = slice_end - slice_start if separator.match(char) or slice_end == length - 1: if slice_length > 0: sl = slice(slice_start, slice_end if slice_end < length - 1 else None) token_metadata = {} token_metadata["position"] = [ slice_start, slice_length if slice_end < length - 1 else slice_length + 1, ] token_metadata["index"] = len(tokens) token_metadata.update(metadata) tokens.append(Token(string[sl], token_metadata)) slice_start = slice_end + 1 return tokens
python
def Tokenizer(obj, metadata=None, separator=SEPARATOR): """Splits a string into tokens ready to be inserted into the search index. This tokenizer will convert its parameter to a string by calling `str` and then will split this string on characters matching `separator`. Lists will have their elements converted to strings and wrapped in a lunr `Token`. Optional metadata can be passed to the tokenizer, this metadata will be cloned and added as metadata to every token that is created from the object to be tokenized. """ if obj is None: return [] metadata = metadata or {} if isinstance(obj, (list, tuple)): return [ Token(as_string(element).lower(), deepcopy(metadata)) for element in obj ] string = str(obj).strip().lower() length = len(string) tokens = [] slice_start = 0 for slice_end in range(length): char = string[slice_end] slice_length = slice_end - slice_start if separator.match(char) or slice_end == length - 1: if slice_length > 0: sl = slice(slice_start, slice_end if slice_end < length - 1 else None) token_metadata = {} token_metadata["position"] = [ slice_start, slice_length if slice_end < length - 1 else slice_length + 1, ] token_metadata["index"] = len(tokens) token_metadata.update(metadata) tokens.append(Token(string[sl], token_metadata)) slice_start = slice_end + 1 return tokens
[ "def", "Tokenizer", "(", "obj", ",", "metadata", "=", "None", ",", "separator", "=", "SEPARATOR", ")", ":", "if", "obj", "is", "None", ":", "return", "[", "]", "metadata", "=", "metadata", "or", "{", "}", "if", "isinstance", "(", "obj", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "[", "Token", "(", "as_string", "(", "element", ")", ".", "lower", "(", ")", ",", "deepcopy", "(", "metadata", ")", ")", "for", "element", "in", "obj", "]", "string", "=", "str", "(", "obj", ")", ".", "strip", "(", ")", ".", "lower", "(", ")", "length", "=", "len", "(", "string", ")", "tokens", "=", "[", "]", "slice_start", "=", "0", "for", "slice_end", "in", "range", "(", "length", ")", ":", "char", "=", "string", "[", "slice_end", "]", "slice_length", "=", "slice_end", "-", "slice_start", "if", "separator", ".", "match", "(", "char", ")", "or", "slice_end", "==", "length", "-", "1", ":", "if", "slice_length", ">", "0", ":", "sl", "=", "slice", "(", "slice_start", ",", "slice_end", "if", "slice_end", "<", "length", "-", "1", "else", "None", ")", "token_metadata", "=", "{", "}", "token_metadata", "[", "\"position\"", "]", "=", "[", "slice_start", ",", "slice_length", "if", "slice_end", "<", "length", "-", "1", "else", "slice_length", "+", "1", ",", "]", "token_metadata", "[", "\"index\"", "]", "=", "len", "(", "tokens", ")", "token_metadata", ".", "update", "(", "metadata", ")", "tokens", ".", "append", "(", "Token", "(", "string", "[", "sl", "]", ",", "token_metadata", ")", ")", "slice_start", "=", "slice_end", "+", "1", "return", "tokens" ]
Splits a string into tokens ready to be inserted into the search index. This tokenizer will convert its parameter to a string by calling `str` and then will split this string on characters matching `separator`. Lists will have their elements converted to strings and wrapped in a lunr `Token`. Optional metadata can be passed to the tokenizer, this metadata will be cloned and added as metadata to every token that is created from the object to be tokenized.
[ "Splits", "a", "string", "into", "tokens", "ready", "to", "be", "inserted", "into", "the", "search", "index", "." ]
28ec3f6d4888295eed730211ee9617aa488d6ba3
https://github.com/yeraydiazdiaz/lunr.py/blob/28ec3f6d4888295eed730211ee9617aa488d6ba3/lunr/tokenizer.py#L14-L59
train
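Tracing the slicing above for a two-word string gives one token per word, with position metadata of [start, length]; a doctest-style sketch (str(token) and token.metadata are assumed to be the Token class's accessors, which are not shown here):

    >>> from lunr.tokenizer import Tokenizer
    >>> tokens = Tokenizer("green plant")
    >>> [str(t) for t in tokens]
    ['green', 'plant']
    >>> tokens[0].metadata["position"]
    [0, 5]
    >>> tokens[1].metadata["position"]
    [6, 5]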
jaraco/jaraco.mongodb
jaraco/mongodb/manage.py
all_collections
def all_collections(db): """ Yield all non-sytem collections in db. """ include_pattern = r'(?!system\.)' return ( db[name] for name in db.list_collection_names() if re.match(include_pattern, name) )
python
def all_collections(db): """ Yield all non-sytem collections in db. """ include_pattern = r'(?!system\.)' return ( db[name] for name in db.list_collection_names() if re.match(include_pattern, name) )
[ "def", "all_collections", "(", "db", ")", ":", "include_pattern", "=", "r'(?!system\\.)'", "return", "(", "db", "[", "name", "]", "for", "name", "in", "db", ".", "list_collection_names", "(", ")", "if", "re", ".", "match", "(", "include_pattern", ",", "name", ")", ")" ]
Yield all non-system collections in db.
[ "Yield", "all", "non", "-", "system", "collections", "in", "db", "." ]
280f17894941f4babf2e97db033dbb1fd2b9f705
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/manage.py#L16-L25
train
jaraco/jaraco.mongodb
jaraco/mongodb/manage.py
safe_purge_collection
def safe_purge_collection(coll): """ Cannot remove documents from capped collections in later versions of MongoDB, so drop the collection instead. """ op = ( drop_collection if coll.options().get('capped', False) else purge_collection ) return op(coll)
python
def safe_purge_collection(coll): """ Cannot remove documents from capped collections in later versions of MongoDB, so drop the collection instead. """ op = ( drop_collection if coll.options().get('capped', False) else purge_collection ) return op(coll)
[ "def", "safe_purge_collection", "(", "coll", ")", ":", "op", "=", "(", "drop_collection", "if", "coll", ".", "options", "(", ")", ".", "get", "(", "'capped'", ",", "False", ")", "else", "purge_collection", ")", "return", "op", "(", "coll", ")" ]
Cannot remove documents from capped collections in later versions of MongoDB, so drop the collection instead.
[ "Cannot", "remove", "documents", "from", "capped", "collections", "in", "later", "versions", "of", "MongoDB", "so", "drop", "the", "collection", "instead", "." ]
280f17894941f4babf2e97db033dbb1fd2b9f705
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/manage.py#L32-L43
train
yeraydiazdiaz/lunr.py
lunr/stop_word_filter.py
generate_stop_word_filter
def generate_stop_word_filter(stop_words, language=None): """Builds a stopWordFilter function from the provided list of stop words. The built in `stop_word_filter` is built using this factory and can be used to generate custom `stop_word_filter` for applications or non English languages. """ def stop_word_filter(token, i=None, tokens=None): if token and str(token) not in stop_words: return token # camelCased for for compatibility with lunr.js label = ( "stopWordFilter-{}".format(language) if language is not None else "stopWordFilter" ) Pipeline.register_function(stop_word_filter, label) return stop_word_filter
python
def generate_stop_word_filter(stop_words, language=None): """Builds a stopWordFilter function from the provided list of stop words. The built in `stop_word_filter` is built using this factory and can be used to generate custom `stop_word_filter` for applications or non English languages. """ def stop_word_filter(token, i=None, tokens=None): if token and str(token) not in stop_words: return token # camelCased for for compatibility with lunr.js label = ( "stopWordFilter-{}".format(language) if language is not None else "stopWordFilter" ) Pipeline.register_function(stop_word_filter, label) return stop_word_filter
[ "def", "generate_stop_word_filter", "(", "stop_words", ",", "language", "=", "None", ")", ":", "def", "stop_word_filter", "(", "token", ",", "i", "=", "None", ",", "tokens", "=", "None", ")", ":", "if", "token", "and", "str", "(", "token", ")", "not", "in", "stop_words", ":", "return", "token", "# camelCased for for compatibility with lunr.js", "label", "=", "(", "\"stopWordFilter-{}\"", ".", "format", "(", "language", ")", "if", "language", "is", "not", "None", "else", "\"stopWordFilter\"", ")", "Pipeline", ".", "register_function", "(", "stop_word_filter", ",", "label", ")", "return", "stop_word_filter" ]
Builds a stopWordFilter function from the provided list of stop words. The built-in `stop_word_filter` is built using this factory and can be used to generate custom `stop_word_filter` for applications or non-English languages.
[ "Builds", "a", "stopWordFilter", "function", "from", "the", "provided", "list", "of", "stop", "words", "." ]
28ec3f6d4888295eed730211ee9617aa488d6ba3
https://github.com/yeraydiazdiaz/lunr.py/blob/28ec3f6d4888295eed730211ee9617aa488d6ba3/lunr/stop_word_filter.py#L130-L149
train
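A hedged usage sketch: build a filter for a made-up stop-word list and apply it to single tokens (plain strings work here because the filter only checks truthiness and str() membership):

    >>> from lunr.stop_word_filter import generate_stop_word_filter
    >>> stop_de = generate_stop_word_filter(["der", "die", "das"], language="de")
    >>> print(stop_de("haus"))
    haus
    >>> print(stop_de("die"))
    None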
tehmaze/natural
natural/phone.py
pesn
def pesn(number, separator=u''): ''' Printable Pseudo Electronic Serial Number. :param number: hexadecimal string >>> print(pesn('1B69B4BA630F34E')) 805F9EF7 ''' number = re.sub(r'[\s-]', '', meid(number)) serial = hashlib.sha1(unhexlify(number[:14])) return separator.join(['80', serial.hexdigest()[-6:].upper()])
python
def pesn(number, separator=u''): ''' Printable Pseudo Electronic Serial Number. :param number: hexadecimal string >>> print(pesn('1B69B4BA630F34E')) 805F9EF7 ''' number = re.sub(r'[\s-]', '', meid(number)) serial = hashlib.sha1(unhexlify(number[:14])) return separator.join(['80', serial.hexdigest()[-6:].upper()])
[ "def", "pesn", "(", "number", ",", "separator", "=", "u''", ")", ":", "number", "=", "re", ".", "sub", "(", "r'[\\s-]'", ",", "''", ",", "meid", "(", "number", ")", ")", "serial", "=", "hashlib", ".", "sha1", "(", "unhexlify", "(", "number", "[", ":", "14", "]", ")", ")", "return", "separator", ".", "join", "(", "[", "'80'", ",", "serial", ".", "hexdigest", "(", ")", "[", "-", "6", ":", "]", ".", "upper", "(", ")", "]", ")" ]
Printable Pseudo Electronic Serial Number. :param number: hexadecimal string >>> print(pesn('1B69B4BA630F34E')) 805F9EF7
[ "Printable", "Pseudo", "Electronic", "Serial", "Number", "." ]
d7a1fc9de712f9bcf68884a80826a7977df356fb
https://github.com/tehmaze/natural/blob/d7a1fc9de712f9bcf68884a80826a7977df356fb/natural/phone.py#L234-L246
train
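The pseudo-ESN above is just '80' plus the last six hex digits of a SHA-1 over the first 14 MEID digits; reproducing the doctest value without the meid() normalisation, which for an already-clean uppercase MEID like this one is assumed to be a pass-through:

    import hashlib
    from binascii import unhexlify

    meid_hex = '1B69B4BA630F34E'
    digest = hashlib.sha1(unhexlify(meid_hex[:14])).hexdigest()
    print('80' + digest[-6:].upper())     # 805F9EF7, per the doctest above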
tehmaze/natural
natural/size.py
filesize
def filesize(value, format='decimal', digits=2): ''' Convert a file size into natural readable format. Multiple formats are supported. :param value: size :param format: default ``decimal``, choices ``binary``, ``decimal`` or ``gnu`` :param digits: default ``2`` >>> print(filesize(123)) 123.00 B >>> print(filesize(123456)) 120.56 kB >>> print(filesize(1234567890)) 1.15 GB ''' if format not in FILESIZE_SUFFIX: raise TypeError base = FILESIZE_BASE[format] size = int(value) sign = size < 0 and u'-' or '' size = abs(size) for i, suffix in enumerate(FILESIZE_SUFFIX[format]): unit = base ** (i + 1) if size < unit: result = u''.join([ sign, _format(base * size / float(unit), digits), u' ', suffix, ]) if format == 'gnu': result = result.replace(' ', '') return result raise OverflowError
python
def filesize(value, format='decimal', digits=2): ''' Convert a file size into natural readable format. Multiple formats are supported. :param value: size :param format: default ``decimal``, choices ``binary``, ``decimal`` or ``gnu`` :param digits: default ``2`` >>> print(filesize(123)) 123.00 B >>> print(filesize(123456)) 120.56 kB >>> print(filesize(1234567890)) 1.15 GB ''' if format not in FILESIZE_SUFFIX: raise TypeError base = FILESIZE_BASE[format] size = int(value) sign = size < 0 and u'-' or '' size = abs(size) for i, suffix in enumerate(FILESIZE_SUFFIX[format]): unit = base ** (i + 1) if size < unit: result = u''.join([ sign, _format(base * size / float(unit), digits), u' ', suffix, ]) if format == 'gnu': result = result.replace(' ', '') return result raise OverflowError
[ "def", "filesize", "(", "value", ",", "format", "=", "'decimal'", ",", "digits", "=", "2", ")", ":", "if", "format", "not", "in", "FILESIZE_SUFFIX", ":", "raise", "TypeError", "base", "=", "FILESIZE_BASE", "[", "format", "]", "size", "=", "int", "(", "value", ")", "sign", "=", "size", "<", "0", "and", "u'-'", "or", "''", "size", "=", "abs", "(", "size", ")", "for", "i", ",", "suffix", "in", "enumerate", "(", "FILESIZE_SUFFIX", "[", "format", "]", ")", ":", "unit", "=", "base", "**", "(", "i", "+", "1", ")", "if", "size", "<", "unit", ":", "result", "=", "u''", ".", "join", "(", "[", "sign", ",", "_format", "(", "base", "*", "size", "/", "float", "(", "unit", ")", ",", "digits", ")", ",", "u' '", ",", "suffix", ",", "]", ")", "if", "format", "==", "'gnu'", ":", "result", "=", "result", ".", "replace", "(", "' '", ",", "''", ")", "return", "result", "raise", "OverflowError" ]
Convert a file size into natural readable format. Multiple formats are supported. :param value: size :param format: default ``decimal``, choices ``binary``, ``decimal`` or ``gnu`` :param digits: default ``2`` >>> print(filesize(123)) 123.00 B >>> print(filesize(123456)) 120.56 kB >>> print(filesize(1234567890)) 1.15 GB
[ "Convert", "a", "file", "size", "into", "natural", "readable", "format", ".", "Multiple", "formats", "are", "supported", "." ]
d7a1fc9de712f9bcf68884a80826a7977df356fb
https://github.com/tehmaze/natural/blob/d7a1fc9de712f9bcf68884a80826a7977df356fb/natural/size.py#L12-L51
train
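The sign is split off before the suffix search, so negative sizes keep their minus sign in front of the formatted value; combining that with the doctest figures above:

    >>> from natural.size import filesize
    >>> print(filesize(-123456))
    -120.56 kB
    >>> print(filesize(0))
    0.00 B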
CiscoUcs/UcsPythonSDK
src/UcsSdk/utils/helper.py
create_dn_in_filter
def create_dn_in_filter(filter_class, filter_value, helper): """ Creates filter object for given class name, and DN values.""" in_filter = FilterFilter() in_filter.AddChild(create_dn_wcard_filter(filter_class, filter_value)) return in_filter
python
def create_dn_in_filter(filter_class, filter_value, helper): """ Creates filter object for given class name, and DN values.""" in_filter = FilterFilter() in_filter.AddChild(create_dn_wcard_filter(filter_class, filter_value)) return in_filter
[ "def", "create_dn_in_filter", "(", "filter_class", ",", "filter_value", ",", "helper", ")", ":", "in_filter", "=", "FilterFilter", "(", ")", "in_filter", ".", "AddChild", "(", "create_dn_wcard_filter", "(", "filter_class", ",", "filter_value", ")", ")", "return", "in_filter" ]
Creates filter object for given class name, and DN values.
[ "Creates", "filter", "object", "for", "given", "class", "name", "and", "DN", "values", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/utils/helper.py#L24-L28
train
CiscoUcs/UcsPythonSDK
src/UcsSdk/utils/helper.py
get_managed_object
def get_managed_object(handle, class_id, params, inMo=None, in_heir=False, dump=False): """Get the specified MO from UCS Manager. :param managed_object: MO classid :in_filter: input filter value :returns: Managed Object :raises: UcsException in case of failure. """ return handle.GetManagedObject(inMo, class_id, params, inHierarchical=in_heir, dumpXml=dump)
python
def get_managed_object(handle, class_id, params, inMo=None, in_heir=False, dump=False): """Get the specified MO from UCS Manager. :param managed_object: MO classid :in_filter: input filter value :returns: Managed Object :raises: UcsException in case of failure. """ return handle.GetManagedObject(inMo, class_id, params, inHierarchical=in_heir, dumpXml=dump)
[ "def", "get_managed_object", "(", "handle", ",", "class_id", ",", "params", ",", "inMo", "=", "None", ",", "in_heir", "=", "False", ",", "dump", "=", "False", ")", ":", "return", "handle", ".", "GetManagedObject", "(", "inMo", ",", "class_id", ",", "params", ",", "inHierarchical", "=", "in_heir", ",", "dumpXml", "=", "dump", ")" ]
Get the specified MO from UCS Manager. :param managed_object: MO classid :in_filter: input filter value :returns: Managed Object :raises: UcsException in case of failure.
[ "Get", "the", "specified", "MO", "from", "UCS", "Manager", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/utils/helper.py#L61-L72
train
CiscoUcs/UcsPythonSDK
src/UcsSdk/utils/helper.py
config_managed_object
def config_managed_object(p_dn, p_class_id, class_id, mo_config, mo_dn, handle=None, delete=True): """Configure the specified MO in UCS Manager. :param uuid: MO config :param p_dn: parent MO DN :param p_class_id: parent MO class ID :param class_id: MO class ID :param MO configuration: MO config :param mo_dn: MO DN value :param handle: optional UCS Manager handle object :returns: Managed Object :raises: UcsOperationError in case of failure. """ if handle is None: handle = self.handle try: result = handle.AddManagedObject(None, classId=class_id, params=mo_config, modifyPresent=True, dumpXml=YesOrNo.FALSE) return result except UcsException as ex: print(_("Cisco client exception: %(msg)s"), {'msg': ex}) raise exception.UcsOperationError('config_managed_object', error=ex)
python
def config_managed_object(p_dn, p_class_id, class_id, mo_config, mo_dn, handle=None, delete=True): """Configure the specified MO in UCS Manager. :param uuid: MO config :param p_dn: parent MO DN :param p_class_id: parent MO class ID :param class_id: MO class ID :param MO configuration: MO config :param mo_dn: MO DN value :param handle: optional UCS Manager handle object :returns: Managed Object :raises: UcsOperationError in case of failure. """ if handle is None: handle = self.handle try: result = handle.AddManagedObject(None, classId=class_id, params=mo_config, modifyPresent=True, dumpXml=YesOrNo.FALSE) return result except UcsException as ex: print(_("Cisco client exception: %(msg)s"), {'msg': ex}) raise exception.UcsOperationError('config_managed_object', error=ex)
[ "def", "config_managed_object", "(", "p_dn", ",", "p_class_id", ",", "class_id", ",", "mo_config", ",", "mo_dn", ",", "handle", "=", "None", ",", "delete", "=", "True", ")", ":", "if", "handle", "is", "None", ":", "handle", "=", "self", ".", "handle", "try", ":", "result", "=", "handle", ".", "AddManagedObject", "(", "None", ",", "classId", "=", "class_id", ",", "params", "=", "mo_config", ",", "modifyPresent", "=", "True", ",", "dumpXml", "=", "YesOrNo", ".", "FALSE", ")", "return", "result", "except", "UcsException", "as", "ex", ":", "print", "(", "_", "(", "\"Cisco client exception: %(msg)s\"", ")", ",", "{", "'msg'", ":", "ex", "}", ")", "raise", "exception", ".", "UcsOperationError", "(", "'config_managed_object'", ",", "error", "=", "ex", ")" ]
Configure the specified MO in UCS Manager. :param uuid: MO config :param p_dn: parent MO DN :param p_class_id: parent MO class ID :param class_id: MO class ID :param MO configuration: MO config :param mo_dn: MO DN value :param handle: optional UCS Manager handle object :returns: Managed Object :raises: UcsOperationError in case of failure.
[ "Configure", "the", "specified", "MO", "in", "UCS", "Manager", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/utils/helper.py#L83-L109
train
jaraco/jaraco.mongodb
jaraco/mongodb/query.py
project
def project(*args, **kwargs): """ Build a projection for MongoDB. Due to https://jira.mongodb.org/browse/SERVER-3156, until MongoDB 2.6, the values must be integers and not boolean. >>> project(a=True) == {'a': 1} True Once MongoDB 2.6 is released, replace use of this function with a simple dict. """ projection = dict(*args, **kwargs) return {key: int(value) for key, value in six.iteritems(projection)}
python
def project(*args, **kwargs): """ Build a projection for MongoDB. Due to https://jira.mongodb.org/browse/SERVER-3156, until MongoDB 2.6, the values must be integers and not boolean. >>> project(a=True) == {'a': 1} True Once MongoDB 2.6 is released, replace use of this function with a simple dict. """ projection = dict(*args, **kwargs) return {key: int(value) for key, value in six.iteritems(projection)}
[ "def", "project", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "projection", "=", "dict", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "{", "key", ":", "int", "(", "value", ")", "for", "key", ",", "value", "in", "six", ".", "iteritems", "(", "projection", ")", "}" ]
Build a projection for MongoDB. Due to https://jira.mongodb.org/browse/SERVER-3156, until MongoDB 2.6, the values must be integers and not boolean. >>> project(a=True) == {'a': 1} True Once MongoDB 2.6 is released, replace use of this function with a simple dict.
[ "Build", "a", "projection", "for", "MongoDB", "." ]
280f17894941f4babf2e97db033dbb1fd2b9f705
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/query.py#L6-L20
train
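Per the doctest above, truthy values become 1 and falsy values 0, which is all MongoDB accepted before 2.6; for example:

    >>> from jaraco.mongodb.query import project
    >>> project(a=True, b=False) == {'a': 1, 'b': 0}
    True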
jaraco/jaraco.mongodb
jaraco/mongodb/query.py
upsert_and_fetch
def upsert_and_fetch(coll, doc, **kwargs): """ Fetch exactly one matching document or upsert the document if not found, returning the matching or upserted document. See https://jira.mongodb.org/browse/SERVER-28434 describing the condition where MongoDB is uninterested in providing an upsert and fetch behavior. >>> instance = getfixture('mongodb_instance').get_connection() >>> coll = instance.test_upsert_and_fetch.items >>> doc = {'foo': 'bar'} >>> inserted = upsert_and_fetch(coll, doc) >>> inserted {...'foo': 'bar'...} >>> upsert_and_fetch(coll, doc) == inserted True """ return coll.find_one_and_update( doc, {"$setOnInsert": doc}, upsert=True, return_document=pymongo.ReturnDocument.AFTER, **kwargs )
python
def upsert_and_fetch(coll, doc, **kwargs): """ Fetch exactly one matching document or upsert the document if not found, returning the matching or upserted document. See https://jira.mongodb.org/browse/SERVER-28434 describing the condition where MongoDB is uninterested in providing an upsert and fetch behavior. >>> instance = getfixture('mongodb_instance').get_connection() >>> coll = instance.test_upsert_and_fetch.items >>> doc = {'foo': 'bar'} >>> inserted = upsert_and_fetch(coll, doc) >>> inserted {...'foo': 'bar'...} >>> upsert_and_fetch(coll, doc) == inserted True """ return coll.find_one_and_update( doc, {"$setOnInsert": doc}, upsert=True, return_document=pymongo.ReturnDocument.AFTER, **kwargs )
[ "def", "upsert_and_fetch", "(", "coll", ",", "doc", ",", "*", "*", "kwargs", ")", ":", "return", "coll", ".", "find_one_and_update", "(", "doc", ",", "{", "\"$setOnInsert\"", ":", "doc", "}", ",", "upsert", "=", "True", ",", "return_document", "=", "pymongo", ".", "ReturnDocument", ".", "AFTER", ",", "*", "*", "kwargs", ")" ]
Fetch exactly one matching document or upsert the document if not found, returning the matching or upserted document. See https://jira.mongodb.org/browse/SERVER-28434 describing the condition where MongoDB is uninterested in providing an upsert and fetch behavior. >>> instance = getfixture('mongodb_instance').get_connection() >>> coll = instance.test_upsert_and_fetch.items >>> doc = {'foo': 'bar'} >>> inserted = upsert_and_fetch(coll, doc) >>> inserted {...'foo': 'bar'...} >>> upsert_and_fetch(coll, doc) == inserted True
[ "Fetch", "exactly", "one", "matching", "document", "or", "upsert", "the", "document", "if", "not", "found", "returning", "the", "matching", "or", "upserted", "document", "." ]
280f17894941f4babf2e97db033dbb1fd2b9f705
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/query.py#L39-L64
train
Danielhiversen/pySwitchmate
switchmate/__init__.py
Switchmate.update
def update(self, retry=2) -> None: """Synchronize state with switch.""" try: _LOGGER.debug("Updating device state.") key = ON_KEY if not self._flip_on_off else OFF_KEY self.state = self._device.readCharacteristic(HANDLE) == key except (bluepy.btle.BTLEException, AttributeError): if retry < 1 or not self._connect(): self.available = False _LOGGER.error("Failed to update device state.", exc_info=True) return None return self.update(retry-1) self.available = True return None
python
def update(self, retry=2) -> None: """Synchronize state with switch.""" try: _LOGGER.debug("Updating device state.") key = ON_KEY if not self._flip_on_off else OFF_KEY self.state = self._device.readCharacteristic(HANDLE) == key except (bluepy.btle.BTLEException, AttributeError): if retry < 1 or not self._connect(): self.available = False _LOGGER.error("Failed to update device state.", exc_info=True) return None return self.update(retry-1) self.available = True return None
[ "def", "update", "(", "self", ",", "retry", "=", "2", ")", "->", "None", ":", "try", ":", "_LOGGER", ".", "debug", "(", "\"Updating device state.\"", ")", "key", "=", "ON_KEY", "if", "not", "self", ".", "_flip_on_off", "else", "OFF_KEY", "self", ".", "state", "=", "self", ".", "_device", ".", "readCharacteristic", "(", "HANDLE", ")", "==", "key", "except", "(", "bluepy", ".", "btle", ".", "BTLEException", ",", "AttributeError", ")", ":", "if", "retry", "<", "1", "or", "not", "self", ".", "_connect", "(", ")", ":", "self", ".", "available", "=", "False", "_LOGGER", ".", "error", "(", "\"Failed to update device state.\"", ",", "exc_info", "=", "True", ")", "return", "None", "return", "self", ".", "update", "(", "retry", "-", "1", ")", "self", ".", "available", "=", "True", "return", "None" ]
Synchronize state with switch.
[ "Synchronize", "state", "with", "switch", "." ]
9563345d35d7dcc0b920b1939eea9a7897223221
https://github.com/Danielhiversen/pySwitchmate/blob/9563345d35d7dcc0b920b1939eea9a7897223221/switchmate/__init__.py#L56-L69
train
yeraydiazdiaz/lunr.py
lunr/match_data.py
MatchData.combine
def combine(self, other): """An instance of lunr.MatchData will be created for every term that matches a document. However only one instance is required in a lunr.Index~Result. This method combines metadata from another instance of MatchData with this object's metadata. """ for term in other.metadata.keys(): if term not in self.metadata: self.metadata[term] = {} fields = other.metadata[term].keys() for field in fields: if field not in self.metadata[term]: self.metadata[term][field] = {} keys = other.metadata[term][field].keys() for key in keys: if key not in self.metadata[term][field]: self.metadata[term][field][key] = other.metadata[term][field][ key ] else: self.metadata[term][field][key].extend( other.metadata[term][field][key] )
python
def combine(self, other): """An instance of lunr.MatchData will be created for every term that matches a document. However only one instance is required in a lunr.Index~Result. This method combines metadata from another instance of MatchData with this object's metadata. """ for term in other.metadata.keys(): if term not in self.metadata: self.metadata[term] = {} fields = other.metadata[term].keys() for field in fields: if field not in self.metadata[term]: self.metadata[term][field] = {} keys = other.metadata[term][field].keys() for key in keys: if key not in self.metadata[term][field]: self.metadata[term][field][key] = other.metadata[term][field][ key ] else: self.metadata[term][field][key].extend( other.metadata[term][field][key] )
[ "def", "combine", "(", "self", ",", "other", ")", ":", "for", "term", "in", "other", ".", "metadata", ".", "keys", "(", ")", ":", "if", "term", "not", "in", "self", ".", "metadata", ":", "self", ".", "metadata", "[", "term", "]", "=", "{", "}", "fields", "=", "other", ".", "metadata", "[", "term", "]", ".", "keys", "(", ")", "for", "field", "in", "fields", ":", "if", "field", "not", "in", "self", ".", "metadata", "[", "term", "]", ":", "self", ".", "metadata", "[", "term", "]", "[", "field", "]", "=", "{", "}", "keys", "=", "other", ".", "metadata", "[", "term", "]", "[", "field", "]", ".", "keys", "(", ")", "for", "key", "in", "keys", ":", "if", "key", "not", "in", "self", ".", "metadata", "[", "term", "]", "[", "field", "]", ":", "self", ".", "metadata", "[", "term", "]", "[", "field", "]", "[", "key", "]", "=", "other", ".", "metadata", "[", "term", "]", "[", "field", "]", "[", "key", "]", "else", ":", "self", ".", "metadata", "[", "term", "]", "[", "field", "]", "[", "key", "]", ".", "extend", "(", "other", ".", "metadata", "[", "term", "]", "[", "field", "]", "[", "key", "]", ")" ]
An instance of lunr.MatchData will be created for every term that matches a document. However only one instance is required in a lunr.Index~Result. This method combines metadata from another instance of MatchData with this object's metadata.
[ "An", "instance", "of", "lunr", ".", "MatchData", "will", "be", "created", "for", "every", "term", "that", "matches", "a", "document", "." ]
28ec3f6d4888295eed730211ee9617aa488d6ba3
https://github.com/yeraydiazdiaz/lunr.py/blob/28ec3f6d4888295eed730211ee9617aa488d6ba3/lunr/match_data.py#L25-L51
train
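A hedged sketch of the three-level merge above, setting the metadata mapping directly so the only behaviour exercised is combine() itself (constructing MatchData with no arguments is an assumption, since its __init__ is not shown here):

    from lunr.match_data import MatchData

    a = MatchData()
    a.metadata = {"plant": {"title": {"position": [[0, 5]]}}}

    b = MatchData()
    b.metadata = {"plant": {"body": {"position": [[10, 5]]}},
                  "green": {"title": {"position": [[6, 5]]}}}

    a.combine(b)
    assert a.metadata["plant"]["body"]["position"] == [[10, 5]]   # new field merged in
    assert "green" in a.metadata                                  # new term copied over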
CiscoUcs/UcsPythonSDK
src/UcsSdk/utils/power.py
UcsPower.get_power_state
def get_power_state(self): """Get current power state of this node :param node: Ironic node one of :class:`ironic.db.models.Node` :raises: InvalidParameterValue if required Ucs parameters are missing :raises: UcsOperationError: on an error from Ucs. :returns: Power state of the given node """ rn_array = [self.helper.service_profile, ManagedObject(NamingId.LS_POWER).MakeRn()] try: ls_power = ucs_helper.get_managed_object( self.helper.handle, LsPower.ClassId(), {LsPower.DN: UcsUtils.MakeDn(rn_array)}) if not ls_power: raise exception.UcsOperationError("get_power_state", "Failed to get LsPower MO, configure valid " "service-profile") return ls_power[0].getattr(LsPower.STATE) except UcsException as ex: raise exception.UcsOperationError(message=ex)
python
def get_power_state(self): """Get current power state of this node :param node: Ironic node one of :class:`ironic.db.models.Node` :raises: InvalidParameterValue if required Ucs parameters are missing :raises: UcsOperationError: on an error from Ucs. :returns: Power state of the given node """ rn_array = [self.helper.service_profile, ManagedObject(NamingId.LS_POWER).MakeRn()] try: ls_power = ucs_helper.get_managed_object( self.helper.handle, LsPower.ClassId(), {LsPower.DN: UcsUtils.MakeDn(rn_array)}) if not ls_power: raise exception.UcsOperationError("get_power_state", "Failed to get LsPower MO, configure valid " "service-profile") return ls_power[0].getattr(LsPower.STATE) except UcsException as ex: raise exception.UcsOperationError(message=ex)
[ "def", "get_power_state", "(", "self", ")", ":", "rn_array", "=", "[", "self", ".", "helper", ".", "service_profile", ",", "ManagedObject", "(", "NamingId", ".", "LS_POWER", ")", ".", "MakeRn", "(", ")", "]", "try", ":", "ls_power", "=", "ucs_helper", ".", "get_managed_object", "(", "self", ".", "helper", ".", "handle", ",", "LsPower", ".", "ClassId", "(", ")", ",", "{", "LsPower", ".", "DN", ":", "UcsUtils", ".", "MakeDn", "(", "rn_array", ")", "}", ")", "if", "not", "ls_power", ":", "raise", "exception", ".", "UcsOperationError", "(", "\"get_power_state\"", ",", "\"Failed to get LsPower MO, configure valid \"", "\"service-profile\"", ")", "return", "ls_power", "[", "0", "]", ".", "getattr", "(", "LsPower", ".", "STATE", ")", "except", "UcsException", "as", "ex", ":", "raise", "exception", ".", "UcsOperationError", "(", "message", "=", "ex", ")" ]
Get current power state of this node :param node: Ironic node one of :class:`ironic.db.models.Node` :raises: InvalidParameterValue if required Ucs parameters are missing :raises: UcsOperationError: on an error from Ucs. :returns: Power state of the given node
[ "Get", "current", "power", "state", "of", "this", "node" ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/utils/power.py#L30-L52
train
CiscoUcs/UcsPythonSDK
src/UcsSdk/utils/power.py
UcsPower.set_power_state
def set_power_state(self, desired_state): """Set power state of this node :param node: Ironic node one of :class:`ironic.db.models.Node` :raises: InvalidParameterValue if required seamicro parameters are missing. :raises: UcsOperationError on an error from UcsHandle Client. :returns: Power state of the given node """ rn_array = [self.helper.service_profile, ManagedObject(NamingId.LS_POWER).MakeRn()] try: ls_power = ucs_helper.get_managed_object(self.helper.handle, LsPower.ClassId(), {LsPower.DN: UcsUtils.MakeDn(rn_array)}) if not ls_power: raise exception.UcsOperationError("set_power_state", "Failed to get power MO," " configure valid service-profile.") else: ls_power_set = self.helper.handle.SetManagedObject( ls_power, LsPower.ClassId(), {LsPower.STATE: desired_state}, dumpXml=YesOrNo.TRUE ) if ls_power_set: power = ls_power_set.pop() return power.getattr(LsPower.STATE) else: return states.ERROR except Exception as ex: raise exception.UcsOperationError("set_power_state", "Failed to get power MO," "configure valid servie-profile.")
python
def set_power_state(self, desired_state): """Set power state of this node :param node: Ironic node one of :class:`ironic.db.models.Node` :raises: InvalidParameterValue if required seamicro parameters are missing. :raises: UcsOperationError on an error from UcsHandle Client. :returns: Power state of the given node """ rn_array = [self.helper.service_profile, ManagedObject(NamingId.LS_POWER).MakeRn()] try: ls_power = ucs_helper.get_managed_object(self.helper.handle, LsPower.ClassId(), {LsPower.DN: UcsUtils.MakeDn(rn_array)}) if not ls_power: raise exception.UcsOperationError("set_power_state", "Failed to get power MO," " configure valid service-profile.") else: ls_power_set = self.helper.handle.SetManagedObject( ls_power, LsPower.ClassId(), {LsPower.STATE: desired_state}, dumpXml=YesOrNo.TRUE ) if ls_power_set: power = ls_power_set.pop() return power.getattr(LsPower.STATE) else: return states.ERROR except Exception as ex: raise exception.UcsOperationError("set_power_state", "Failed to get power MO," "configure valid servie-profile.")
[ "def", "set_power_state", "(", "self", ",", "desired_state", ")", ":", "rn_array", "=", "[", "self", ".", "helper", ".", "service_profile", ",", "ManagedObject", "(", "NamingId", ".", "LS_POWER", ")", ".", "MakeRn", "(", ")", "]", "try", ":", "ls_power", "=", "ucs_helper", ".", "get_managed_object", "(", "self", ".", "helper", ".", "handle", ",", "LsPower", ".", "ClassId", "(", ")", ",", "{", "LsPower", ".", "DN", ":", "UcsUtils", ".", "MakeDn", "(", "rn_array", ")", "}", ")", "if", "not", "ls_power", ":", "raise", "exception", ".", "UcsOperationError", "(", "\"set_power_state\"", ",", "\"Failed to get power MO,\"", "\" configure valid service-profile.\"", ")", "else", ":", "ls_power_set", "=", "self", ".", "helper", ".", "handle", ".", "SetManagedObject", "(", "ls_power", ",", "LsPower", ".", "ClassId", "(", ")", ",", "{", "LsPower", ".", "STATE", ":", "desired_state", "}", ",", "dumpXml", "=", "YesOrNo", ".", "TRUE", ")", "if", "ls_power_set", ":", "power", "=", "ls_power_set", ".", "pop", "(", ")", "return", "power", ".", "getattr", "(", "LsPower", ".", "STATE", ")", "else", ":", "return", "states", ".", "ERROR", "except", "Exception", "as", "ex", ":", "raise", "exception", ".", "UcsOperationError", "(", "\"set_power_state\"", ",", "\"Failed to get power MO,\"", "\"configure valid servie-profile.\"", ")" ]
Set power state of this node :param node: Ironic node one of :class:`ironic.db.models.Node` :raises: InvalidParameterValue if required Ucs parameters are missing. :raises: UcsOperationError on an error from UcsHandle Client. :returns: Power state of the given node
[ "Set", "power", "state", "of", "this", "node" ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/utils/power.py#L54-L88
train
CiscoUcs/UcsPythonSDK
src/UcsSdk/utils/power.py
UcsPower.reboot
def reboot(self): """Hard reset the power of this node. """ if self.get_power_state() == LsPower.CONST_STATE_DOWN: self.set_power_state(LsPower.CONST_STATE_UP) else: self.set_power_state(LsPower.CONST_STATE_HARD_RESET_IMMEDIATE)
python
def reboot(self): """Hard reset the power of this node. """ if self.get_power_state() == LsPower.CONST_STATE_DOWN: self.set_power_state(LsPower.CONST_STATE_UP) else: self.set_power_state(LsPower.CONST_STATE_HARD_RESET_IMMEDIATE)
[ "def", "reboot", "(", "self", ")", ":", "if", "self", ".", "get_power_state", "(", ")", "==", "LsPower", ".", "CONST_STATE_DOWN", ":", "self", ".", "set_power_state", "(", "LsPower", ".", "CONST_STATE_UP", ")", "else", ":", "self", ".", "set_power_state", "(", "LsPower", ".", "CONST_STATE_HARD_RESET_IMMEDIATE", ")" ]
Hard reset the power of this node.
[ "Hard", "reset", "the", "power", "of", "this", "node", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/utils/power.py#L90-L96
train
jaraco/jaraco.mongodb
jaraco/mongodb/helper.py
connect
def connect(uri, factory=pymongo.MongoClient): """ Use the factory to establish a connection to uri. """ warnings.warn( "do not use. Just call MongoClient directly.", DeprecationWarning) return factory(uri)
python
def connect(uri, factory=pymongo.MongoClient): """ Use the factory to establish a connection to uri. """ warnings.warn( "do not use. Just call MongoClient directly.", DeprecationWarning) return factory(uri)
[ "def", "connect", "(", "uri", ",", "factory", "=", "pymongo", ".", "MongoClient", ")", ":", "warnings", ".", "warn", "(", "\"do not use. Just call MongoClient directly.\"", ",", "DeprecationWarning", ")", "return", "factory", "(", "uri", ")" ]
Use the factory to establish a connection to uri.
[ "Use", "the", "factory", "to", "establish", "a", "connection", "to", "uri", "." ]
280f17894941f4babf2e97db033dbb1fd2b9f705
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/helper.py#L18-L24
train
jaraco/jaraco.mongodb
jaraco/mongodb/helper.py
connect_gridfs
def connect_gridfs(uri, db=None): """ Construct a GridFS instance for a MongoDB URI. """ return gridfs.GridFS( db or connect_db(uri), collection=get_collection(uri) or 'fs', )
python
def connect_gridfs(uri, db=None): """ Construct a GridFS instance for a MongoDB URI. """ return gridfs.GridFS( db or connect_db(uri), collection=get_collection(uri) or 'fs', )
[ "def", "connect_gridfs", "(", "uri", ",", "db", "=", "None", ")", ":", "return", "gridfs", ".", "GridFS", "(", "db", "or", "connect_db", "(", "uri", ")", ",", "collection", "=", "get_collection", "(", "uri", ")", "or", "'fs'", ",", ")" ]
Construct a GridFS instance for a MongoDB URI.
[ "Construct", "a", "GridFS", "instance", "for", "a", "MongoDB", "URI", "." ]
280f17894941f4babf2e97db033dbb1fd2b9f705
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/helper.py#L66-L73
train
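connect_gridfs wires a pymongo database to gridfs.GridFS; a hedged end-to-end sketch using pymongo and gridfs directly (the URI is illustrative and a running MongoDB is required):

    import gridfs
    import pymongo

    db = pymongo.MongoClient('mongodb://localhost:27017/myapp').get_database()
    fs = gridfs.GridFS(db, collection='fs')       # same default collection name as above

    file_id = fs.put(b'hello world', filename='greeting.txt')
    print(fs.get(file_id).read())                 # b'hello world'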
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
Compare
def Compare(fromMo, toMo, diff): """ Internal method to support CompareManagedObject functionality. """ from UcsBase import UcsUtils if (fromMo.classId != toMo.classId): return CompareStatus.TypesDifferent for prop in UcsUtils.GetUcsPropertyMetaAttributeList(str(fromMo.classId)): propMeta = UcsUtils.IsPropertyInMetaIgnoreCase(fromMo.classId, prop) if propMeta != None: if ((propMeta.access == UcsPropertyMeta.Internal) or (propMeta.access == UcsPropertyMeta.ReadOnly) or ( prop in toMo._excludePropList)): continue if ((toMo.__dict__.has_key(prop)) and (fromMo.getattr(prop) != toMo.getattr(prop))): diff.append(prop) if (len(diff) > 0): return CompareStatus.PropsDifferent return CompareStatus.Equal
python
def Compare(fromMo, toMo, diff): """ Internal method to support CompareManagedObject functionality. """ from UcsBase import UcsUtils if (fromMo.classId != toMo.classId): return CompareStatus.TypesDifferent for prop in UcsUtils.GetUcsPropertyMetaAttributeList(str(fromMo.classId)): propMeta = UcsUtils.IsPropertyInMetaIgnoreCase(fromMo.classId, prop) if propMeta != None: if ((propMeta.access == UcsPropertyMeta.Internal) or (propMeta.access == UcsPropertyMeta.ReadOnly) or ( prop in toMo._excludePropList)): continue if ((toMo.__dict__.has_key(prop)) and (fromMo.getattr(prop) != toMo.getattr(prop))): diff.append(prop) if (len(diff) > 0): return CompareStatus.PropsDifferent return CompareStatus.Equal
[ "def", "Compare", "(", "fromMo", ",", "toMo", ",", "diff", ")", ":", "from", "UcsBase", "import", "UcsUtils", "if", "(", "fromMo", ".", "classId", "!=", "toMo", ".", "classId", ")", ":", "return", "CompareStatus", ".", "TypesDifferent", "for", "prop", "in", "UcsUtils", ".", "GetUcsPropertyMetaAttributeList", "(", "str", "(", "fromMo", ".", "classId", ")", ")", ":", "propMeta", "=", "UcsUtils", ".", "IsPropertyInMetaIgnoreCase", "(", "fromMo", ".", "classId", ",", "prop", ")", "if", "propMeta", "!=", "None", ":", "if", "(", "(", "propMeta", ".", "access", "==", "UcsPropertyMeta", ".", "Internal", ")", "or", "(", "propMeta", ".", "access", "==", "UcsPropertyMeta", ".", "ReadOnly", ")", "or", "(", "prop", "in", "toMo", ".", "_excludePropList", ")", ")", ":", "continue", "if", "(", "(", "toMo", ".", "__dict__", ".", "has_key", "(", "prop", ")", ")", "and", "(", "fromMo", ".", "getattr", "(", "prop", ")", "!=", "toMo", ".", "getattr", "(", "prop", ")", ")", ")", ":", "diff", ".", "append", "(", "prop", ")", "if", "(", "len", "(", "diff", ")", ">", "0", ")", ":", "return", "CompareStatus", ".", "PropsDifferent", "return", "CompareStatus", ".", "Equal" ]
Internal method to support CompareManagedObject functionality.
[ "Internal", "method", "to", "support", "CompareManagedObject", "functionality", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L2742-L2761
train
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
TranslateManagedObject
def TranslateManagedObject(mObj, xlateOrg, xlateMap): """ Method used to translate a managedobject. This method is used in CompareManagedObject. """ from UcsBase import UcsUtils, WriteUcsWarning from Mos import OrgOrg xMO = mObj.Clone() xMO.SetHandle(mObj.GetHandle()) if (xlateOrg != None): matchObj = re.match(r'^(org-[\-\.:_a-zA-Z0-9]{1,16}/)*org-[\-\.:_a-zA-Z0-9]{1,16}', xMO.Dn) if matchObj: if UcsUtils.WordL(xMO.classId) == OrgOrg.ClassId(): orgMoMeta = UcsUtils.GetUcsPropertyMeta(UcsUtils.WordU(OrgOrg.ClassId()), "Meta") if orgMoMeta == None: # TODO: Add Warning/Error messages in Logger. WriteUcsWarning('[Warning]: Could not translate [%s]' % (xMO.Dn)) return xMO # Check for naming property matchObj1 = re.findall(r'(\[[^\]]+\])', orgMoMeta.rn) if matchObj1: UpdateMoDnAlongWithNamingProperties(xMO, orgMoMeta, xlateOrg) else: newDn = re.sub("%s" % (matchObj.group(0)), "%s" % (xlateOrg), xMO.Dn) # print "Translating", xMO.Dn, " => ", newDn xMO.Dn = newDn else: newDn = re.sub("^%s/" % (matchObj.group(0)), "%s/" % (xlateOrg), xMO.Dn) # print "Translating", xMO.Dn, " => ", newDn xMO.Dn = newDn if (xlateMap != None): originalDn = xMO.Dn if originalDn in xlateMap: xMoMeta = UcsUtils.GetUcsPropertyMeta(UcsUtils.WordU(xMO.classId), "Meta") if xMoMeta == None: # TODO: Add Warning/Error messages in Logger. WriteUcsWarning('[Warning]: Could not translate [%s]' % (originalDn)) return xMO # Check for naming property matchObj = re.findall(r'(\[[^\]]+\])', xMoMeta.rn) if matchObj: UpdateMoDnAlongWithNamingProperties(xMO, xMoMeta, xlateMap[originalDn]) else: # print "Translating", xMO.Dn, " => ", xlateMap[originalDn] xMO.Dn = xlateMap[originalDn] else: originalDn = re.sub(r'[/]*[^/]+$', '', originalDn) while (originalDn != None or originalDn == ""): if (not (originalDn in xlateMap)): originalDn = re.sub(r'[/]*[^/]+$', '', originalDn) continue newDn = re.sub("^%s/" % (originalDn), "%s/" % (xlateMap[originalDn]), xMO.Dn) # print "Translating", xMO.Dn, " => ", newDn xMO.Dn = newDn break return xMO
python
def TranslateManagedObject(mObj, xlateOrg, xlateMap): """ Method used to translate a managedobject. This method is used in CompareManagedObject. """ from UcsBase import UcsUtils, WriteUcsWarning from Mos import OrgOrg xMO = mObj.Clone() xMO.SetHandle(mObj.GetHandle()) if (xlateOrg != None): matchObj = re.match(r'^(org-[\-\.:_a-zA-Z0-9]{1,16}/)*org-[\-\.:_a-zA-Z0-9]{1,16}', xMO.Dn) if matchObj: if UcsUtils.WordL(xMO.classId) == OrgOrg.ClassId(): orgMoMeta = UcsUtils.GetUcsPropertyMeta(UcsUtils.WordU(OrgOrg.ClassId()), "Meta") if orgMoMeta == None: # TODO: Add Warning/Error messages in Logger. WriteUcsWarning('[Warning]: Could not translate [%s]' % (xMO.Dn)) return xMO # Check for naming property matchObj1 = re.findall(r'(\[[^\]]+\])', orgMoMeta.rn) if matchObj1: UpdateMoDnAlongWithNamingProperties(xMO, orgMoMeta, xlateOrg) else: newDn = re.sub("%s" % (matchObj.group(0)), "%s" % (xlateOrg), xMO.Dn) # print "Translating", xMO.Dn, " => ", newDn xMO.Dn = newDn else: newDn = re.sub("^%s/" % (matchObj.group(0)), "%s/" % (xlateOrg), xMO.Dn) # print "Translating", xMO.Dn, " => ", newDn xMO.Dn = newDn if (xlateMap != None): originalDn = xMO.Dn if originalDn in xlateMap: xMoMeta = UcsUtils.GetUcsPropertyMeta(UcsUtils.WordU(xMO.classId), "Meta") if xMoMeta == None: # TODO: Add Warning/Error messages in Logger. WriteUcsWarning('[Warning]: Could not translate [%s]' % (originalDn)) return xMO # Check for naming property matchObj = re.findall(r'(\[[^\]]+\])', xMoMeta.rn) if matchObj: UpdateMoDnAlongWithNamingProperties(xMO, xMoMeta, xlateMap[originalDn]) else: # print "Translating", xMO.Dn, " => ", xlateMap[originalDn] xMO.Dn = xlateMap[originalDn] else: originalDn = re.sub(r'[/]*[^/]+$', '', originalDn) while (originalDn != None or originalDn == ""): if (not (originalDn in xlateMap)): originalDn = re.sub(r'[/]*[^/]+$', '', originalDn) continue newDn = re.sub("^%s/" % (originalDn), "%s/" % (xlateMap[originalDn]), xMO.Dn) # print "Translating", xMO.Dn, " => ", newDn xMO.Dn = newDn break return xMO
[ "def", "TranslateManagedObject", "(", "mObj", ",", "xlateOrg", ",", "xlateMap", ")", ":", "from", "UcsBase", "import", "UcsUtils", ",", "WriteUcsWarning", "from", "Mos", "import", "OrgOrg", "xMO", "=", "mObj", ".", "Clone", "(", ")", "xMO", ".", "SetHandle", "(", "mObj", ".", "GetHandle", "(", ")", ")", "if", "(", "xlateOrg", "!=", "None", ")", ":", "matchObj", "=", "re", ".", "match", "(", "r'^(org-[\\-\\.:_a-zA-Z0-9]{1,16}/)*org-[\\-\\.:_a-zA-Z0-9]{1,16}'", ",", "xMO", ".", "Dn", ")", "if", "matchObj", ":", "if", "UcsUtils", ".", "WordL", "(", "xMO", ".", "classId", ")", "==", "OrgOrg", ".", "ClassId", "(", ")", ":", "orgMoMeta", "=", "UcsUtils", ".", "GetUcsPropertyMeta", "(", "UcsUtils", ".", "WordU", "(", "OrgOrg", ".", "ClassId", "(", ")", ")", ",", "\"Meta\"", ")", "if", "orgMoMeta", "==", "None", ":", "# TODO: Add Warning/Error messages in Logger.", "WriteUcsWarning", "(", "'[Warning]: Could not translate [%s]'", "%", "(", "xMO", ".", "Dn", ")", ")", "return", "xMO", "# Check for naming property", "matchObj1", "=", "re", ".", "findall", "(", "r'(\\[[^\\]]+\\])'", ",", "orgMoMeta", ".", "rn", ")", "if", "matchObj1", ":", "UpdateMoDnAlongWithNamingProperties", "(", "xMO", ",", "orgMoMeta", ",", "xlateOrg", ")", "else", ":", "newDn", "=", "re", ".", "sub", "(", "\"%s\"", "%", "(", "matchObj", ".", "group", "(", "0", ")", ")", ",", "\"%s\"", "%", "(", "xlateOrg", ")", ",", "xMO", ".", "Dn", ")", "# print \"Translating\", xMO.Dn, \" => \", newDn", "xMO", ".", "Dn", "=", "newDn", "else", ":", "newDn", "=", "re", ".", "sub", "(", "\"^%s/\"", "%", "(", "matchObj", ".", "group", "(", "0", ")", ")", ",", "\"%s/\"", "%", "(", "xlateOrg", ")", ",", "xMO", ".", "Dn", ")", "# print \"Translating\", xMO.Dn, \" => \", newDn", "xMO", ".", "Dn", "=", "newDn", "if", "(", "xlateMap", "!=", "None", ")", ":", "originalDn", "=", "xMO", ".", "Dn", "if", "originalDn", "in", "xlateMap", ":", "xMoMeta", "=", "UcsUtils", ".", "GetUcsPropertyMeta", "(", "UcsUtils", ".", "WordU", "(", "xMO", ".", "classId", ")", ",", "\"Meta\"", ")", "if", "xMoMeta", "==", "None", ":", "# TODO: Add Warning/Error messages in Logger.", "WriteUcsWarning", "(", "'[Warning]: Could not translate [%s]'", "%", "(", "originalDn", ")", ")", "return", "xMO", "# Check for naming property", "matchObj", "=", "re", ".", "findall", "(", "r'(\\[[^\\]]+\\])'", ",", "xMoMeta", ".", "rn", ")", "if", "matchObj", ":", "UpdateMoDnAlongWithNamingProperties", "(", "xMO", ",", "xMoMeta", ",", "xlateMap", "[", "originalDn", "]", ")", "else", ":", "# print \"Translating\", xMO.Dn, \" => \", xlateMap[originalDn]", "xMO", ".", "Dn", "=", "xlateMap", "[", "originalDn", "]", "else", ":", "originalDn", "=", "re", ".", "sub", "(", "r'[/]*[^/]+$'", ",", "''", ",", "originalDn", ")", "while", "(", "originalDn", "!=", "None", "or", "originalDn", "==", "\"\"", ")", ":", "if", "(", "not", "(", "originalDn", "in", "xlateMap", ")", ")", ":", "originalDn", "=", "re", ".", "sub", "(", "r'[/]*[^/]+$'", ",", "''", ",", "originalDn", ")", "continue", "newDn", "=", "re", ".", "sub", "(", "\"^%s/\"", "%", "(", "originalDn", ")", ",", "\"%s/\"", "%", "(", "xlateMap", "[", "originalDn", "]", ")", ",", "xMO", ".", "Dn", ")", "# print \"Translating\", xMO.Dn, \" => \", newDn", "xMO", ".", "Dn", "=", "newDn", "break", "return", "xMO" ]
Method used to translate a managedobject. This method is used in CompareManagedObject.
[ "Method", "used", "to", "translate", "a", "managedobject", ".", "This", "method", "is", "used", "in", "CompareManagedObject", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L2764-L2822
train
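The core of the translation above is a regular-expression rewrite of the leading org path in a DN. A hedged sketch of that single step, with made-up DN values; the pattern is the one TranslateManagedObject uses:

import re

dn = "org-root/org-HR/ls-web-server"          # illustrative source DN
xlate_org = "org-root/org-Finance"            # illustrative target org

match = re.match(r'^(org-[\-\.:_a-zA-Z0-9]{1,16}/)*org-[\-\.:_a-zA-Z0-9]{1,16}', dn)
if match:
    # Non-org objects under the matched org get their prefix swapped.
    new_dn = re.sub("^%s/" % match.group(0), "%s/" % xlate_org, dn)
    print(new_dn)   # org-root/org-Finance/ls-web-server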
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
ImportUcsSession
def ImportUcsSession(filePath, key): """ This operation will do a login to each UCS which is present in credential file. - filePath specifies the path of the credential file. - key specifies string used for secure encryption while ExportUcsSession operation. """ from UcsBase import UcsUtils, WriteUcsWarning, UcsValidationException # from p3 import p3_encrypt, p3_decrypt if filePath is None: raise UcsValidationException("filePath parameter is not provided.") # raise Exception('[Error]: Please provide filePath') if key is None: raise UcsValidationException("key parameter is not provided.") # raise Exception('[Error]: Please provide key') if not os.path.isfile(filePath) or not os.path.exists(filePath): raise UcsValidationException('[Error]: File <%s> does not exist ' % (filePath)) # raise Exception('[Error]: File <%s> does not exist ' %(filePath)) doc = xml.dom.minidom.parse(filePath) topNode = doc.documentElement # print topNode.localName if topNode is None or topNode.localName != UcsLoginXml.UCS_HANDLES: return None if (topNode.hasChildNodes()): # childList = topNode._get_childNodes() # childCount = childList._get_length() childList = topNode.childNodes childCount = len(childList) for i in range(childCount): childNode = childList.item(i) if (childNode.nodeType != Node.ELEMENT_NODE): continue if childNode.localName != UcsLoginXml.UCS: continue lName = None lUsername = None lPassword = None lNoSsl = False lPort = None if childNode.hasAttribute(UcsLoginXml.NAME): lName = childNode.getAttribute(UcsLoginXml.NAME) if childNode.hasAttribute(UcsLoginXml.USER_NAME): lUsername = childNode.getAttribute(UcsLoginXml.USER_NAME) if childNode.hasAttribute(UcsLoginXml.PASSWORD): # lPassword = p3_decrypt(childNode.getAttribute(UcsLoginXml.PASSWORD), key) lPassword = UcsUtils.DecryptPassword(childNode.getAttribute(UcsLoginXml.PASSWORD), key) if childNode.hasAttribute(UcsLoginXml.NO_SSL): lNoSsl = childNode.getAttribute(UcsLoginXml.NO_SSL) if childNode.hasAttribute(UcsLoginXml.PORT): lPort = childNode.getAttribute(UcsLoginXml.PORT) # Process Login if ((lName is None) or (lUsername == None) or (lPassword == None)): # WriteUcsWarning("[Warning] Insufficient information for login ...") continue try: handle = UcsHandle() handle.Login(name=lName, username=lUsername, password=lPassword, noSsl=lNoSsl, port=lPort) except Exception, err: # TODO: Add Warning/Error messages in Logger. WriteUcsWarning("[Connection Error<%s>] %s" % (lName, str(err)))
python
def ImportUcsSession(filePath, key): """ This operation will do a login to each UCS which is present in credential file. - filePath specifies the path of the credential file. - key specifies string used for secure encryption while ExportUcsSession operation. """ from UcsBase import UcsUtils, WriteUcsWarning, UcsValidationException # from p3 import p3_encrypt, p3_decrypt if filePath is None: raise UcsValidationException("filePath parameter is not provided.") # raise Exception('[Error]: Please provide filePath') if key is None: raise UcsValidationException("key parameter is not provided.") # raise Exception('[Error]: Please provide key') if not os.path.isfile(filePath) or not os.path.exists(filePath): raise UcsValidationException('[Error]: File <%s> does not exist ' % (filePath)) # raise Exception('[Error]: File <%s> does not exist ' %(filePath)) doc = xml.dom.minidom.parse(filePath) topNode = doc.documentElement # print topNode.localName if topNode is None or topNode.localName != UcsLoginXml.UCS_HANDLES: return None if (topNode.hasChildNodes()): # childList = topNode._get_childNodes() # childCount = childList._get_length() childList = topNode.childNodes childCount = len(childList) for i in range(childCount): childNode = childList.item(i) if (childNode.nodeType != Node.ELEMENT_NODE): continue if childNode.localName != UcsLoginXml.UCS: continue lName = None lUsername = None lPassword = None lNoSsl = False lPort = None if childNode.hasAttribute(UcsLoginXml.NAME): lName = childNode.getAttribute(UcsLoginXml.NAME) if childNode.hasAttribute(UcsLoginXml.USER_NAME): lUsername = childNode.getAttribute(UcsLoginXml.USER_NAME) if childNode.hasAttribute(UcsLoginXml.PASSWORD): # lPassword = p3_decrypt(childNode.getAttribute(UcsLoginXml.PASSWORD), key) lPassword = UcsUtils.DecryptPassword(childNode.getAttribute(UcsLoginXml.PASSWORD), key) if childNode.hasAttribute(UcsLoginXml.NO_SSL): lNoSsl = childNode.getAttribute(UcsLoginXml.NO_SSL) if childNode.hasAttribute(UcsLoginXml.PORT): lPort = childNode.getAttribute(UcsLoginXml.PORT) # Process Login if ((lName is None) or (lUsername == None) or (lPassword == None)): # WriteUcsWarning("[Warning] Insufficient information for login ...") continue try: handle = UcsHandle() handle.Login(name=lName, username=lUsername, password=lPassword, noSsl=lNoSsl, port=lPort) except Exception, err: # TODO: Add Warning/Error messages in Logger. WriteUcsWarning("[Connection Error<%s>] %s" % (lName, str(err)))
[ "def", "ImportUcsSession", "(", "filePath", ",", "key", ")", ":", "from", "UcsBase", "import", "UcsUtils", ",", "WriteUcsWarning", ",", "UcsValidationException", "# from p3 import p3_encrypt, p3_decrypt", "if", "filePath", "is", "None", ":", "raise", "UcsValidationException", "(", "\"filePath parameter is not provided.\"", ")", "# raise Exception('[Error]: Please provide filePath')", "if", "key", "is", "None", ":", "raise", "UcsValidationException", "(", "\"key parameter is not provided.\"", ")", "# raise Exception('[Error]: Please provide key')", "if", "not", "os", ".", "path", ".", "isfile", "(", "filePath", ")", "or", "not", "os", ".", "path", ".", "exists", "(", "filePath", ")", ":", "raise", "UcsValidationException", "(", "'[Error]: File <%s> does not exist '", "%", "(", "filePath", ")", ")", "# raise Exception('[Error]: File <%s> does not exist ' %(filePath))", "doc", "=", "xml", ".", "dom", ".", "minidom", ".", "parse", "(", "filePath", ")", "topNode", "=", "doc", ".", "documentElement", "# print topNode.localName", "if", "topNode", "is", "None", "or", "topNode", ".", "localName", "!=", "UcsLoginXml", ".", "UCS_HANDLES", ":", "return", "None", "if", "(", "topNode", ".", "hasChildNodes", "(", ")", ")", ":", "# childList = topNode._get_childNodes()", "# childCount = childList._get_length()", "childList", "=", "topNode", ".", "childNodes", "childCount", "=", "len", "(", "childList", ")", "for", "i", "in", "range", "(", "childCount", ")", ":", "childNode", "=", "childList", ".", "item", "(", "i", ")", "if", "(", "childNode", ".", "nodeType", "!=", "Node", ".", "ELEMENT_NODE", ")", ":", "continue", "if", "childNode", ".", "localName", "!=", "UcsLoginXml", ".", "UCS", ":", "continue", "lName", "=", "None", "lUsername", "=", "None", "lPassword", "=", "None", "lNoSsl", "=", "False", "lPort", "=", "None", "if", "childNode", ".", "hasAttribute", "(", "UcsLoginXml", ".", "NAME", ")", ":", "lName", "=", "childNode", ".", "getAttribute", "(", "UcsLoginXml", ".", "NAME", ")", "if", "childNode", ".", "hasAttribute", "(", "UcsLoginXml", ".", "USER_NAME", ")", ":", "lUsername", "=", "childNode", ".", "getAttribute", "(", "UcsLoginXml", ".", "USER_NAME", ")", "if", "childNode", ".", "hasAttribute", "(", "UcsLoginXml", ".", "PASSWORD", ")", ":", "# lPassword = p3_decrypt(childNode.getAttribute(UcsLoginXml.PASSWORD), key)", "lPassword", "=", "UcsUtils", ".", "DecryptPassword", "(", "childNode", ".", "getAttribute", "(", "UcsLoginXml", ".", "PASSWORD", ")", ",", "key", ")", "if", "childNode", ".", "hasAttribute", "(", "UcsLoginXml", ".", "NO_SSL", ")", ":", "lNoSsl", "=", "childNode", ".", "getAttribute", "(", "UcsLoginXml", ".", "NO_SSL", ")", "if", "childNode", ".", "hasAttribute", "(", "UcsLoginXml", ".", "PORT", ")", ":", "lPort", "=", "childNode", ".", "getAttribute", "(", "UcsLoginXml", ".", "PORT", ")", "# Process Login", "if", "(", "(", "lName", "is", "None", ")", "or", "(", "lUsername", "==", "None", ")", "or", "(", "lPassword", "==", "None", ")", ")", ":", "# WriteUcsWarning(\"[Warning] Insufficient information for login ...\")", "continue", "try", ":", "handle", "=", "UcsHandle", "(", ")", "handle", ".", "Login", "(", "name", "=", "lName", ",", "username", "=", "lUsername", ",", "password", "=", "lPassword", ",", "noSsl", "=", "lNoSsl", ",", "port", "=", "lPort", ")", "except", "Exception", ",", "err", ":", "# TODO: Add Warning/Error messages in Logger.", "WriteUcsWarning", "(", "\"[Connection Error<%s>] %s\"", "%", "(", "lName", ",", "str", "(", "err", ")", ")", ")" ]
This operation will do a login to each UCS which is present in credential file. - filePath specifies the path of the credential file. - key specifies string used for secure encryption while ExportUcsSession operation.
[ "This", "operation", "will", "do", "a", "login", "to", "each", "UCS", "which", "is", "present", "in", "credential", "file", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L2950-L3026
train
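A usage sketch for replaying a saved credential file; the import path, file path and key below are placeholders, and the key must match the one used when the file was written by the ExportUcsSession operation mentioned in the docstring:

from UcsSdk import ImportUcsSession   # assumed import path

# Logs in to every UCS listed in the credential file; connection errors
# for individual systems are reported as warnings rather than raised.
ImportUcsSession(filePath="/tmp/ucs-sessions.xml", key="my-export-key")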
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
UcsHandle.Uri
def Uri(self): """ Constructs the connection URI from name, noSsl and port instance variables. """ return ("%s://%s%s" % (("https", "http")[self._noSsl == True], self._name, (":" + str(self._port), "")[ (((self._noSsl == False) and (self._port == 80)) or ((self._noSsl == True) and (self._port == 443)))]))
python
def Uri(self): """ Constructs the connection URI from name, noSsl and port instance variables. """ return ("%s://%s%s" % (("https", "http")[self._noSsl == True], self._name, (":" + str(self._port), "")[ (((self._noSsl == False) and (self._port == 80)) or ((self._noSsl == True) and (self._port == 443)))]))
[ "def", "Uri", "(", "self", ")", ":", "return", "(", "\"%s://%s%s\"", "%", "(", "(", "\"https\"", ",", "\"http\"", ")", "[", "self", ".", "_noSsl", "==", "True", "]", ",", "self", ".", "_name", ",", "(", "\":\"", "+", "str", "(", "self", ".", "_port", ")", ",", "\"\"", ")", "[", "(", "(", "(", "self", ".", "_noSsl", "==", "False", ")", "and", "(", "self", ".", "_port", "==", "80", ")", ")", "or", "(", "(", "self", ".", "_noSsl", "==", "True", ")", "and", "(", "self", ".", "_port", "==", "443", ")", ")", ")", "]", ")", ")" ]
Constructs the connection URI from name, noSsl and port instance variables.
[ "Constructs", "the", "connection", "URI", "from", "name", "noSsl", "and", "port", "instance", "variables", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L80-L83
train
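Uri assembles the endpoint from _noSsl, _name and _port via tuple indexing; note that, as written, the port suffix is dropped for https with port 80 or http with port 443. A small standalone sketch of the same construction using the conventional scheme and default-port pairing instead (all names below are illustrative):

def build_uri(name, no_ssl, port):
    # Scheme follows the noSsl flag; the port suffix is omitted only when
    # it is the default for that scheme.
    scheme = "http" if no_ssl else "https"
    default = 80 if no_ssl else 443
    suffix = "" if port == default else ":%d" % port
    return "%s://%s%s" % (scheme, name, suffix)

print(build_uri("ucs-vip.example.com", False, 443))    # https://ucs-vip.example.com
print(build_uri("ucs-vip.example.com", True, 8080))    # http://ucs-vip.example.com:8080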
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
UcsHandle.UndoTransaction
def UndoTransaction(self): """ Cancels any running transaction. """ from Ucs import ConfigMap self._transactionInProgress = False self._configMap = ConfigMap()
python
def UndoTransaction(self): """ Cancels any running transaction. """ from Ucs import ConfigMap self._transactionInProgress = False self._configMap = ConfigMap()
[ "def", "UndoTransaction", "(", "self", ")", ":", "from", "Ucs", "import", "ConfigMap", "self", ".", "_transactionInProgress", "=", "False", "self", ".", "_configMap", "=", "ConfigMap", "(", ")" ]
Cancels any running transaction.
[ "Cancels", "any", "running", "transaction", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L95-L100
train
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
UcsHandle.CompleteTransaction
def CompleteTransaction(self, dumpXml=None): """ Completes a transaction. This method completes a transaction by sending the final configuration (modification queries stored in configMap) to UCS and returns the result. """ from Ucs import ConfigMap, Pair from UcsBase import ManagedObject, WriteUcsWarning, WriteObject, UcsException self._transactionInProgress = False ccm = self.ConfigConfMos(self._configMap, YesOrNo.FALSE, dumpXml) self._configMap = ConfigMap() if ccm.errorCode == 0: moList = [] for child in ccm.OutConfigs.GetChild(): if (isinstance(child, Pair) == True): for mo in child.GetChild(): moList.append(mo) elif (isinstance(child, ManagedObject) == True): moList.append(child) # WriteObject(moList) return moList else: # raise Exception('[Error]: CompleteTransaction [Code]:' + ccm.errorCode + ' [Description]:' + ccm.errorDescr) raise UcsException(ccm.errorCode, ccm.errorDescr)
python
def CompleteTransaction(self, dumpXml=None): """ Completes a transaction. This method completes a transaction by sending the final configuration (modification queries stored in configMap) to UCS and returns the result. """ from Ucs import ConfigMap, Pair from UcsBase import ManagedObject, WriteUcsWarning, WriteObject, UcsException self._transactionInProgress = False ccm = self.ConfigConfMos(self._configMap, YesOrNo.FALSE, dumpXml) self._configMap = ConfigMap() if ccm.errorCode == 0: moList = [] for child in ccm.OutConfigs.GetChild(): if (isinstance(child, Pair) == True): for mo in child.GetChild(): moList.append(mo) elif (isinstance(child, ManagedObject) == True): moList.append(child) # WriteObject(moList) return moList else: # raise Exception('[Error]: CompleteTransaction [Code]:' + ccm.errorCode + ' [Description]:' + ccm.errorDescr) raise UcsException(ccm.errorCode, ccm.errorDescr)
[ "def", "CompleteTransaction", "(", "self", ",", "dumpXml", "=", "None", ")", ":", "from", "Ucs", "import", "ConfigMap", ",", "Pair", "from", "UcsBase", "import", "ManagedObject", ",", "WriteUcsWarning", ",", "WriteObject", ",", "UcsException", "self", ".", "_transactionInProgress", "=", "False", "ccm", "=", "self", ".", "ConfigConfMos", "(", "self", ".", "_configMap", ",", "YesOrNo", ".", "FALSE", ",", "dumpXml", ")", "self", ".", "_configMap", "=", "ConfigMap", "(", ")", "if", "ccm", ".", "errorCode", "==", "0", ":", "moList", "=", "[", "]", "for", "child", "in", "ccm", ".", "OutConfigs", ".", "GetChild", "(", ")", ":", "if", "(", "isinstance", "(", "child", ",", "Pair", ")", "==", "True", ")", ":", "for", "mo", "in", "child", ".", "GetChild", "(", ")", ":", "moList", ".", "append", "(", "mo", ")", "elif", "(", "isinstance", "(", "child", ",", "ManagedObject", ")", "==", "True", ")", ":", "moList", ".", "append", "(", "child", ")", "# WriteObject(moList)", "return", "moList", "else", ":", "# raise Exception('[Error]: CompleteTransaction [Code]:' + ccm.errorCode + ' [Description]:' + ccm.errorDescr)", "raise", "UcsException", "(", "ccm", ".", "errorCode", ",", "ccm", ".", "errorDescr", ")" ]
Completes a transaction. This method completes a transaction by sending the final configuration (modification queries stored in configMap) to UCS and returns the result.
[ "Completes", "a", "transaction", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L102-L127
train
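A hedged sketch of the transaction flow around CompleteTransaction and UndoTransaction, assuming an already logged-in handle and a StartTransaction counterpart that is not shown in this excerpt; the import path for UcsException is also an assumption:

from UcsSdk.UcsBase import UcsException   # assumed import path

try:
    handle.StartTransaction()             # assumed counterpart; begins queuing changes
    # ... queue configuration changes on the handle here ...
    mo_list = handle.CompleteTransaction()
    for mo in mo_list:
        print(mo.Dn)                      # managed objects returned by the commit
except UcsException:
    handle.UndoTransaction()              # discard the queued configMap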
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
UcsHandle.XmlRawQuery
def XmlRawQuery(self, xml, dumpXml=None): """ Accepts xmlQuery String and returns xml response String. No object manipulation is done in this method. """ if (dumpXml == None): dumpXml = self._dumpXml uri = self.Uri() + '/nuova' if (dumpXml in _AffirmativeList): print '%s ====> %s' % (self._ucs, xml) # req = urllib2.Request(url=uri,data=xml) # f = urllib2.urlopen(req) w = xml.dom.minidom.Document() if (self._noSsl): req = urllib2.Request(url=uri, data=w.toxml()) opener = urllib2.build_opener(SmartRedirectHandler()) f = opener.open(req) # print "##", f , "##" if type(f) is list: if (len(f) == 2 and f[0] == 302): # self._noSsl = False # uri = self.Uri() + '/nuova' uri = f[1] req = urllib2.Request(url=uri, data=w.toxml()) f = urllib2.urlopen(req) # print "status code is:",f[0] # print "location is:", f[1] else: req = urllib2.Request(url=uri, data=w.toxml()) f = urllib2.urlopen(req) rsp = f.read() if (dumpXml in _AffirmativeList): print '%s <==== %s' % (self._ucs, rsp) return rsp
python
def XmlRawQuery(self, xml, dumpXml=None): """ Accepts xmlQuery String and returns xml response String. No object manipulation is done in this method. """ if (dumpXml == None): dumpXml = self._dumpXml uri = self.Uri() + '/nuova' if (dumpXml in _AffirmativeList): print '%s ====> %s' % (self._ucs, xml) # req = urllib2.Request(url=uri,data=xml) # f = urllib2.urlopen(req) w = xml.dom.minidom.Document() if (self._noSsl): req = urllib2.Request(url=uri, data=w.toxml()) opener = urllib2.build_opener(SmartRedirectHandler()) f = opener.open(req) # print "##", f , "##" if type(f) is list: if (len(f) == 2 and f[0] == 302): # self._noSsl = False # uri = self.Uri() + '/nuova' uri = f[1] req = urllib2.Request(url=uri, data=w.toxml()) f = urllib2.urlopen(req) # print "status code is:",f[0] # print "location is:", f[1] else: req = urllib2.Request(url=uri, data=w.toxml()) f = urllib2.urlopen(req) rsp = f.read() if (dumpXml in _AffirmativeList): print '%s <==== %s' % (self._ucs, rsp) return rsp
[ "def", "XmlRawQuery", "(", "self", ",", "xml", ",", "dumpXml", "=", "None", ")", ":", "if", "(", "dumpXml", "==", "None", ")", ":", "dumpXml", "=", "self", ".", "_dumpXml", "uri", "=", "self", ".", "Uri", "(", ")", "+", "'/nuova'", "if", "(", "dumpXml", "in", "_AffirmativeList", ")", ":", "print", "'%s ====> %s'", "%", "(", "self", ".", "_ucs", ",", "xml", ")", "# req = urllib2.Request(url=uri,data=xml)", "# f = urllib2.urlopen(req)", "w", "=", "xml", ".", "dom", ".", "minidom", ".", "Document", "(", ")", "if", "(", "self", ".", "_noSsl", ")", ":", "req", "=", "urllib2", ".", "Request", "(", "url", "=", "uri", ",", "data", "=", "w", ".", "toxml", "(", ")", ")", "opener", "=", "urllib2", ".", "build_opener", "(", "SmartRedirectHandler", "(", ")", ")", "f", "=", "opener", ".", "open", "(", "req", ")", "# print \"##\", f , \"##\"", "if", "type", "(", "f", ")", "is", "list", ":", "if", "(", "len", "(", "f", ")", "==", "2", "and", "f", "[", "0", "]", "==", "302", ")", ":", "# self._noSsl = False", "# uri = self.Uri() + '/nuova'", "uri", "=", "f", "[", "1", "]", "req", "=", "urllib2", ".", "Request", "(", "url", "=", "uri", ",", "data", "=", "w", ".", "toxml", "(", ")", ")", "f", "=", "urllib2", ".", "urlopen", "(", "req", ")", "# print \"status code is:\",f[0]", "# print \"location is:\", f[1]", "else", ":", "req", "=", "urllib2", ".", "Request", "(", "url", "=", "uri", ",", "data", "=", "w", ".", "toxml", "(", ")", ")", "f", "=", "urllib2", ".", "urlopen", "(", "req", ")", "rsp", "=", "f", ".", "read", "(", ")", "if", "(", "dumpXml", "in", "_AffirmativeList", ")", ":", "print", "'%s <==== %s'", "%", "(", "self", ".", "_ucs", ",", "rsp", ")", "return", "rsp" ]
Accepts xmlQuery String and returns xml response String. No object manipulation is done in this method.
[ "Accepts", "xmlQuery", "String", "and", "returns", "xml", "response", "String", ".", "No", "object", "manipulation", "is", "done", "in", "this", "method", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L268-L303
train
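A hedged sketch of the intended call shape for XmlRawQuery on a logged-in handle; the query string is a placeholder and a real request also carries the session cookie. Note that, as written above, the xml parameter shadows the xml module and the posted body comes from an empty minidom Document rather than the supplied string, so the method would need that fixed before this sketch behaves as described:

# Placeholder raw query; dumpXml=True echoes the request and response.
query = '<configResolveDn dn="sys" inHierarchical="false" />'
response_xml = handle.XmlRawQuery(query, dumpXml=True)
print(response_xml)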
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
UcsHandle.Logout
def Logout(self, dumpXml=None): """ Logout method disconnects from UCS. """ from UcsBase import UcsException if (self._cookie == None): return True if self._refreshTimer: self._refreshTimer.cancel() response = self.AaaLogout(dumpXml) self._cookie = None self._lastUpdateTime = str(time.asctime()) self._domains = None self._priv = None self._sessionId = None self._version = None if self._ucs in defaultUcs: del defaultUcs[self._ucs] if (response.errorCode != 0): raise UcsException(response.errorCode, response.errorDescr) # raise Exception('[Error]: Logout [Code]:' + response.errorCode + '[Description]:' + response.errorDescr) return True
python
def Logout(self, dumpXml=None): """ Logout method disconnects from UCS. """ from UcsBase import UcsException if (self._cookie == None): return True if self._refreshTimer: self._refreshTimer.cancel() response = self.AaaLogout(dumpXml) self._cookie = None self._lastUpdateTime = str(time.asctime()) self._domains = None self._priv = None self._sessionId = None self._version = None if self._ucs in defaultUcs: del defaultUcs[self._ucs] if (response.errorCode != 0): raise UcsException(response.errorCode, response.errorDescr) # raise Exception('[Error]: Logout [Code]:' + response.errorCode + '[Description]:' + response.errorDescr) return True
[ "def", "Logout", "(", "self", ",", "dumpXml", "=", "None", ")", ":", "from", "UcsBase", "import", "UcsException", "if", "(", "self", ".", "_cookie", "==", "None", ")", ":", "return", "True", "if", "self", ".", "_refreshTimer", ":", "self", ".", "_refreshTimer", ".", "cancel", "(", ")", "response", "=", "self", ".", "AaaLogout", "(", "dumpXml", ")", "self", ".", "_cookie", "=", "None", "self", ".", "_lastUpdateTime", "=", "str", "(", "time", ".", "asctime", "(", ")", ")", "self", ".", "_domains", "=", "None", "self", ".", "_priv", "=", "None", "self", ".", "_sessionId", "=", "None", "self", ".", "_version", "=", "None", "if", "self", ".", "_ucs", "in", "defaultUcs", ":", "del", "defaultUcs", "[", "self", ".", "_ucs", "]", "if", "(", "response", ".", "errorCode", "!=", "0", ")", ":", "raise", "UcsException", "(", "response", ".", "errorCode", ",", "response", ".", "errorDescr", ")", "# raise Exception('[Error]: Logout [Code]:' + response.errorCode + '[Description]:' + response.errorDescr)", "return", "True" ]
Logout method disconnects from UCS.
[ "Logout", "method", "disconnects", "from", "UCS", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L397-L422
train
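A usage sketch of the login/logout lifecycle with placeholder credentials; the Login keyword arguments mirror the call made inside ImportUcsSession above, and the import path is an assumption:

from UcsSdk import UcsHandle   # assumed import path

handle = UcsHandle()
try:
    handle.Login(name="10.0.0.10", username="admin",
                 password="password", noSsl=False, port=None)
    # ... queries and configuration calls go here ...
finally:
    handle.Logout()            # cancels the refresh timer and clears the cookie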
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
UcsHandle._Start_refresh_timer
def _Start_refresh_timer(self): """ Internal method to support auto-refresh functionality. """ if self._refreshPeriod > 60: interval = self._refreshPeriod - 60 else: interval = 60 self._refreshTimer = Timer(self._refreshPeriod, self.Refresh) # TODO:handle exit and logout active connections. revert from daemon then self._refreshTimer.setDaemon(True) self._refreshTimer.start()
python
def _Start_refresh_timer(self): """ Internal method to support auto-refresh functionality. """ if self._refreshPeriod > 60: interval = self._refreshPeriod - 60 else: interval = 60 self._refreshTimer = Timer(self._refreshPeriod, self.Refresh) # TODO:handle exit and logout active connections. revert from daemon then self._refreshTimer.setDaemon(True) self._refreshTimer.start()
[ "def", "_Start_refresh_timer", "(", "self", ")", ":", "if", "self", ".", "_refreshPeriod", ">", "60", ":", "interval", "=", "self", ".", "_refreshPeriod", "-", "60", "else", ":", "interval", "=", "60", "self", ".", "_refreshTimer", "=", "Timer", "(", "self", ".", "_refreshPeriod", ",", "self", ".", "Refresh", ")", "# TODO:handle exit and logout active connections. revert from daemon then", "self", ".", "_refreshTimer", ".", "setDaemon", "(", "True", ")", "self", ".", "_refreshTimer", ".", "start", "(", ")" ]
Internal method to support auto-refresh functionality.
[ "Internal", "method", "to", "support", "auto", "-", "refresh", "functionality", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L424-L433
train
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
UcsHandle._start_enqueue_thread
def _start_enqueue_thread(self): """ Internal method to start the enqueue thread which adds the events in an internal queue. """ self._enqueueThreadSignal.acquire() self._enqueueThread = Thread(target=self._enqueue_function) self._enqueueThread.daemon = True self._enqueueThread.start() self._enqueueThreadSignal.wait() self._enqueueThreadSignal.release()
python
def _start_enqueue_thread(self): """ Internal method to start the enqueue thread which adds the events in an internal queue. """ self._enqueueThreadSignal.acquire() self._enqueueThread = Thread(target=self._enqueue_function) self._enqueueThread.daemon = True self._enqueueThread.start() self._enqueueThreadSignal.wait() self._enqueueThreadSignal.release()
[ "def", "_start_enqueue_thread", "(", "self", ")", ":", "self", ".", "_enqueueThreadSignal", ".", "acquire", "(", ")", "self", ".", "_enqueueThread", "=", "Thread", "(", "target", "=", "self", ".", "_enqueue_function", ")", "self", ".", "_enqueueThread", ".", "daemon", "=", "True", "self", ".", "_enqueueThread", ".", "start", "(", ")", "self", ".", "_enqueueThreadSignal", ".", "wait", "(", ")", "self", ".", "_enqueueThreadSignal", ".", "release", "(", ")" ]
Internal method to start the enqueue thread which adds the events in an internal queue.
[ "Internal", "method", "to", "start", "the", "enqueue", "thread", "which", "adds", "the", "events", "in", "an", "internal", "queue", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L562-L569
train
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
UcsHandle._add_watch_block
def _add_watch_block(self, params, filterCb, capacity=500, cb=None): """ Internal method to add a watch block for starting event monitoring. """ if (self._wbslock == None): self._wbslock = Lock() self._wbslock.acquire() wb = WatchBlock(params, filterCb, capacity, cb) # Add a List of Watchers if ((wb != None) and (wb.cb == None)): wb.cb = wb._dequeue_default_cb self._wbs.append(wb) self._wbslock.release() if self._cookie == None: return None if wb != None and len(self._wbs) == 1 and wb.params["pollSec"] == None: self._start_enqueue_thread() if self._enqueueThread == None: return wb self._enqueueThreadSignal.acquire() self._enqueueThreadSignal.notify() # Notify self._enqueueThreadSignal.release() # Release the Lock return wb
python
def _add_watch_block(self, params, filterCb, capacity=500, cb=None): """ Internal method to add a watch block for starting event monitoring. """ if (self._wbslock == None): self._wbslock = Lock() self._wbslock.acquire() wb = WatchBlock(params, filterCb, capacity, cb) # Add a List of Watchers if ((wb != None) and (wb.cb == None)): wb.cb = wb._dequeue_default_cb self._wbs.append(wb) self._wbslock.release() if self._cookie == None: return None if wb != None and len(self._wbs) == 1 and wb.params["pollSec"] == None: self._start_enqueue_thread() if self._enqueueThread == None: return wb self._enqueueThreadSignal.acquire() self._enqueueThreadSignal.notify() # Notify self._enqueueThreadSignal.release() # Release the Lock return wb
[ "def", "_add_watch_block", "(", "self", ",", "params", ",", "filterCb", ",", "capacity", "=", "500", ",", "cb", "=", "None", ")", ":", "if", "(", "self", ".", "_wbslock", "==", "None", ")", ":", "self", ".", "_wbslock", "=", "Lock", "(", ")", "self", ".", "_wbslock", ".", "acquire", "(", ")", "wb", "=", "WatchBlock", "(", "params", ",", "filterCb", ",", "capacity", ",", "cb", ")", "# Add a List of Watchers", "if", "(", "(", "wb", "!=", "None", ")", "and", "(", "wb", ".", "cb", "==", "None", ")", ")", ":", "wb", ".", "cb", "=", "wb", ".", "_dequeue_default_cb", "self", ".", "_wbs", ".", "append", "(", "wb", ")", "self", ".", "_wbslock", ".", "release", "(", ")", "if", "self", ".", "_cookie", "==", "None", ":", "return", "None", "if", "wb", "!=", "None", "and", "len", "(", "self", ".", "_wbs", ")", "==", "1", "and", "wb", ".", "params", "[", "\"pollSec\"", "]", "==", "None", ":", "self", ".", "_start_enqueue_thread", "(", ")", "if", "self", ".", "_enqueueThread", "==", "None", ":", "return", "wb", "self", ".", "_enqueueThreadSignal", ".", "acquire", "(", ")", "self", ".", "_enqueueThreadSignal", ".", "notify", "(", ")", "# Notify", "self", ".", "_enqueueThreadSignal", ".", "release", "(", ")", "# Release the Lock", "return", "wb" ]
Internal method to add a watch block for starting event monitoring.
[ "Internal", "method", "to", "add", "a", "watch", "block", "for", "starting", "event", "monitoring", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L571-L595
train
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
UcsHandle._remove_watch_block
def _remove_watch_block(self, wb): """ Internal method to remove a watch block for stopping event monitoring. """ if (self._wbslock == None): self._wbslock = Lock() self._wbslock.acquire() self._wbs.remove(wb) if len(self._wbs) == 0: self._stop_enqueue_thread() self._stop_dequeue_thread() self._wbslock.release()
python
def _remove_watch_block(self, wb): """ Internal method to remove a watch block for stopping event monitoring. """ if (self._wbslock == None): self._wbslock = Lock() self._wbslock.acquire() self._wbs.remove(wb) if len(self._wbs) == 0: self._stop_enqueue_thread() self._stop_dequeue_thread() self._wbslock.release()
[ "def", "_remove_watch_block", "(", "self", ",", "wb", ")", ":", "if", "(", "self", ".", "_wbslock", "==", "None", ")", ":", "self", ".", "_wbslock", "=", "Lock", "(", ")", "self", ".", "_wbslock", ".", "acquire", "(", ")", "self", ".", "_wbs", ".", "remove", "(", "wb", ")", "if", "len", "(", "self", ".", "_wbs", ")", "==", "0", ":", "self", ".", "_stop_enqueue_thread", "(", ")", "self", ".", "_stop_dequeue_thread", "(", ")", "self", ".", "_wbslock", ".", "release", "(", ")" ]
Internal method to remove a watch block for stopping event monitoring.
[ "Internal", "method", "to", "remove", "a", "watch", "block", "for", "stopping", "event", "monitoring", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L597-L608
train
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
UcsHandle.RemoveEventHandler
def RemoveEventHandler(self, wb): """ Removes an event handler. """ from UcsBase import WriteUcsWarning if wb in self._wbs: self._remove_watch_block(wb) else: WriteUcsWarning("Event handler not found")
python
def RemoveEventHandler(self, wb): """ Removes an event handler. """ from UcsBase import WriteUcsWarning if wb in self._wbs: self._remove_watch_block(wb) else: WriteUcsWarning("Event handler not found")
[ "def", "RemoveEventHandler", "(", "self", ",", "wb", ")", ":", "from", "UcsBase", "import", "WriteUcsWarning", "if", "wb", "in", "self", ".", "_wbs", ":", "self", ".", "_remove_watch_block", "(", "wb", ")", "else", ":", "WriteUcsWarning", "(", "\"Event handler not found\"", ")" ]
Removes an event handler.
[ "Removes", "an", "event", "handler", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L718-L725
train
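A hedged sketch of the event-handler lifecycle these watch-block helpers support; only RemoveEventHandler appears in this excerpt, so the AddEventHandler call and its return value are assumptions used for illustration:

wb = handle.AddEventHandler()       # assumed registration call returning a WatchBlock
try:
    pass                            # consume events delivered to the watch block here
finally:
    handle.RemoveEventHandler(wb)   # warns if the handler is not registered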
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
UcsHandle._start_dequeue_thread
def _start_dequeue_thread(self): """ Internal method to start dequeue thread. """ self._dequeueThread = Thread(target=self._dequeue_function) self._dequeueThread.daemon = True self._dequeueThread.start()
python
def _start_dequeue_thread(self): """ Internal method to start dequeue thread. """ self._dequeueThread = Thread(target=self._dequeue_function) self._dequeueThread.daemon = True self._dequeueThread.start()
[ "def", "_start_dequeue_thread", "(", "self", ")", ":", "self", ".", "_dequeueThread", "=", "Thread", "(", "target", "=", "self", ".", "_dequeue_function", ")", "self", ".", "_dequeueThread", ".", "daemon", "=", "True", "self", ".", "_dequeueThread", ".", "start", "(", ")" ]
Internal method to start dequeue thread.
[ "Internal", "method", "to", "start", "dequeue", "thread", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L731-L735
train
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
UcsHandle.StartGuiSession
def StartGuiSession(self): """ Launches the UCSM GUI via specific UCS handle. """ from UcsBase import WriteUcsWarning, UcsUtils, UcsValidationException import urllib, tempfile, fileinput, os, subprocess, platform osSupport = ["Windows", "Linux", "Microsoft"] if platform.system() not in osSupport: raise UcsValidationException("Currently works with Windows OS and Ubuntu") # raise Exception("Currently works with Windows OS and Ubuntu") try: javawsPath = UcsUtils.GetJavaInstallationPath() # print r"%s" %(javawsPath) if javawsPath != None: url = "%s/ucsm/ucsm.jnlp" % (self.Uri()) source = urllib.urlopen(url).read() jnlpdir = tempfile.gettempdir() jnlpfile = os.path.join(jnlpdir, "temp.jnlp") if os.path.exists(jnlpfile): os.remove(jnlpfile) jnlpFH = open(jnlpfile, "w+") jnlpFH.write(source) jnlpFH.close() for line in fileinput.input(jnlpfile, inplace=1): if re.search(r'^\s*</resources>\s*$', line): print '\t<property name="log.show.encrypted" value="true"/>' print line, subprocess.call([javawsPath, jnlpfile]) if os.path.exists(jnlpfile): os.remove(jnlpfile) else: return None except Exception, err: fileinput.close() if os.path.exists(jnlpfile): os.remove(jnlpfile) raise
python
def StartGuiSession(self): """ Launches the UCSM GUI via specific UCS handle. """ from UcsBase import WriteUcsWarning, UcsUtils, UcsValidationException import urllib, tempfile, fileinput, os, subprocess, platform osSupport = ["Windows", "Linux", "Microsoft"] if platform.system() not in osSupport: raise UcsValidationException("Currently works with Windows OS and Ubuntu") # raise Exception("Currently works with Windows OS and Ubuntu") try: javawsPath = UcsUtils.GetJavaInstallationPath() # print r"%s" %(javawsPath) if javawsPath != None: url = "%s/ucsm/ucsm.jnlp" % (self.Uri()) source = urllib.urlopen(url).read() jnlpdir = tempfile.gettempdir() jnlpfile = os.path.join(jnlpdir, "temp.jnlp") if os.path.exists(jnlpfile): os.remove(jnlpfile) jnlpFH = open(jnlpfile, "w+") jnlpFH.write(source) jnlpFH.close() for line in fileinput.input(jnlpfile, inplace=1): if re.search(r'^\s*</resources>\s*$', line): print '\t<property name="log.show.encrypted" value="true"/>' print line, subprocess.call([javawsPath, jnlpfile]) if os.path.exists(jnlpfile): os.remove(jnlpfile) else: return None except Exception, err: fileinput.close() if os.path.exists(jnlpfile): os.remove(jnlpfile) raise
[ "def", "StartGuiSession", "(", "self", ")", ":", "from", "UcsBase", "import", "WriteUcsWarning", ",", "UcsUtils", ",", "UcsValidationException", "import", "urllib", ",", "tempfile", ",", "fileinput", ",", "os", ",", "subprocess", ",", "platform", "osSupport", "=", "[", "\"Windows\"", ",", "\"Linux\"", ",", "\"Microsoft\"", "]", "if", "platform", ".", "system", "(", ")", "not", "in", "osSupport", ":", "raise", "UcsValidationException", "(", "\"Currently works with Windows OS and Ubuntu\"", ")", "# raise Exception(\"Currently works with Windows OS and Ubuntu\")", "try", ":", "javawsPath", "=", "UcsUtils", ".", "GetJavaInstallationPath", "(", ")", "# print r\"%s\" %(javawsPath)", "if", "javawsPath", "!=", "None", ":", "url", "=", "\"%s/ucsm/ucsm.jnlp\"", "%", "(", "self", ".", "Uri", "(", ")", ")", "source", "=", "urllib", ".", "urlopen", "(", "url", ")", ".", "read", "(", ")", "jnlpdir", "=", "tempfile", ".", "gettempdir", "(", ")", "jnlpfile", "=", "os", ".", "path", ".", "join", "(", "jnlpdir", ",", "\"temp.jnlp\"", ")", "if", "os", ".", "path", ".", "exists", "(", "jnlpfile", ")", ":", "os", ".", "remove", "(", "jnlpfile", ")", "jnlpFH", "=", "open", "(", "jnlpfile", ",", "\"w+\"", ")", "jnlpFH", ".", "write", "(", "source", ")", "jnlpFH", ".", "close", "(", ")", "for", "line", "in", "fileinput", ".", "input", "(", "jnlpfile", ",", "inplace", "=", "1", ")", ":", "if", "re", ".", "search", "(", "r'^\\s*</resources>\\s*$'", ",", "line", ")", ":", "print", "'\\t<property name=\"log.show.encrypted\" value=\"true\"/>'", "print", "line", ",", "subprocess", ".", "call", "(", "[", "javawsPath", ",", "jnlpfile", "]", ")", "if", "os", ".", "path", ".", "exists", "(", "jnlpfile", ")", ":", "os", ".", "remove", "(", "jnlpfile", ")", "else", ":", "return", "None", "except", "Exception", ",", "err", ":", "fileinput", ".", "close", "(", ")", "if", "os", ".", "path", ".", "exists", "(", "jnlpfile", ")", ":", "os", ".", "remove", "(", "jnlpfile", ")", "raise" ]
Launches the UCSM GUI via specific UCS handle.
[ "Launches", "the", "UCSM", "GUI", "via", "specific", "UCS", "handle", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L1064-L1109
train
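A usage sketch for launching the UCSM GUI from a logged-in handle; StartGuiSession fetches ucsm.jnlp from the handle's URI and hands it to Java Web Start, so it requires a supported OS and a local javaws installation. The exception import path is an assumption:

from UcsSdk.UcsBase import UcsValidationException   # assumed import path

try:
    handle.StartGuiSession()        # downloads ucsm.jnlp and invokes javaws
except UcsValidationException as err:
    print(err)                      # raised on unsupported platforms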
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
UcsHandle.ImportUcsBackup
def ImportUcsBackup(self, path=None, merge=False, dumpXml=False): """ Imports backUp. This operation will upload the UCSM backup taken earlier via GUI or BackupUcs operation for all configuration, system configuration, and logical configuration files. User can perform an import while the system is up and running. - path specifies path of the backup file. - merge specifies whether to merge the backup configuration with the existing UCSM configuration. """ from UcsBase import WriteUcsWarning, UcsUtils, ManagedObject, WriteObject, UcsUtils, UcsException, \ UcsValidationException from Ucs import ConfigConfig from Mos import MgmtImporter from datetime import datetime if (self._transactionInProgress): raise UcsValidationException( "UCS transaction in progress. Cannot execute ImportUcsBackup. Complete or Undo UCS transaction.") # raise Exception("UCS transaction in progress. Cannot execute ImportUcsBackup. Complete or Undo UCS transaction.") if not path: raise UcsValidationException("path parameter is not provided.") # raise Exception("Please provide path") if not os.path.exists(path): raise UcsValidationException("Backup File not found <%s>" % (path)) # raise Exception("Backup File not found <%s>" %(path)) dn = None filePath = path localFile = os.path.basename(filePath) topSystem = ManagedObject(NamingId.TOP_SYSTEM) mgmtImporter = ManagedObject(NamingId.MGMT_IMPORTER) mgmtImporter.Hostname = os.environ['COMPUTERNAME'].lower() + datetime.now().strftime('%Y%m%d%H%M') dn = UcsUtils.MakeDn([topSystem.MakeRn(), mgmtImporter.MakeRn()]) mgmtImporter.Dn = dn mgmtImporter.Status = Status.CREATED mgmtImporter.RemoteFile = filePath mgmtImporter.Proto = MgmtImporter.CONST_PROTO_HTTP mgmtImporter.AdminState = MgmtImporter.CONST_ADMIN_STATE_ENABLED if merge: mgmtImporter.Action = MgmtImporter.CONST_ACTION_MERGE else: mgmtImporter.Action = MgmtImporter.CONST_ACTION_REPLACE inConfig = ConfigConfig() inConfig.AddChild(mgmtImporter) uri = "%s/operations/file-%s/importconfig.txt" % (self.Uri(), localFile) if sys.version_info < (2, 6): uploadFileHandle = open(filePath, 'rb') stream = uploadFileHandle.read() else: progress = Progress() stream = file_with_callback(filePath, 'rb', progress.update, filePath) request = urllib2.Request(uri) request.add_header('Cookie', 'ucsm-cookie=%s' % (self._cookie)) request.add_data(stream) response = urllib2.urlopen(request).read() if not response: raise UcsValidationException("Unable to upload properly.") # WriteUcsWarning("Unable to upload properly.") ccm = self.ConfigConfMo(dn=dn, inConfig=inConfig, inHierarchical=YesOrNo.FALSE, dumpXml=dumpXml) if (ccm.errorCode != 0): raise UcsException(ccm.errorCode, ccm.errorDescr) # raise Exception('[Error]: BackupUcs [Code]:' + ccm.errorCode + ' [Description]:' + ccm.errorDescr) return ccm.OutConfig.GetChild()
python
def ImportUcsBackup(self, path=None, merge=False, dumpXml=False): """ Imports backUp. This operation will upload the UCSM backup taken earlier via GUI or BackupUcs operation for all configuration, system configuration, and logical configuration files. User can perform an import while the system is up and running. - path specifies path of the backup file. - merge specifies whether to merge the backup configuration with the existing UCSM configuration. """ from UcsBase import WriteUcsWarning, UcsUtils, ManagedObject, WriteObject, UcsUtils, UcsException, \ UcsValidationException from Ucs import ConfigConfig from Mos import MgmtImporter from datetime import datetime if (self._transactionInProgress): raise UcsValidationException( "UCS transaction in progress. Cannot execute ImportUcsBackup. Complete or Undo UCS transaction.") # raise Exception("UCS transaction in progress. Cannot execute ImportUcsBackup. Complete or Undo UCS transaction.") if not path: raise UcsValidationException("path parameter is not provided.") # raise Exception("Please provide path") if not os.path.exists(path): raise UcsValidationException("Backup File not found <%s>" % (path)) # raise Exception("Backup File not found <%s>" %(path)) dn = None filePath = path localFile = os.path.basename(filePath) topSystem = ManagedObject(NamingId.TOP_SYSTEM) mgmtImporter = ManagedObject(NamingId.MGMT_IMPORTER) mgmtImporter.Hostname = os.environ['COMPUTERNAME'].lower() + datetime.now().strftime('%Y%m%d%H%M') dn = UcsUtils.MakeDn([topSystem.MakeRn(), mgmtImporter.MakeRn()]) mgmtImporter.Dn = dn mgmtImporter.Status = Status.CREATED mgmtImporter.RemoteFile = filePath mgmtImporter.Proto = MgmtImporter.CONST_PROTO_HTTP mgmtImporter.AdminState = MgmtImporter.CONST_ADMIN_STATE_ENABLED if merge: mgmtImporter.Action = MgmtImporter.CONST_ACTION_MERGE else: mgmtImporter.Action = MgmtImporter.CONST_ACTION_REPLACE inConfig = ConfigConfig() inConfig.AddChild(mgmtImporter) uri = "%s/operations/file-%s/importconfig.txt" % (self.Uri(), localFile) if sys.version_info < (2, 6): uploadFileHandle = open(filePath, 'rb') stream = uploadFileHandle.read() else: progress = Progress() stream = file_with_callback(filePath, 'rb', progress.update, filePath) request = urllib2.Request(uri) request.add_header('Cookie', 'ucsm-cookie=%s' % (self._cookie)) request.add_data(stream) response = urllib2.urlopen(request).read() if not response: raise UcsValidationException("Unable to upload properly.") # WriteUcsWarning("Unable to upload properly.") ccm = self.ConfigConfMo(dn=dn, inConfig=inConfig, inHierarchical=YesOrNo.FALSE, dumpXml=dumpXml) if (ccm.errorCode != 0): raise UcsException(ccm.errorCode, ccm.errorDescr) # raise Exception('[Error]: BackupUcs [Code]:' + ccm.errorCode + ' [Description]:' + ccm.errorDescr) return ccm.OutConfig.GetChild()
[ "def", "ImportUcsBackup", "(", "self", ",", "path", "=", "None", ",", "merge", "=", "False", ",", "dumpXml", "=", "False", ")", ":", "from", "UcsBase", "import", "WriteUcsWarning", ",", "UcsUtils", ",", "ManagedObject", ",", "WriteObject", ",", "UcsUtils", ",", "UcsException", ",", "UcsValidationException", "from", "Ucs", "import", "ConfigConfig", "from", "Mos", "import", "MgmtImporter", "from", "datetime", "import", "datetime", "if", "(", "self", ".", "_transactionInProgress", ")", ":", "raise", "UcsValidationException", "(", "\"UCS transaction in progress. Cannot execute ImportUcsBackup. Complete or Undo UCS transaction.\"", ")", "# raise Exception(\"UCS transaction in progress. Cannot execute ImportUcsBackup. Complete or Undo UCS transaction.\")", "if", "not", "path", ":", "raise", "UcsValidationException", "(", "\"path parameter is not provided.\"", ")", "# raise Exception(\"Please provide path\")", "if", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "raise", "UcsValidationException", "(", "\"Backup File not found <%s>\"", "%", "(", "path", ")", ")", "# raise Exception(\"Backup File not found <%s>\" %(path))", "dn", "=", "None", "filePath", "=", "path", "localFile", "=", "os", ".", "path", ".", "basename", "(", "filePath", ")", "topSystem", "=", "ManagedObject", "(", "NamingId", ".", "TOP_SYSTEM", ")", "mgmtImporter", "=", "ManagedObject", "(", "NamingId", ".", "MGMT_IMPORTER", ")", "mgmtImporter", ".", "Hostname", "=", "os", ".", "environ", "[", "'COMPUTERNAME'", "]", ".", "lower", "(", ")", "+", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "'%Y%m%d%H%M'", ")", "dn", "=", "UcsUtils", ".", "MakeDn", "(", "[", "topSystem", ".", "MakeRn", "(", ")", ",", "mgmtImporter", ".", "MakeRn", "(", ")", "]", ")", "mgmtImporter", ".", "Dn", "=", "dn", "mgmtImporter", ".", "Status", "=", "Status", ".", "CREATED", "mgmtImporter", ".", "RemoteFile", "=", "filePath", "mgmtImporter", ".", "Proto", "=", "MgmtImporter", ".", "CONST_PROTO_HTTP", "mgmtImporter", ".", "AdminState", "=", "MgmtImporter", ".", "CONST_ADMIN_STATE_ENABLED", "if", "merge", ":", "mgmtImporter", ".", "Action", "=", "MgmtImporter", ".", "CONST_ACTION_MERGE", "else", ":", "mgmtImporter", ".", "Action", "=", "MgmtImporter", ".", "CONST_ACTION_REPLACE", "inConfig", "=", "ConfigConfig", "(", ")", "inConfig", ".", "AddChild", "(", "mgmtImporter", ")", "uri", "=", "\"%s/operations/file-%s/importconfig.txt\"", "%", "(", "self", ".", "Uri", "(", ")", ",", "localFile", ")", "if", "sys", ".", "version_info", "<", "(", "2", ",", "6", ")", ":", "uploadFileHandle", "=", "open", "(", "filePath", ",", "'rb'", ")", "stream", "=", "uploadFileHandle", ".", "read", "(", ")", "else", ":", "progress", "=", "Progress", "(", ")", "stream", "=", "file_with_callback", "(", "filePath", ",", "'rb'", ",", "progress", ".", "update", ",", "filePath", ")", "request", "=", "urllib2", ".", "Request", "(", "uri", ")", "request", ".", "add_header", "(", "'Cookie'", ",", "'ucsm-cookie=%s'", "%", "(", "self", ".", "_cookie", ")", ")", "request", ".", "add_data", "(", "stream", ")", "response", "=", "urllib2", ".", "urlopen", "(", "request", ")", ".", "read", "(", ")", "if", "not", "response", ":", "raise", "UcsValidationException", "(", "\"Unable to upload properly.\"", ")", "# WriteUcsWarning(\"Unable to upload properly.\")", "ccm", "=", "self", ".", "ConfigConfMo", "(", "dn", "=", "dn", ",", "inConfig", "=", "inConfig", ",", "inHierarchical", "=", "YesOrNo", ".", "FALSE", ",", "dumpXml", "=", "dumpXml", ")", "if", "(", "ccm", ".", 
"errorCode", "!=", "0", ")", ":", "raise", "UcsException", "(", "ccm", ".", "errorCode", ",", "ccm", ".", "errorDescr", ")", "# raise Exception('[Error]: BackupUcs [Code]:' + ccm.errorCode + ' [Description]:' + ccm.errorDescr)", "return", "ccm", ".", "OutConfig", ".", "GetChild", "(", ")" ]
Imports backUp. This operation will upload the UCSM backup taken earlier via GUI or BackupUcs operation for all configuration, system configuration, and logical configuration files. User can perform an import while the system is up and running. - path specifies path of the backup file. - merge specifies whether to merge the backup configuration with the existing UCSM configuration.
[ "Imports", "backUp", ".", "This", "operation", "will", "upload", "the", "UCSM", "backup", "taken", "earlier", "via", "GUI", "or", "BackupUcs", "operation", "for", "all", "configuration", "system", "configuration", "and", "logical", "configuration", "files", ".", "User", "can", "perform", "an", "import", "while", "the", "system", "is", "up", "and", "running", ".", "-", "path", "specifies", "path", "of", "the", "backup", "file", ".", "-", "merge", "specifies", "whether", "to", "merge", "the", "backup", "configuration", "with", "the", "existing", "UCSM", "configuration", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L1224-L1301
train
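A usage sketch for restoring a backup through a logged-in handle; the path is a placeholder and merge=True asks UCSM to merge rather than replace the existing configuration. Note that the hostname field above is built from the COMPUTERNAME environment variable, so as written the import expects a Windows client:

# Uploads the local backup file over HTTP and creates a mgmtImporter job.
out_mos = handle.ImportUcsBackup(path="/tmp/ucsm-config-all.xml",
                                 merge=True, dumpXml=False)
for mo in out_mos:
    print(mo.Dn)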
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
UcsHandle.SendUcsFirmware
def SendUcsFirmware(self, path=None, dumpXml=False): """ Uploads a specific CCO Image on UCS. - path specifies the path of the image to be uploaded. """ from UcsBase import WriteUcsWarning, UcsUtils, ManagedObject, WriteObject, UcsUtils, UcsValidationException, \ UcsException from Ucs import ConfigConfig from Mos import FirmwareDownloader if (self._transactionInProgress): raise UcsValidationException( "UCS transaction in progress. Cannot execute SendUcsFirmware. Complete or Undo UCS transaction.") # raise Exception("UCS transaction in progress. Cannot execute SendUcsFirmware. Complete or Undo UCS transaction.") if not path: raise UcsValidationException("path parameter is not provided.") # raise Exception("Please provide path") if not os.path.exists(path): raise UcsValidationException("Image not found <%s>" % (path)) # raise Exception("Image not found <%s>" %(path)) dn = None filePath = path localFile = os.path.basename(filePath) # Exit if image already exist on UCSM topSystem = ManagedObject(NamingId.TOP_SYSTEM) firmwareCatalogue = ManagedObject(NamingId.FIRMWARE_CATALOGUE) firmwareDistributable = ManagedObject(NamingId.FIRMWARE_DISTRIBUTABLE) firmwareDistributable.Name = localFile dn = UcsUtils.MakeDn([topSystem.MakeRn(), firmwareCatalogue.MakeRn(), firmwareDistributable.MakeRn()]) crDn = self.ConfigResolveDn(dn, inHierarchical=YesOrNo.FALSE, dumpXml=dumpXml) if (crDn.OutConfig.GetChildCount() > 0): raise UcsValidationException("Image file <%s> already exist on FI." % (filePath)) # raise Exception("Image file <%s> already exist on FI." %(filePath)) # Create object of type <firmwareDownloader> firmwareDownloader = ManagedObject(NamingId.FIRMWARE_DOWNLOADER) firmwareDownloader.FileName = localFile dn = UcsUtils.MakeDn([topSystem.MakeRn(), firmwareCatalogue.MakeRn(), firmwareDownloader.MakeRn()]) firmwareDownloader.Dn = dn firmwareDownloader.Status = Status.CREATED firmwareDownloader.FileName = localFile firmwareDownloader.Server = FirmwareDownloader.CONST_PROTOCOL_LOCAL firmwareDownloader.Protocol = FirmwareDownloader.CONST_PROTOCOL_LOCAL inConfig = ConfigConfig() inConfig.AddChild(firmwareDownloader) uri = "%s/operations/file-%s/image.txt" % (self.Uri(), localFile) progress = Progress() stream = file_with_callback(filePath, 'rb', progress.update, filePath) request = urllib2.Request(uri) request.add_header('Cookie', 'ucsm-cookie=%s' % (self._cookie)) request.add_data(stream) response = urllib2.urlopen(request).read() if not response: raise UcsValidationException("Unable to upload properly.") # WriteUcsWarning("Unable to upload properly.") ccm = self.ConfigConfMo(dn=dn, inConfig=inConfig, inHierarchical=YesOrNo.FALSE, dumpXml=dumpXml) if (ccm.errorCode != 0): raise UcsException(ccm.errorCode, ccm.errorDescr) return ccm.OutConfig.GetChild()
python
def SendUcsFirmware(self, path=None, dumpXml=False): """ Uploads a specific CCO Image on UCS. - path specifies the path of the image to be uploaded. """ from UcsBase import WriteUcsWarning, UcsUtils, ManagedObject, WriteObject, UcsUtils, UcsValidationException, \ UcsException from Ucs import ConfigConfig from Mos import FirmwareDownloader if (self._transactionInProgress): raise UcsValidationException( "UCS transaction in progress. Cannot execute SendUcsFirmware. Complete or Undo UCS transaction.") # raise Exception("UCS transaction in progress. Cannot execute SendUcsFirmware. Complete or Undo UCS transaction.") if not path: raise UcsValidationException("path parameter is not provided.") # raise Exception("Please provide path") if not os.path.exists(path): raise UcsValidationException("Image not found <%s>" % (path)) # raise Exception("Image not found <%s>" %(path)) dn = None filePath = path localFile = os.path.basename(filePath) # Exit if image already exist on UCSM topSystem = ManagedObject(NamingId.TOP_SYSTEM) firmwareCatalogue = ManagedObject(NamingId.FIRMWARE_CATALOGUE) firmwareDistributable = ManagedObject(NamingId.FIRMWARE_DISTRIBUTABLE) firmwareDistributable.Name = localFile dn = UcsUtils.MakeDn([topSystem.MakeRn(), firmwareCatalogue.MakeRn(), firmwareDistributable.MakeRn()]) crDn = self.ConfigResolveDn(dn, inHierarchical=YesOrNo.FALSE, dumpXml=dumpXml) if (crDn.OutConfig.GetChildCount() > 0): raise UcsValidationException("Image file <%s> already exist on FI." % (filePath)) # raise Exception("Image file <%s> already exist on FI." %(filePath)) # Create object of type <firmwareDownloader> firmwareDownloader = ManagedObject(NamingId.FIRMWARE_DOWNLOADER) firmwareDownloader.FileName = localFile dn = UcsUtils.MakeDn([topSystem.MakeRn(), firmwareCatalogue.MakeRn(), firmwareDownloader.MakeRn()]) firmwareDownloader.Dn = dn firmwareDownloader.Status = Status.CREATED firmwareDownloader.FileName = localFile firmwareDownloader.Server = FirmwareDownloader.CONST_PROTOCOL_LOCAL firmwareDownloader.Protocol = FirmwareDownloader.CONST_PROTOCOL_LOCAL inConfig = ConfigConfig() inConfig.AddChild(firmwareDownloader) uri = "%s/operations/file-%s/image.txt" % (self.Uri(), localFile) progress = Progress() stream = file_with_callback(filePath, 'rb', progress.update, filePath) request = urllib2.Request(uri) request.add_header('Cookie', 'ucsm-cookie=%s' % (self._cookie)) request.add_data(stream) response = urllib2.urlopen(request).read() if not response: raise UcsValidationException("Unable to upload properly.") # WriteUcsWarning("Unable to upload properly.") ccm = self.ConfigConfMo(dn=dn, inConfig=inConfig, inHierarchical=YesOrNo.FALSE, dumpXml=dumpXml) if (ccm.errorCode != 0): raise UcsException(ccm.errorCode, ccm.errorDescr) return ccm.OutConfig.GetChild()
[ "def", "SendUcsFirmware", "(", "self", ",", "path", "=", "None", ",", "dumpXml", "=", "False", ")", ":", "from", "UcsBase", "import", "WriteUcsWarning", ",", "UcsUtils", ",", "ManagedObject", ",", "WriteObject", ",", "UcsUtils", ",", "UcsValidationException", ",", "UcsException", "from", "Ucs", "import", "ConfigConfig", "from", "Mos", "import", "FirmwareDownloader", "if", "(", "self", ".", "_transactionInProgress", ")", ":", "raise", "UcsValidationException", "(", "\"UCS transaction in progress. Cannot execute SendUcsFirmware. Complete or Undo UCS transaction.\"", ")", "# raise Exception(\"UCS transaction in progress. Cannot execute SendUcsFirmware. Complete or Undo UCS transaction.\")", "if", "not", "path", ":", "raise", "UcsValidationException", "(", "\"path parameter is not provided.\"", ")", "# raise Exception(\"Please provide path\")", "if", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "raise", "UcsValidationException", "(", "\"Image not found <%s>\"", "%", "(", "path", ")", ")", "# raise Exception(\"Image not found <%s>\" %(path))\t", "dn", "=", "None", "filePath", "=", "path", "localFile", "=", "os", ".", "path", ".", "basename", "(", "filePath", ")", "# Exit if image already exist on UCSM ", "topSystem", "=", "ManagedObject", "(", "NamingId", ".", "TOP_SYSTEM", ")", "firmwareCatalogue", "=", "ManagedObject", "(", "NamingId", ".", "FIRMWARE_CATALOGUE", ")", "firmwareDistributable", "=", "ManagedObject", "(", "NamingId", ".", "FIRMWARE_DISTRIBUTABLE", ")", "firmwareDistributable", ".", "Name", "=", "localFile", "dn", "=", "UcsUtils", ".", "MakeDn", "(", "[", "topSystem", ".", "MakeRn", "(", ")", ",", "firmwareCatalogue", ".", "MakeRn", "(", ")", ",", "firmwareDistributable", ".", "MakeRn", "(", ")", "]", ")", "crDn", "=", "self", ".", "ConfigResolveDn", "(", "dn", ",", "inHierarchical", "=", "YesOrNo", ".", "FALSE", ",", "dumpXml", "=", "dumpXml", ")", "if", "(", "crDn", ".", "OutConfig", ".", "GetChildCount", "(", ")", ">", "0", ")", ":", "raise", "UcsValidationException", "(", "\"Image file <%s> already exist on FI.\"", "%", "(", "filePath", ")", ")", "# raise Exception(\"Image file <%s> already exist on FI.\" %(filePath))", "# Create object of type <firmwareDownloader>", "firmwareDownloader", "=", "ManagedObject", "(", "NamingId", ".", "FIRMWARE_DOWNLOADER", ")", "firmwareDownloader", ".", "FileName", "=", "localFile", "dn", "=", "UcsUtils", ".", "MakeDn", "(", "[", "topSystem", ".", "MakeRn", "(", ")", ",", "firmwareCatalogue", ".", "MakeRn", "(", ")", ",", "firmwareDownloader", ".", "MakeRn", "(", ")", "]", ")", "firmwareDownloader", ".", "Dn", "=", "dn", "firmwareDownloader", ".", "Status", "=", "Status", ".", "CREATED", "firmwareDownloader", ".", "FileName", "=", "localFile", "firmwareDownloader", ".", "Server", "=", "FirmwareDownloader", ".", "CONST_PROTOCOL_LOCAL", "firmwareDownloader", ".", "Protocol", "=", "FirmwareDownloader", ".", "CONST_PROTOCOL_LOCAL", "inConfig", "=", "ConfigConfig", "(", ")", "inConfig", ".", "AddChild", "(", "firmwareDownloader", ")", "uri", "=", "\"%s/operations/file-%s/image.txt\"", "%", "(", "self", ".", "Uri", "(", ")", ",", "localFile", ")", "progress", "=", "Progress", "(", ")", "stream", "=", "file_with_callback", "(", "filePath", ",", "'rb'", ",", "progress", ".", "update", ",", "filePath", ")", "request", "=", "urllib2", ".", "Request", "(", "uri", ")", "request", ".", "add_header", "(", "'Cookie'", ",", "'ucsm-cookie=%s'", "%", "(", "self", ".", "_cookie", ")", ")", "request", ".", "add_data", "(", "stream", ")", 
"response", "=", "urllib2", ".", "urlopen", "(", "request", ")", ".", "read", "(", ")", "if", "not", "response", ":", "raise", "UcsValidationException", "(", "\"Unable to upload properly.\"", ")", "# WriteUcsWarning(\"Unable to upload properly.\")", "ccm", "=", "self", ".", "ConfigConfMo", "(", "dn", "=", "dn", ",", "inConfig", "=", "inConfig", ",", "inHierarchical", "=", "YesOrNo", ".", "FALSE", ",", "dumpXml", "=", "dumpXml", ")", "if", "(", "ccm", ".", "errorCode", "!=", "0", ")", ":", "raise", "UcsException", "(", "ccm", ".", "errorCode", ",", "ccm", ".", "errorDescr", ")", "return", "ccm", ".", "OutConfig", ".", "GetChild", "(", ")" ]
Uploads a specific CCO Image on UCS. - path specifies the path of the image to be uploaded.
[ "Uploads", "a", "specific", "CCO", "Image", "on", "UCS", ".", "-", "path", "specifies", "the", "path", "of", "the", "image", "to", "be", "uploaded", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L1303-L1375
train
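A minimal usage sketch for the SendUcsFirmware record above. Only SendUcsFirmware(path=..., dumpXml=...) comes from the record itself; the Login/Logout flow and every concrete value (host, credentials, image path) are assumptions added for illustration.

from UcsSdk import UcsHandle

handle = UcsHandle()
try:
    # Hypothetical endpoint and credentials -- replace with real values.
    handle.Login("10.0.0.1", "admin", "password")
    # Upload a local CCO image to the fabric interconnect; the method raises
    # UcsValidationException if the file is missing or a distributable with
    # the same name already exists on the FI.
    downloader = handle.SendUcsFirmware(path="/tmp/ucs-k9-bundle.bin")
    # The returned object is expected to be the configured firmwareDownloader MO.
    print(downloader.Dn)
finally:
    handle.Logout()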
CiscoUcs/UcsPythonSDK
src/UcsSdk/UcsHandle_Edit.py
UcsHandle.GetUcsChild
def GetUcsChild(self, inMo=None, inDn=None, classId=None, inHierarchical=False, dumpXml=None): """ Gets Child Managed Object from UCS. - in_mo, if provided, it acts as a parent for the present operation. (required if in_dn is None). - in_dn, parent mo dn (required if in_mo is None) - class_id of the managed object/s to get.(optional) - in_hierarchical, Explores hierarchy if true, else returns managed objects at a single level.(optional) """ from UcsBase import UcsValidationException, UcsException, UcsUtils if not inDn and not inMo: raise UcsValidationException('[Error]: get_ucs_child: Provide in_mo or in_dn.') if inMo: parentDn = inMo.getattr("Dn") elif inDn: parentDn = inDn crc = self.ConfigResolveChildren(classId, parentDn, None, inHierarchical, dumpXml) if crc.errorCode == 0: moList = UcsUtils.extractMolistFromMethodResponse(crc, inHierarchical) return moList else: raise UcsException(crc.errorCode, crc.error_descr)
python
def GetUcsChild(self, inMo=None, inDn=None, classId=None, inHierarchical=False, dumpXml=None): """ Gets Child Managed Object from UCS. - in_mo, if provided, it acts as a parent for the present operation. (required if in_dn is None). - in_dn, parent mo dn (required if in_mo is None) - class_id of the managed object/s to get.(optional) - in_hierarchical, Explores hierarchy if true, else returns managed objects at a single level.(optional) """ from UcsBase import UcsValidationException, UcsException, UcsUtils if not inDn and not inMo: raise UcsValidationException('[Error]: get_ucs_child: Provide in_mo or in_dn.') if inMo: parentDn = inMo.getattr("Dn") elif inDn: parentDn = inDn crc = self.ConfigResolveChildren(classId, parentDn, None, inHierarchical, dumpXml) if crc.errorCode == 0: moList = UcsUtils.extractMolistFromMethodResponse(crc, inHierarchical) return moList else: raise UcsException(crc.errorCode, crc.error_descr)
[ "def", "GetUcsChild", "(", "self", ",", "inMo", "=", "None", ",", "inDn", "=", "None", ",", "classId", "=", "None", ",", "inHierarchical", "=", "False", ",", "dumpXml", "=", "None", ")", ":", "from", "UcsBase", "import", "UcsValidationException", ",", "UcsException", ",", "UcsUtils", "if", "not", "inDn", "and", "not", "inMo", ":", "raise", "UcsValidationException", "(", "'[Error]: get_ucs_child: Provide in_mo or in_dn.'", ")", "if", "inMo", ":", "parentDn", "=", "inMo", ".", "getattr", "(", "\"Dn\"", ")", "elif", "inDn", ":", "parentDn", "=", "inDn", "crc", "=", "self", ".", "ConfigResolveChildren", "(", "classId", ",", "parentDn", ",", "None", ",", "inHierarchical", ",", "dumpXml", ")", "if", "crc", ".", "errorCode", "==", "0", ":", "moList", "=", "UcsUtils", ".", "extractMolistFromMethodResponse", "(", "crc", ",", "inHierarchical", ")", "return", "moList", "else", ":", "raise", "UcsException", "(", "crc", ".", "errorCode", ",", "crc", ".", "error_descr", ")" ]
Gets Child Managed Object from UCS. - in_mo, if provided, it acts as a parent for the present operation. (required if in_dn is None). - in_dn, parent mo dn (required if in_mo is None) - class_id of the managed object/s to get.(optional) - in_hierarchical, Explores hierarchy if true, else returns managed objects at a single level.(optional)
[ "Gets", "Child", "Managed", "Object", "from", "UCS", "." ]
bf6b07d6abeacb922c92b198352eda4eb9e4629b
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L1810-L1835
train
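A usage sketch for the GetUcsChild record above; the DN and classId values are illustrative only, and the Login/Logout flow is assumed from the rest of the SDK.

from UcsSdk import UcsHandle

handle = UcsHandle()
try:
    handle.Login("10.0.0.1", "admin", "password")
    # Resolve the immediate children of a parent DN, optionally filtered by
    # class; inHierarchical=True would walk the whole subtree instead.
    blades = handle.GetUcsChild(inDn="sys/chassis-1", classId="ComputeBlade")
    for blade in blades:
        print(blade.Dn)
finally:
    handle.Logout()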
tehmaze/natural
natural/text.py
code
def code(sentence, pad=' ', format='army'): ''' Transform a sentence using the code spelling alphabet, multiple international code alphabets are supported. ====== ==================================================================== format description ====== ==================================================================== army US (international) army code alphabet faa Federal Aviation Administration code alphabet, as described in "ICAO Phonetics in the FAA ATC Manual, §2-4-16" icao International Civil Aviation Organization, as described in "Annex 10 to the Convention on International Civil Aviation, Volume II (Fifth edition, 1995), Chapter 5, 38–40" itu International Telecommunication Union Roman alphabet, as described in "ITU Phonetic Alphabet and Figure Code" morse International morse code, as described in "International Morse code Recommendation ITU-R M.1677-1", http://itu.int/ word International Civil Aviation Organization, as described in "Annex 10 to the Convention on International Civil Aviation, Volume II (Fifth edition, 1995), Chapter 5, 38–40" ====== ==================================================================== :param sentence: input sentence :param pad: default ``None`` (reside to per-alphabet defaults) :param format: default ``army`` >>> print(code('Python')) PAH pah YANG kee TANG go HO tell OSS car NOH vem ber >>> print(code('Python', format='faa')) PAHPAH YANGKEY TANGGO HOHTELL OSSCAH NOVEMBER >>> print(code('Python', format='icao')) PAH PAH YANG KEY TANG GO HOH TELL OSS CAH NO VEM BER >>> print(code('Python', format='itu')) PAH PAH YANG KEY TANG GO HOH TELL OSS CAH NO VEM BER >>> print(code('Python', format='morse')) .--. -.-- - .... --- -. >>> print(code('Python', format='word')) papa yankee tango hotel oscar november ''' try: return ALPHABET['code'][format](sentence, pad or CODE_PADDING[format]) except KeyError: raise TypeError('Unsupported code alphabet "%s"' % (format,))
python
def code(sentence, pad=' ', format='army'): ''' Transform a sentence using the code spelling alphabet, multiple international code alphabets are supported. ====== ==================================================================== format description ====== ==================================================================== army US (international) army code alphabet faa Federal Aviation Administration code alphabet, as described in "ICAO Phonetics in the FAA ATC Manual, §2-4-16" icao International Civil Aviation Organization, as described in "Annex 10 to the Convention on International Civil Aviation, Volume II (Fifth edition, 1995), Chapter 5, 38–40" itu International Telecommunication Union Roman alphabet, as described in "ITU Phonetic Alphabet and Figure Code" morse International morse code, as described in "International Morse code Recommendation ITU-R M.1677-1", http://itu.int/ word International Civil Aviation Organization, as described in "Annex 10 to the Convention on International Civil Aviation, Volume II (Fifth edition, 1995), Chapter 5, 38–40" ====== ==================================================================== :param sentence: input sentence :param pad: default ``None`` (reside to per-alphabet defaults) :param format: default ``army`` >>> print(code('Python')) PAH pah YANG kee TANG go HO tell OSS car NOH vem ber >>> print(code('Python', format='faa')) PAHPAH YANGKEY TANGGO HOHTELL OSSCAH NOVEMBER >>> print(code('Python', format='icao')) PAH PAH YANG KEY TANG GO HOH TELL OSS CAH NO VEM BER >>> print(code('Python', format='itu')) PAH PAH YANG KEY TANG GO HOH TELL OSS CAH NO VEM BER >>> print(code('Python', format='morse')) .--. -.-- - .... --- -. >>> print(code('Python', format='word')) papa yankee tango hotel oscar november ''' try: return ALPHABET['code'][format](sentence, pad or CODE_PADDING[format]) except KeyError: raise TypeError('Unsupported code alphabet "%s"' % (format,))
[ "def", "code", "(", "sentence", ",", "pad", "=", "' '", ",", "format", "=", "'army'", ")", ":", "try", ":", "return", "ALPHABET", "[", "'code'", "]", "[", "format", "]", "(", "sentence", ",", "pad", "or", "CODE_PADDING", "[", "format", "]", ")", "except", "KeyError", ":", "raise", "TypeError", "(", "'Unsupported code alphabet \"%s\"'", "%", "(", "format", ",", ")", ")" ]
Transform a sentence using the code spelling alphabet, multiple international code alphabets are supported. ====== ==================================================================== format description ====== ==================================================================== army US (international) army code alphabet faa Federal Aviation Administration code alphabet, as described in "ICAO Phonetics in the FAA ATC Manual, §2-4-16" icao International Civil Aviation Organization, as described in "Annex 10 to the Convention on International Civil Aviation, Volume II (Fifth edition, 1995), Chapter 5, 38–40" itu International Telecommunication Union Roman alphabet, as described in "ITU Phonetic Alphabet and Figure Code" morse International morse code, as described in "International Morse code Recommendation ITU-R M.1677-1", http://itu.int/ word International Civil Aviation Organization, as described in "Annex 10 to the Convention on International Civil Aviation, Volume II (Fifth edition, 1995), Chapter 5, 38–40" ====== ==================================================================== :param sentence: input sentence :param pad: default ``None`` (reside to per-alphabet defaults) :param format: default ``army`` >>> print(code('Python')) PAH pah YANG kee TANG go HO tell OSS car NOH vem ber >>> print(code('Python', format='faa')) PAHPAH YANGKEY TANGGO HOHTELL OSSCAH NOVEMBER >>> print(code('Python', format='icao')) PAH PAH YANG KEY TANG GO HOH TELL OSS CAH NO VEM BER >>> print(code('Python', format='itu')) PAH PAH YANG KEY TANG GO HOH TELL OSS CAH NO VEM BER >>> print(code('Python', format='morse')) .--. -.-- - .... --- -. >>> print(code('Python', format='word')) papa yankee tango hotel oscar november
[ "Transform", "a", "sentence", "using", "the", "code", "spelling", "alphabet", "multiple", "international", "code", "alphabets", "are", "supported", "." ]
d7a1fc9de712f9bcf68884a80826a7977df356fb
https://github.com/tehmaze/natural/blob/d7a1fc9de712f9bcf68884a80826a7977df356fb/natural/text.py#L73-L121
train
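A quick usage sketch for natural.text.code; the expected outputs in the comments are taken from the doctests in the record, and the unsupported-format branch mirrors the TypeError raised in the code (the "klingon" format name is a made-up example).

from natural.text import code

print(code('Python'))                  # PAH pah YANG kee TANG go HO tell OSS car NOH vem ber
print(code('Python', format='morse'))  # .--. -.-- - .... --- -.
print(code('Python', format='word'))   # papa yankee tango hotel oscar november

try:
    code('Python', format='klingon')   # hypothetical unsupported alphabet
except TypeError as exc:
    print(exc)                         # Unsupported code alphabet "klingon"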
tehmaze/natural
natural/text.py
nato
def nato(sentence, pad=' ', format='telephony'): ''' Transform a sentence using the NATO spelling alphabet. :param sentence: input sentence :param pad: default ``' '`` :param format: default ``telephony``, options ``telephony`` or ``phonetic`` >>> print(nato('Python')) papa yankee tango hotel oscar november >>> print(nato('Python', format='phonetic')) pah-pah yang-key tang-go hoh-tel oss-cah no-vem-ber ''' try: return '' + ALPHABET['nato'][format](sentence, pad) except KeyError: raise TypeError('Unsupported NATO alphabet "%s"' % (format,))
python
def nato(sentence, pad=' ', format='telephony'): ''' Transform a sentence using the NATO spelling alphabet. :param sentence: input sentence :param pad: default ``' '`` :param format: default ``telephony``, options ``telephony`` or ``phonetic`` >>> print(nato('Python')) papa yankee tango hotel oscar november >>> print(nato('Python', format='phonetic')) pah-pah yang-key tang-go hoh-tel oss-cah no-vem-ber ''' try: return '' + ALPHABET['nato'][format](sentence, pad) except KeyError: raise TypeError('Unsupported NATO alphabet "%s"' % (format,))
[ "def", "nato", "(", "sentence", ",", "pad", "=", "' '", ",", "format", "=", "'telephony'", ")", ":", "try", ":", "return", "''", "+", "ALPHABET", "[", "'nato'", "]", "[", "format", "]", "(", "sentence", ",", "pad", ")", "except", "KeyError", ":", "raise", "TypeError", "(", "'Unsupported NATO alphabet \"%s\"'", "%", "(", "format", ",", ")", ")" ]
Transform a sentence using the NATO spelling alphabet. :param sentence: input sentence :param pad: default ``' '`` :param format: default ``telephony``, options ``telephony`` or ``phonetic`` >>> print(nato('Python')) papa yankee tango hotel oscar november >>> print(nato('Python', format='phonetic')) pah-pah yang-key tang-go hoh-tel oss-cah no-vem-ber
[ "Transform", "a", "sentence", "using", "the", "NATO", "spelling", "alphabet", "." ]
d7a1fc9de712f9bcf68884a80826a7977df356fb
https://github.com/tehmaze/natural/blob/d7a1fc9de712f9bcf68884a80826a7977df356fb/natural/text.py#L135-L153
train
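A companion sketch for natural.text.nato; the outputs in the comments come from the record's doctests, and the pad argument is shown on the assumption that it simply replaces the default word separator.

from natural.text import nato

print(nato('Python'))                     # papa yankee tango hotel oscar november
print(nato('Python', format='phonetic'))  # pah-pah yang-key tang-go hoh-tel oss-cah no-vem-ber
print(nato('Python', pad=' / '))          # assumed: code words joined with ' / ' instead of a space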
yeraydiazdiaz/lunr.py
lunr/query.py
Query.clause
def clause(self, *args, **kwargs): """Adds a `lunr.Clause` to this query. Unless the clause contains the fields to be matched all fields will be matched. In addition a default boost of 1 is applied to the clause. If the first argument is a `lunr.Clause` it will be mutated and added, otherwise args and kwargs will be used in the constructor. Returns: lunr.Query: The Query itself. """ if args and isinstance(args[0], Clause): clause = args[0] else: clause = Clause(*args, **kwargs) if not clause.fields: clause.fields = self.all_fields if (clause.wildcard & Query.WILDCARD_LEADING) and ( clause.term[0] != Query.WILDCARD ): clause.term = Query.WILDCARD + clause.term if (clause.wildcard & Query.WILDCARD_TRAILING) and ( clause.term[-1] != Query.WILDCARD ): clause.term = clause.term + Query.WILDCARD self.clauses.append(clause) return self
python
def clause(self, *args, **kwargs): """Adds a `lunr.Clause` to this query. Unless the clause contains the fields to be matched all fields will be matched. In addition a default boost of 1 is applied to the clause. If the first argument is a `lunr.Clause` it will be mutated and added, otherwise args and kwargs will be used in the constructor. Returns: lunr.Query: The Query itself. """ if args and isinstance(args[0], Clause): clause = args[0] else: clause = Clause(*args, **kwargs) if not clause.fields: clause.fields = self.all_fields if (clause.wildcard & Query.WILDCARD_LEADING) and ( clause.term[0] != Query.WILDCARD ): clause.term = Query.WILDCARD + clause.term if (clause.wildcard & Query.WILDCARD_TRAILING) and ( clause.term[-1] != Query.WILDCARD ): clause.term = clause.term + Query.WILDCARD self.clauses.append(clause) return self
[ "def", "clause", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "args", "and", "isinstance", "(", "args", "[", "0", "]", ",", "Clause", ")", ":", "clause", "=", "args", "[", "0", "]", "else", ":", "clause", "=", "Clause", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "not", "clause", ".", "fields", ":", "clause", ".", "fields", "=", "self", ".", "all_fields", "if", "(", "clause", ".", "wildcard", "&", "Query", ".", "WILDCARD_LEADING", ")", "and", "(", "clause", ".", "term", "[", "0", "]", "!=", "Query", ".", "WILDCARD", ")", ":", "clause", ".", "term", "=", "Query", ".", "WILDCARD", "+", "clause", ".", "term", "if", "(", "clause", ".", "wildcard", "&", "Query", ".", "WILDCARD_TRAILING", ")", "and", "(", "clause", ".", "term", "[", "-", "1", "]", "!=", "Query", ".", "WILDCARD", ")", ":", "clause", ".", "term", "=", "clause", ".", "term", "+", "Query", ".", "WILDCARD", "self", ".", "clauses", ".", "append", "(", "clause", ")", "return", "self" ]
Adds a `lunr.Clause` to this query. Unless the clause contains the fields to be matched all fields will be matched. In addition a default boost of 1 is applied to the clause. If the first argument is a `lunr.Clause` it will be mutated and added, otherwise args and kwargs will be used in the constructor. Returns: lunr.Query: The Query itself.
[ "Adds", "a", "lunr", ".", "Clause", "to", "this", "query", "." ]
28ec3f6d4888295eed730211ee9617aa488d6ba3
https://github.com/yeraydiazdiaz/lunr.py/blob/28ec3f6d4888295eed730211ee9617aa488d6ba3/lunr/query.py#L43-L74
train
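A sketch of driving Query.clause directly; it assumes the lunr.Query constructor takes the list of searchable fields and that Clause accepts term/wildcard keyword arguments, as the method body above suggests.

from lunr.query import Query, Clause

query = Query(["title", "body"])

# Passing a ready-made Clause: no fields given, so it is expanded to all fields.
query.clause(Clause(term="python"))

# Passing constructor arguments: the trailing wildcard rewrites the term to "search*".
query.clause(term="search", wildcard=Query.WILDCARD_TRAILING)

for c in query.clauses:
    print(c.term, c.fields)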