repository_name: stringlengths (7 to 55)
func_path_in_repository: stringlengths (4 to 223)
func_name: stringlengths (1 to 134)
whole_func_string: stringlengths (75 to 104k)
language: stringclasses (1 value)
func_code_string: stringlengths (75 to 104k)
func_code_tokens: sequencelengths (19 to 28.4k)
func_documentation_string: stringlengths (1 to 46.9k)
func_documentation_tokens: sequencelengths (1 to 1.97k)
split_name: stringclasses (1 value)
func_code_url: stringlengths (87 to 315)
pyout/pyout
pyout/field.py
StyleProcessors.by_lookup
def by_lookup(self, style_key, style_value):
    """Return a processor that extracts the style from `mapping`.

    Parameters
    ----------
    style_key : str
        A style key.
    style_value : dict
        A dictionary with a "lookup" key whose value is a "mapping"
        style value that maps a field value to either a style
        attribute (str) or a boolean flag indicating whether to use
        the style attribute named by `style_key`.

    Returns
    -------
    A function.
    """
    style_attr = style_key if self.style_types[style_key] is bool else None
    mapping = style_value["lookup"]

    def proc(value, result):
        try:
            lookup_value = mapping[value]
        except (KeyError, TypeError):
            # ^ TypeError is included in case the user passes non-hashable
            # values.
            return result
        if not lookup_value:
            return result
        return self.render(style_attr or lookup_value, result)
    return proc
python
train
https://github.com/pyout/pyout/blob/d9ff954bdedb6fc70f21f4fe77ad4bf926b201b0/pyout/field.py#L289-L320
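For illustration, a minimal standalone sketch of the same "lookup" processor pattern; `make_lookup_proc` and the `render` stand-in are hypothetical names, not pyout's API:

def make_lookup_proc(mapping, render):
    # Build a processor mapping a field value to a style attribute;
    # unmapped or falsy lookups leave the result unchanged.
    def proc(value, result):
        try:
            lookup_value = mapping[value]
        except (KeyError, TypeError):
            # TypeError guards against non-hashable field values.
            return result
        if not lookup_value:
            return result
        return render(lookup_value, result)
    return proc

def render(attr, text):
    # Stand-in for terminal styling: just tag the text.
    return "<{0}>{1}".format(attr, text)

proc = make_lookup_proc({"ok": "green", "fail": "red"}, render)
print(proc("ok", "done"))       # <green>done
print(proc("missing", "done"))  # done (no style for unmapped values)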
pyout/pyout
pyout/field.py
StyleProcessors.by_re_lookup
def by_re_lookup(self, style_key, style_value, re_flags=0):
    """Return a processor for a "re_lookup" style value.

    Parameters
    ----------
    style_key : str
        A style key.
    style_value : dict
        A dictionary with a "re_lookup" style value that consists of a
        sequence of items where each item should have the form
        `(regexp, x)`, where regexp is a regular expression to match
        against the field value and x is either a style attribute (str)
        or a boolean flag indicating whether to use the style attribute
        named by `style_key`.
    re_flags : int
        Passed through as the flags argument to re.compile.

    Returns
    -------
    A function.
    """
    style_attr = style_key if self.style_types[style_key] is bool else None
    regexps = [(re.compile(r, flags=re_flags), v)
               for r, v in style_value["re_lookup"]]

    def proc(value, result):
        if not isinstance(value, six.string_types):
            return result
        for r, lookup_value in regexps:
            if r.search(value):
                if not lookup_value:
                    return result
                return self.render(style_attr or lookup_value, result)
        return result
    return proc
python
train
https://github.com/pyout/pyout/blob/d9ff954bdedb6fc70f21f4fe77ad4bf926b201b0/pyout/field.py#L322-L355
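A hedged sketch of the "re_lookup" style value shape and its first-match-wins evaluation; the patterns and attribute names are made up:

import re

style_value = {"re_lookup": [(r"^err", "red"), (r"^warn", "yellow")]}
regexps = [(re.compile(r), v) for r, v in style_value["re_lookup"]]

def first_match(value):
    # Mirror the loop above: the first matching regexp decides the style.
    for rgx, attr in regexps:
        if rgx.search(value):
            return attr
    return None  # non-matching values stay unstyled

print(first_match("error: disk full"))  # red
print(first_match("warning: low"))      # yellow
print(first_match("all good"))          # None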
pyout/pyout
pyout/field.py
StyleProcessors.by_interval_lookup
def by_interval_lookup(self, style_key, style_value):
    """Return a processor for an "interval" style value.

    Parameters
    ----------
    style_key : str
        A style key.
    style_value : dict
        A dictionary with an "interval" key whose value consists of a
        sequence of tuples where each tuple should have the form
        `(start, end, x)`, where start is the start of the interval
        (inclusive), end is the end of the interval, and x is either a
        style attribute (str) or a boolean flag indicating whether to
        use the style attribute named by `style_key`.

    Returns
    -------
    A function.
    """
    style_attr = style_key if self.style_types[style_key] is bool else None
    intervals = style_value["interval"]

    def proc(value, result):
        try:
            value = float(value)
        except TypeError:
            return result
        for start, end, lookup_value in intervals:
            if start is None:
                start = float("-inf")
            if end is None:
                end = float("inf")
            if start <= value < end:
                if not lookup_value:
                    return result
                return self.render(style_attr or lookup_value, result)
        return result
    return proc
python
train
https://github.com/pyout/pyout/blob/d9ff954bdedb6fc70f21f4fe77ad4bf926b201b0/pyout/field.py#L357-L396
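To make the interval semantics concrete (start inclusive, end exclusive, `None` meaning unbounded), a small standalone sketch with made-up thresholds:

intervals = [(None, 50, "red"), (50, 80, "yellow"), (80, None, "green")]

def style_for(value):
    # Same normalization as proc() above: None becomes +/- infinity.
    value = float(value)
    for start, end, attr in intervals:
        start = float("-inf") if start is None else start
        end = float("inf") if end is None else end
        if start <= value < end:
            return attr

for v in (10, 50, 79.9, 95):
    print(v, style_for(v))  # 10 red, 50 yellow, 79.9 yellow, 95 green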
pyout/pyout
pyout/field.py
StyleProcessors.post_from_style
def post_from_style(self, column_style):
    """Yield post-format processors based on `column_style`.

    Parameters
    ----------
    column_style : dict
        A style where the top-level keys correspond to style attributes
        such as "bold" or "color".

    Returns
    -------
    A generator object.
    """
    flanks = Flanks()
    yield flanks.split_flanks

    fns = {"simple": self.by_key,
           "lookup": self.by_lookup,
           "re_lookup": self.by_re_lookup,
           "interval": self.by_interval_lookup}

    for key in self.style_types:
        if key not in column_style:
            continue
        vtype = value_type(column_style[key])
        fn = fns[vtype]
        args = [key, column_style[key]]
        if vtype == "re_lookup":
            args.append(sum(getattr(re, f)
                            for f in column_style.get("re_flags", [])))
        yield fn(*args)

    yield flanks.join_flanks
python
train
https://github.com/pyout/pyout/blob/d9ff954bdedb6fc70f21f4fe77ad4bf926b201b0/pyout/field.py#L414-L447
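For reference, a column style exercising each value type in the dispatch table above might look like this; apart from `re_flags`, the key names are illustrative and would have to exist in `self.style_types`:

import re

column_style = {
    "bold": True,                                  # "simple" value
    "color": {"lookup": {"ok": "green"}},          # "lookup" value
    "underline": {"re_lookup": [("^err", True)]},  # "re_lookup" value
    "re_flags": ["IGNORECASE"],                    # names resolved on re
}
# This mirrors how re_flags is folded into a single flags integer.
flags = sum(getattr(re, f) for f in column_style.get("re_flags", []))
print(flags == re.IGNORECASE)  # True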
pyout/pyout
pyout/field.py
Flanks.split_flanks
def split_flanks(self, _, result):
    """Return `result` without flanking whitespace.
    """
    if not result.strip():
        self.left, self.right = "", ""
        return result

    match = self.flank_re.match(result)
    assert match, "This regexp should always match"
    self.left, self.right = match.group(1), match.group(3)
    return match.group(2)
python
train
https://github.com/pyout/pyout/blob/d9ff954bdedb6fc70f21f4fe77ad4bf926b201b0/pyout/field.py#L459-L469
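The `flank_re` attribute itself is not part of this row; a regexp satisfying the assertion (it always matches, splitting leading whitespace, core, and trailing whitespace) could look like this sketch:

import re

flank_re = re.compile(r"^(\s*)(.*?)(\s*)$", re.DOTALL)
match = flank_re.match("  value  ")
print([match.group(i) for i in (1, 2, 3)])  # ['  ', 'value', '  ']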
pyout/pyout
pyout/field.py
TermProcessors.render
def render(self, style_attr, value):
    """Prepend the terminal code for `style_attr` to `value`.

    Parameters
    ----------
    style_attr : str
        A style attribute (e.g., "bold" or "blue").
    value : str
        The value to render.

    Returns
    -------
    The code for `style_attr` (e.g., "\x1b[1m" for bold) plus the
    original value.
    """
    if not value.strip():
        # We've got an empty string.  Don't bother adding any
        # codes.
        return value
    return six.text_type(getattr(self.term, style_attr)) + value
python
train
https://github.com/pyout/pyout/blob/d9ff954bdedb6fc70f21f4fe77ad4bf926b201b0/pyout/field.py#L495-L514
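A sketch of the render step with a stand-in terminal object; the real `self.term` is expected to expose style attributes that stringify to escape codes, and `FakeTerm` here is hypothetical:

class FakeTerm:
    bold = "\x1b[1m"

term = FakeTerm()

def render(style_attr, value):
    if not value.strip():
        return value  # no codes for empty or whitespace-only values
    return str(getattr(term, style_attr)) + value

print(repr(render("bold", "hi")))   # '\x1b[1mhi'
print(repr(render("bold", "   ")))  # '   '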
pyout/pyout
pyout/field.py
TermProcessors.post_from_style
def post_from_style(self, column_style):
    """A Terminal-specific extension of StyleProcessors.post_from_style
    that inserts a code reset before whitespace is rejoined.
    """
    for proc in super(TermProcessors, self).post_from_style(column_style):
        if proc.__name__ == "join_flanks":
            # Reset any codes before adding back whitespace.
            yield self._maybe_reset()
        yield proc
python
train
https://github.com/pyout/pyout/blob/d9ff954bdedb6fc70f21f4fe77ad4bf926b201b0/pyout/field.py#L523-L530
metapensiero/metapensiero.signal
src/metapensiero/signal/core.py
InstanceProxy.connect
def connect(self, cback):
    "See signal"
    return self.signal.connect(cback, subscribers=self.subscribers,
                               instance=self.instance)
python
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/core.py#L56-L60
metapensiero/metapensiero.signal
src/metapensiero/signal/core.py
InstanceProxy.disconnect
def disconnect(self, cback):
    "See signal"
    return self.signal.disconnect(cback, subscribers=self.subscribers,
                                  instance=self.instance)
python
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/core.py#L62-L66
metapensiero/metapensiero.signal
src/metapensiero/signal/core.py
InstanceProxy.get_subscribers
def get_subscribers(self):
    """Get per-instance subscribers from the signal.
    """
    data = self.signal.instance_subscribers
    if self.instance not in data:
        data[self.instance] = MethodAwareWeakList()
    return data[self.instance]
python
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/core.py#L68-L74
metapensiero/metapensiero.signal
src/metapensiero/signal/core.py
InstanceProxy.notify
def notify(self, *args, **kwargs):
    "See signal"
    loop = kwargs.pop('loop', self.loop)
    return self.signal.prepare_notification(
        subscribers=self.subscribers, instance=self.instance,
        loop=loop).run(*args, **kwargs)
python
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/core.py#L80-L85
metapensiero/metapensiero.signal
src/metapensiero/signal/core.py
InstanceProxy.notify_prepared
def notify_prepared(self, args=None, kwargs=None, **opts):
    """Like notify, but allows passing more options to the underlying
    `Signal.prepare_notification()` method.

    The allowed options are:

    notify_external : bool
        a flag indicating if the notification should also include the
        registered `~.external.ExternalSignaller` in the notification.
        It's ``True`` by default.
    """
    if args is None:
        args = ()
    if kwargs is None:
        kwargs = {}
    loop = kwargs.pop('loop', self.loop)
    return self.signal.prepare_notification(
        subscribers=self.subscribers, instance=self.instance,
        loop=loop, **opts).run(*args, **kwargs)
python
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/core.py#L89-L108
metapensiero/metapensiero.signal
src/metapensiero/signal/core.py
Signal.connect
def connect(self, cback, subscribers=None, instance=None):
    """Add a function or a method as a handler of this signal.
    Any handler added can be a coroutine.

    :param cback: the callback (or *handler*) to be added to the set
    :returns: ``None`` or the value returned by the corresponding wrapper
    """
    if subscribers is None:
        subscribers = self.subscribers
    # wrapper
    if self._fconnect is not None:
        def _connect(cback):
            self._connect(subscribers, cback)

        notify = partial(self._notify_one, instance)
        if instance is not None:
            result = self._fconnect(instance, cback, subscribers,
                                    _connect, notify)
        else:
            result = self._fconnect(cback, subscribers, _connect, notify)
        if inspect.isawaitable(result):
            result = pull_result(result)
    else:
        self._connect(subscribers, cback)
        result = None
    return result
python
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/core.py#L233-L258
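A hedged usage sketch of the connect/notify cycle, assuming the import names the package documents (Signal together with its SignalAndHandlerInitMeta metaclass); exact return values depend on the Executor:

from metapensiero.signal import Signal, SignalAndHandlerInitMeta

class Button(metaclass=SignalAndHandlerInitMeta):
    clicked = Signal()  # instance access goes through InstanceProxy

def on_click(*args):
    print("clicked with", args)

btn = Button()
btn.clicked.connect(on_click)  # delegates to Signal.connect with per-instance subscribers
btn.clicked.notify(1, 2)       # prepares and runs an Executor over the handlers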
metapensiero/metapensiero.signal
src/metapensiero/signal/core.py
Signal.disconnect
def disconnect(self, cback, subscribers=None, instance=None):
    """Remove a previously added function or method from the set of the
    signal's handlers.

    :param cback: the callback (or *handler*) to be removed from the set
    :returns: ``None`` or the value returned by the corresponding wrapper
    """
    if subscribers is None:
        subscribers = self.subscribers
    # wrapper
    if self._fdisconnect is not None:
        def _disconnect(cback):
            self._disconnect(subscribers, cback)

        notify = partial(self._notify_one, instance)
        if instance is not None:
            result = self._fdisconnect(instance, cback, subscribers,
                                       _disconnect, notify)
        else:
            result = self._fdisconnect(cback, subscribers, _disconnect,
                                       notify)
        if inspect.isawaitable(result):
            result = pull_result(result)
    else:
        self._disconnect(subscribers, cback)
        result = None
    return result
python
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/core.py#L264-L290
metapensiero/metapensiero.signal
src/metapensiero/signal/core.py
Signal.ext_publish
def ext_publish(self, instance, loop, *args, **kwargs):
    """If 'external_signaller' is defined, calls its publish method to
    notify external event systems.

    This is for internal usage only, but it's documented because it's
    part of the interface with external notification systems.
    """
    if self.external_signaller is not None:
        # Assumes that the loop is managed by the external handler
        return self.external_signaller.publish_signal(self, instance,
                                                      loop, args, kwargs)
python
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/core.py#L292-L302
metapensiero/metapensiero.signal
src/metapensiero/signal/core.py
Signal.prepare_notification
def prepare_notification(self, *, subscribers=None, instance=None,
                         loop=None, notify_external=True):
    """Set up and configure an `~.utils.Executor`:class: instance."""
    # merge callbacks added to the class level with those added to the
    # instance, giving the former precedence while preserving overall
    # order
    self_subscribers = self.subscribers.copy()
    # add in callbacks declared in the main class body and marked with
    # @handler
    if (instance is not None and self.name and
            isinstance(instance.__class__, SignalAndHandlerInitMeta)):
        class_handlers = type(instance)._get_class_handlers(
            self.name, instance)
        for ch in class_handlers:
            # eventual methods are ephemeral and normally the following
            # condition would always be True for methods but the dict used
            # has logic to take that into account
            if ch not in self_subscribers:
                self_subscribers.append(ch)
    # add in the other instance level callbacks added at runtime
    if subscribers is not None:
        for el in subscribers:
            # eventual methods are ephemeral and normally the following
            # condition would always be True for methods but the dict used
            # has logic to take that into account
            if el not in self_subscribers:
                self_subscribers.append(el)
    loop = loop or self.loop
    # maybe do a round of external publishing
    if notify_external and self.external_signaller is not None:
        self_subscribers.append(partial(self.ext_publish, instance, loop))
    if self._fnotify is None:
        fnotify = None
    else:
        if instance is None:
            fnotify = self._fnotify
        else:
            fnotify = types.MethodType(self._fnotify, instance)
    validator = self._fvalidation
    if validator is not None and instance is not None:
        validator = types.MethodType(validator, instance)
    return Executor(self_subscribers, owner=self,
                    concurrent=SignalOptions.EXEC_CONCURRENT in self.flags,
                    loop=loop, exec_wrapper=fnotify,
                    fvalidation=validator)
python
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/core.py#L339-L383
alefnula/tea
tea/logger/log.py
configure_logging
def configure_logging(
    filename=None,
    filemode="a",
    datefmt=FMT_DATE,
    fmt=FMT,
    stdout_fmt=FMT_STDOUT,
    level=logging.DEBUG,
    stdout_level=logging.WARNING,
    initial_file_message="",
    max_size=1048576,
    rotations_number=5,
    remove_handlers=True,
):
    """Configure logging module.

    Args:
        filename (str): Specifies a filename to log to.
        filemode (str): Specifies the mode to open the log file.
            Values: ``'a'``, ``'w'``. *Default:* ``a``.
        datefmt (str): Use the specified date/time format.
        fmt (str): Format string for the file handler.
        stdout_fmt (str): Format string for the stdout handler.
        level (int): Log level for the file handler. Log levels are the
            same as the log levels from the standard :mod:`logging`
            module. *Default:* ``logging.DEBUG``
        stdout_level (int): Log level for the stdout handler. Log levels
            are the same as the log levels from the standard
            :mod:`logging` module. *Default:* ``logging.WARNING``
        initial_file_message (str): First log entry written in file.
        max_size (int): Maximal size of the logfile. If the size of the
            file exceeds the maximal size, it will be rotated.
        rotations_number (int): Number of rotations to save.
        remove_handlers (bool): Remove all existing handlers.
    """
    logger = logging.getLogger()
    logger.level = logging.NOTSET
    # Remove all handlers
    if remove_handlers:
        while len(logger.handlers) > 0:
            hdlr = logger.handlers[0]
            hdlr.close()
            logger.removeHandler(hdlr)
    # Create stdout handler
    if stdout_level is not None:
        stdout_handler = logging.StreamHandler(sys.stdout)
        stdout_handler.setLevel(stdout_level)
        stdout_formatter = logging.Formatter(stdout_fmt, datefmt)
        # stdoutFormatter.converter = time.gmtime
        stdout_handler.setFormatter(stdout_formatter)
        logger.addHandler(stdout_handler)
    # Create file handler if filename is provided
    if filename is not None:
        # Check if filename directory exists and creates it if it doesn't
        directory = os.path.abspath(os.path.dirname(filename))
        if not os.path.isdir(directory):
            shell.mkdir(directory)
        # Create file handler
        file_handler = RotatingFileHandler(
            filename, filemode, max_size, rotations_number
        )
        file_handler.setLevel(level)
        file_formatter = logging.Formatter(fmt, datefmt)
        file_formatter.converter = time.gmtime
        file_handler.setFormatter(file_formatter)
        logger.addHandler(file_handler)
        if initial_file_message:
            message = " %s " % initial_file_message
            file_handler.stream.write("\n" + message.center(100, "=") + "\n\n")
python
train
https://github.com/alefnula/tea/blob/f5a0a724a425ec4f9dd2c7fe966ef06faf3a15a3/tea/logger/log.py#L31-L98
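A typical call, with the import path assumed from the file's location (tea/logger/log.py; the package may re-export it elsewhere):

import logging
from tea.logger.log import configure_logging  # import path assumed

configure_logging(
    filename="/tmp/app.log",        # rotated once it exceeds max_size
    level=logging.DEBUG,            # everything goes to the file
    stdout_level=logging.WARNING,   # only warnings and up reach stdout
    initial_file_message="run started",
)
logging.getLogger(__name__).warning("visible on stdout and in the file")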
jonhadfield/creds
lib/creds/plan.py
create_plan
def create_plan(existing_users=None, proposed_users=None, purge_undefined=None,
                protected_users=None, allow_non_unique_id=None,
                manage_home=True, manage_keys=True):
    """Determine what changes are required.

    args:
        existing_users (Users): List of discovered users
        proposed_users (Users): List of proposed users
        purge_undefined (bool): Remove discovered users that have not been
            defined in proposed users list
        protected_users (list): List of users' names that should not be
            evaluated as part of the plan creation process
        allow_non_unique_id (bool): Allow more than one user to have the
            same uid
        manage_home (bool): Create/remove users' home directories
        manage_keys (bool): Add/update/remove users' keys (manage_home must
            also be true)

    returns:
        list: Differences between discovered and proposed users with a list
        of operations that will achieve the desired state.
    """
    plan = list()
    proposed_usernames = list()

    if not purge_undefined:
        purge_undefined = constants.PURGE_UNDEFINED
    if not protected_users:
        protected_users = constants.PROTECTED_USERS
    if not allow_non_unique_id:
        allow_non_unique_id = constants.ALLOW_NON_UNIQUE_ID

    # Create list of modifications to make based on proposed users compared
    # to existing users
    for proposed_user in proposed_users:
        proposed_usernames.append(proposed_user.name)
        user_matching_name = existing_users.describe_users(
            users_filter=dict(name=proposed_user.name))
        user_matching_id = get_user_by_uid(uid=proposed_user.uid,
                                           users=existing_users)
        # If user does not exist
        if not allow_non_unique_id and user_matching_id and not user_matching_name:
            plan.append(
                dict(action='fail', error='uid_clash',
                     proposed_user=proposed_user, state='existing',
                     result=None))
        elif not user_matching_name:
            plan.append(
                dict(action='add', proposed_user=proposed_user,
                     state='missing', result=None,
                     manage_home=manage_home, manage_keys=manage_keys))
        # If they do, then compare
        else:
            user_comparison = compare_user(passed_user=proposed_user,
                                           user_list=existing_users)
            if user_comparison.get('result'):
                plan.append(
                    dict(action='update', proposed_user=proposed_user,
                         state='existing', user_comparison=user_comparison,
                         manage_home=manage_home, manage_keys=manage_keys))
    # Application of the proposed user list will not result in deletion of
    # users that need to be removed.  If 'PURGE_UNDEFINED' then look for
    # existing users that are not defined in proposed usernames and mark
    # for removal.
    if purge_undefined:
        for existing_user in existing_users:
            if existing_user.name not in proposed_usernames:
                if existing_user.name not in protected_users:
                    plan.append(
                        dict(action='delete', username=existing_user.name,
                             state='existing', manage_home=manage_home,
                             manage_keys=manage_keys))
    return plan
python
train
https://github.com/jonhadfield/creds/blob/b2053b43516cf742c6e4c2b79713bc625592f47c/lib/creds/plan.py#L13-L70
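The dicts making up the returned plan have a small, fixed shape; illustrative examples of the three main actions, with placeholder values ('shell_action' is a made-up comparison key):

plan = [
    dict(action='add', proposed_user='<User alice>', state='missing',
         result=None, manage_home=True, manage_keys=True),
    dict(action='update', proposed_user='<User bob>', state='existing',
         user_comparison={'result': {'shell_action': 'change'}},
         manage_home=True, manage_keys=True),
    dict(action='delete', username='mallory', state='existing',
         manage_home=True, manage_keys=True),
]
for task in plan:
    # execute_plan dispatches on 'action' and reads the '..._action'
    # keys under user_comparison['result'] for updates.
    print(task['action'])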
jonhadfield/creds
lib/creds/plan.py
execute_plan
def execute_plan(plan=None): """Create, Modify or Delete, depending on plan item.""" execution_result = list() for task in plan: action = task['action'] if action == 'delete': command = generate_delete_user_command(username=task.get('username'), manage_home=task['manage_home']) command_output = execute_command(command) execution_result.append(dict(task=task, command_output=command_output)) remove_sudoers_entry(username=task.get('username')) elif action == 'add': command = generate_add_user_command(proposed_user=task.get('proposed_user'), manage_home=task['manage_home']) command_output = execute_command(command) if task['proposed_user'].public_keys and task['manage_home'] and task['manage_keys']: write_authorized_keys(task['proposed_user']) if task['proposed_user'].sudoers_entry: write_sudoers_entry(username=task['proposed_user'].name, sudoers_entry=task['proposed_user'].sudoers_entry) execution_result.append(dict(task=task, command_output=command_output)) elif action == 'update': result = task['user_comparison'].get('result') # Don't modify user if only keys have changed action_count = 0 for k, _ in iteritems(result): if '_action' in k: action_count += 1 command_output = None if task['manage_home'] and task['manage_keys'] and action_count == 1 and 'public_keys_action' in result: write_authorized_keys(task['proposed_user']) elif action_count == 1 and 'sudoers_entry_action' in result: write_sudoers_entry(username=task['proposed_user'].name, sudoers_entry=task['user_comparison']['result']['replacement_sudoers_entry']) else: command = generate_modify_user_command(task=task) command_output = execute_command(command) if task['manage_home'] and task['manage_keys'] and result.get('public_keys_action'): write_authorized_keys(task['proposed_user']) if result.get('sudoers_entry_action'): write_sudoers_entry(username=task['proposed_user'].name, sudoers_entry=task['user_comparison']['result']['replacement_sudoers_entry']) execution_result.append(dict(task=task, command_output=command_output))
python
def execute_plan(plan=None): """Create, Modify or Delete, depending on plan item.""" execution_result = list() for task in plan: action = task['action'] if action == 'delete': command = generate_delete_user_command(username=task.get('username'), manage_home=task['manage_home']) command_output = execute_command(command) execution_result.append(dict(task=task, command_output=command_output)) remove_sudoers_entry(username=task.get('username')) elif action == 'add': command = generate_add_user_command(proposed_user=task.get('proposed_user'), manage_home=task['manage_home']) command_output = execute_command(command) if task['proposed_user'].public_keys and task['manage_home'] and task['manage_keys']: write_authorized_keys(task['proposed_user']) if task['proposed_user'].sudoers_entry: write_sudoers_entry(username=task['proposed_user'].name, sudoers_entry=task['proposed_user'].sudoers_entry) execution_result.append(dict(task=task, command_output=command_output)) elif action == 'update': result = task['user_comparison'].get('result') # Don't modify user if only keys have changed action_count = 0 for k, _ in iteritems(result): if '_action' in k: action_count += 1 command_output = None if task['manage_home'] and task['manage_keys'] and action_count == 1 and 'public_keys_action' in result: write_authorized_keys(task['proposed_user']) elif action_count == 1 and 'sudoers_entry_action' in result: write_sudoers_entry(username=task['proposed_user'].name, sudoers_entry=task['user_comparison']['result']['replacement_sudoers_entry']) else: command = generate_modify_user_command(task=task) command_output = execute_command(command) if task['manage_home'] and task['manage_keys'] and result.get('public_keys_action'): write_authorized_keys(task['proposed_user']) if result.get('sudoers_entry_action'): write_sudoers_entry(username=task['proposed_user'].name, sudoers_entry=task['user_comparison']['result']['replacement_sudoers_entry']) execution_result.append(dict(task=task, command_output=command_output))
[ "def", "execute_plan", "(", "plan", "=", "None", ")", ":", "execution_result", "=", "list", "(", ")", "for", "task", "in", "plan", ":", "action", "=", "task", "[", "'action'", "]", "if", "action", "==", "'delete'", ":", "command", "=", "generate_delete_user_command", "(", "username", "=", "task", ".", "get", "(", "'username'", ")", ",", "manage_home", "=", "task", "[", "'manage_home'", "]", ")", "command_output", "=", "execute_command", "(", "command", ")", "execution_result", ".", "append", "(", "dict", "(", "task", "=", "task", ",", "command_output", "=", "command_output", ")", ")", "remove_sudoers_entry", "(", "username", "=", "task", ".", "get", "(", "'username'", ")", ")", "elif", "action", "==", "'add'", ":", "command", "=", "generate_add_user_command", "(", "proposed_user", "=", "task", ".", "get", "(", "'proposed_user'", ")", ",", "manage_home", "=", "task", "[", "'manage_home'", "]", ")", "command_output", "=", "execute_command", "(", "command", ")", "if", "task", "[", "'proposed_user'", "]", ".", "public_keys", "and", "task", "[", "'manage_home'", "]", "and", "task", "[", "'manage_keys'", "]", ":", "write_authorized_keys", "(", "task", "[", "'proposed_user'", "]", ")", "if", "task", "[", "'proposed_user'", "]", ".", "sudoers_entry", ":", "write_sudoers_entry", "(", "username", "=", "task", "[", "'proposed_user'", "]", ".", "name", ",", "sudoers_entry", "=", "task", "[", "'proposed_user'", "]", ".", "sudoers_entry", ")", "execution_result", ".", "append", "(", "dict", "(", "task", "=", "task", ",", "command_output", "=", "command_output", ")", ")", "elif", "action", "==", "'update'", ":", "result", "=", "task", "[", "'user_comparison'", "]", ".", "get", "(", "'result'", ")", "# Don't modify user if only keys have changed", "action_count", "=", "0", "for", "k", ",", "_", "in", "iteritems", "(", "result", ")", ":", "if", "'_action'", "in", "k", ":", "action_count", "+=", "1", "command_output", "=", "None", "if", "task", "[", "'manage_home'", "]", "and", "task", "[", "'manage_keys'", "]", "and", "action_count", "==", "1", "and", "'public_keys_action'", "in", "result", ":", "write_authorized_keys", "(", "task", "[", "'proposed_user'", "]", ")", "elif", "action_count", "==", "1", "and", "'sudoers_entry_action'", "in", "result", ":", "write_sudoers_entry", "(", "username", "=", "task", "[", "'proposed_user'", "]", ".", "name", ",", "sudoers_entry", "=", "task", "[", "'user_comparison'", "]", "[", "'result'", "]", "[", "'replacement_sudoers_entry'", "]", ")", "else", ":", "command", "=", "generate_modify_user_command", "(", "task", "=", "task", ")", "command_output", "=", "execute_command", "(", "command", ")", "if", "task", "[", "'manage_home'", "]", "and", "task", "[", "'manage_keys'", "]", "and", "result", ".", "get", "(", "'public_keys_action'", ")", ":", "write_authorized_keys", "(", "task", "[", "'proposed_user'", "]", ")", "if", "result", ".", "get", "(", "'sudoers_entry_action'", ")", ":", "write_sudoers_entry", "(", "username", "=", "task", "[", "'proposed_user'", "]", ".", "name", ",", "sudoers_entry", "=", "task", "[", "'user_comparison'", "]", "[", "'result'", "]", "[", "'replacement_sudoers_entry'", "]", ")", "execution_result", ".", "append", "(", "dict", "(", "task", "=", "task", ",", "command_output", "=", "command_output", ")", ")" ]
Create, Modify or Delete, depending on plan item.
[ "Create", "Modify", "or", "Delete", "depending", "on", "plan", "item", "." ]
train
https://github.com/jonhadfield/creds/blob/b2053b43516cf742c6e4c2b79713bc625592f47c/lib/creds/plan.py#L73-L113
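A plausible way to chain the two plan functions, run as root since the generated commands call the system user-management tools. Note that, as shown in the row above, execute_plan accumulates an execution_result list but never returns it, so callers should not rely on its return value:

    plan = create_plan(existing_users=existing, proposed_users=proposed)
    execute_plan(plan=plan)   # side effects only; requires sufficient privileges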
night-crawler/django-docker-helpers
django_docker_helpers/config/backends/environment_parser.py
EnvironmentParser.get
def get(self, variable_path: str,
            default: t.Optional[t.Any] = None,
            coerce_type: t.Optional[t.Type] = None,
            coercer: t.Optional[t.Callable] = None,
            **kwargs):
        """
        Reads a value of ``variable_path`` from environment.

        If ``coerce_type`` is ``bool`` and no ``coercer`` is specified, ``coercer`` is forced to be
        :func:`~django_docker_helpers.utils.coerce_str_to_bool`

        :param variable_path: a delimiter-separated path to a nested value
        :param default: default value if there's no object by specified path
        :param coerce_type: cast a type of a value to a specified one
        :param coercer: perform a type casting with specified callback
        :param kwargs: additional arguments inherited parser may need
        :return: value or default
        """
        var_name = self.get_env_var_name(variable_path)
        val = self.env.get(var_name, self.sentinel)
        if val is self.sentinel:
            return default

        # coerce to bool with default env coercer if no coercer specified
        if coerce_type and coerce_type is bool and not coercer:
            coercer = coerce_str_to_bool

        return self.coerce(val, coerce_type=coerce_type, coercer=coercer)
python
def get(self, variable_path: str,
            default: t.Optional[t.Any] = None,
            coerce_type: t.Optional[t.Type] = None,
            coercer: t.Optional[t.Callable] = None,
            **kwargs):
        """
        Reads a value of ``variable_path`` from environment.

        If ``coerce_type`` is ``bool`` and no ``coercer`` is specified, ``coercer`` is forced to be
        :func:`~django_docker_helpers.utils.coerce_str_to_bool`

        :param variable_path: a delimiter-separated path to a nested value
        :param default: default value if there's no object by specified path
        :param coerce_type: cast a type of a value to a specified one
        :param coercer: perform a type casting with specified callback
        :param kwargs: additional arguments inherited parser may need
        :return: value or default
        """
        var_name = self.get_env_var_name(variable_path)
        val = self.env.get(var_name, self.sentinel)
        if val is self.sentinel:
            return default

        # coerce to bool with default env coercer if no coercer specified
        if coerce_type and coerce_type is bool and not coercer:
            coercer = coerce_str_to_bool

        return self.coerce(val, coerce_type=coerce_type, coercer=coercer)
[ "def", "get", "(", "self", ",", "variable_path", ":", "str", ",", "default", ":", "t", ".", "Optional", "[", "t", ".", "Any", "]", "=", "None", ",", "coerce_type", ":", "t", ".", "Optional", "[", "t", ".", "Type", "]", "=", "None", ",", "coercer", ":", "t", ".", "Optional", "[", "t", ".", "Callable", "]", "=", "None", ",", "*", "*", "kwargs", ")", ":", "var_name", "=", "self", ".", "get_env_var_name", "(", "variable_path", ")", "val", "=", "self", ".", "env", ".", "get", "(", "var_name", ",", "self", ".", "sentinel", ")", "if", "val", "is", "self", ".", "sentinel", ":", "return", "default", "# coerce to bool with default env coercer if no coercer specified", "if", "coerce_type", "and", "coerce_type", "is", "bool", "and", "not", "coercer", ":", "coercer", "=", "coerce_str_to_bool", "return", "self", ".", "coerce", "(", "val", ",", "coerce_type", "=", "coerce_type", ",", "coercer", "=", "coercer", ")" ]
Reads a value of ``variable_path`` from environment.

If ``coerce_type`` is ``bool`` and no ``coercer`` is specified, ``coercer`` is forced to be
:func:`~django_docker_helpers.utils.coerce_str_to_bool`

:param variable_path: a delimiter-separated path to a nested value
:param default: default value if there's no object by specified path
:param coerce_type: cast a type of a value to a specified one
:param coercer: perform a type casting with specified callback
:param kwargs: additional arguments inherited parser may need
:return: value or default
[ "Reads", "a", "value", "of", "variable_path", "from", "environment", "." ]
train
https://github.com/night-crawler/django-docker-helpers/blob/b64f8009101a8eb61d3841124ba19e3ab881aa2f/django_docker_helpers/config/backends/environment_parser.py#L69-L98
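A hedged sketch of the parser in use. The environment-variable naming produced by get_env_var_name and the constructor signature are assumptions; get's own keyword arguments come from the docstring above:

    import os

    os.environ['MYAPP__DB__PORT'] = '5432'      # the name-mangling scheme is an assumption
    parser = EnvironmentParser(scope='myapp')   # constructor arguments are an assumption

    port = parser.get('db.port', default=5432, coerce_type=int)
    debug = parser.get('debug', default=False, coerce_type=bool)
    # with coerce_type=bool and no coercer, coerce_str_to_bool is applied,
    # so strings like 'true' or '0' come back as real booleans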
alefnula/tea
tea/utils/compress.py
unzip
def unzip(archive, destination, filenames=None):
    """Unzip a zip archive into destination directory.

    It unzips either the whole archive or specific file(s) from the archive.

    Usage:
        >>> output = os.path.join(os.getcwd(), 'output')
        >>> # Archive can be an instance of a ZipFile class
        >>> archive = zipfile.ZipFile('test.zip', 'r')
        >>> # Or just a filename
        >>> archive = 'test.zip'
        >>> # Extracts all files
        >>> unzip(archive, output)
        >>> # Extract only one file
        >>> unzip(archive, output, 'my_file.txt')
        >>> # Extract a list of files
        >>> unzip(archive, output, ['my_file1.txt', 'my_file2.txt'])

    Args:
        archive (zipfile.ZipFile or str): Zipfile object to extract from or
            path to the zip archive.
        destination (str): Path to the output directory.
        filenames (str or list of str or None): Path(s) to the filename(s)
            inside the zip archive that you want to extract.
    """
    close = False
    try:
        if not isinstance(archive, zipfile.ZipFile):
            archive = zipfile.ZipFile(archive, "r", allowZip64=True)
            close = True
        logger.info("Extracting: %s -> %s" % (archive.filename, destination))
        if isinstance(filenames, str):
            filenames = [filenames]
        if filenames is None:  # extract all
            filenames = archive.namelist()
        for filename in filenames:
            if filename.endswith("/"):  # it's a directory
                shell.mkdir(os.path.join(destination, filename))
            else:
                if not _extract_file(archive, destination, filename):
                    raise Exception()
        logger.info('Extracting zip archive "%s" succeeded' % archive.filename)
        return True
    except Exception:
        logger.exception("Error while unzipping archive %s" % archive.filename)
        return False
    finally:
        if close:
            archive.close()
python
def unzip(archive, destination, filenames=None):
    """Unzip a zip archive into destination directory.

    It unzips either the whole archive or specific file(s) from the archive.

    Usage:
        >>> output = os.path.join(os.getcwd(), 'output')
        >>> # Archive can be an instance of a ZipFile class
        >>> archive = zipfile.ZipFile('test.zip', 'r')
        >>> # Or just a filename
        >>> archive = 'test.zip'
        >>> # Extracts all files
        >>> unzip(archive, output)
        >>> # Extract only one file
        >>> unzip(archive, output, 'my_file.txt')
        >>> # Extract a list of files
        >>> unzip(archive, output, ['my_file1.txt', 'my_file2.txt'])

    Args:
        archive (zipfile.ZipFile or str): Zipfile object to extract from or
            path to the zip archive.
        destination (str): Path to the output directory.
        filenames (str or list of str or None): Path(s) to the filename(s)
            inside the zip archive that you want to extract.
    """
    close = False
    try:
        if not isinstance(archive, zipfile.ZipFile):
            archive = zipfile.ZipFile(archive, "r", allowZip64=True)
            close = True
        logger.info("Extracting: %s -> %s" % (archive.filename, destination))
        if isinstance(filenames, str):
            filenames = [filenames]
        if filenames is None:  # extract all
            filenames = archive.namelist()
        for filename in filenames:
            if filename.endswith("/"):  # it's a directory
                shell.mkdir(os.path.join(destination, filename))
            else:
                if not _extract_file(archive, destination, filename):
                    raise Exception()
        logger.info('Extracting zip archive "%s" succeeded' % archive.filename)
        return True
    except Exception:
        logger.exception("Error while unzipping archive %s" % archive.filename)
        return False
    finally:
        if close:
            archive.close()
[ "def", "unzip", "(", "archive", ",", "destination", ",", "filenames", "=", "None", ")", ":", "close", "=", "False", "try", ":", "if", "not", "isinstance", "(", "archive", ",", "zipfile", ".", "ZipFile", ")", ":", "archive", "=", "zipfile", ".", "ZipFile", "(", "archive", ",", "\"r\"", ",", "allowZip64", "=", "True", ")", "close", "=", "True", "logger", ".", "info", "(", "\"Extracting: %s -> %s\"", "%", "(", "archive", ".", "filename", ",", "destination", ")", ")", "if", "isinstance", "(", "filenames", ",", "str", ")", ":", "filenames", "=", "[", "filenames", "]", "if", "filenames", "is", "None", ":", "# extract all", "filenames", "=", "archive", ".", "namelist", "(", ")", "for", "filename", "in", "filenames", ":", "if", "filename", ".", "endswith", "(", "\"/\"", ")", ":", "# it's a directory", "shell", ".", "mkdir", "(", "os", ".", "path", ".", "join", "(", "destination", ",", "filename", ")", ")", "else", ":", "if", "not", "_extract_file", "(", "archive", ",", "destination", ",", "filename", ")", ":", "raise", "Exception", "(", ")", "logger", ".", "info", "(", "'Extracting zip archive \"%s\" succeeded'", "%", "archive", ".", "filename", ")", "return", "True", "except", "Exception", ":", "logger", ".", "exception", "(", "\"Error while unzipping archive %s\"", "%", "archive", ".", "filename", ")", "return", "False", "finally", ":", "if", "close", ":", "archive", ".", "close", "(", ")" ]
Unzip a zip archive into destination directory.

It unzips either the whole archive or specific file(s) from the archive.

Usage:
    >>> output = os.path.join(os.getcwd(), 'output')
    >>> # Archive can be an instance of a ZipFile class
    >>> archive = zipfile.ZipFile('test.zip', 'r')
    >>> # Or just a filename
    >>> archive = 'test.zip'
    >>> # Extracts all files
    >>> unzip(archive, output)
    >>> # Extract only one file
    >>> unzip(archive, output, 'my_file.txt')
    >>> # Extract a list of files
    >>> unzip(archive, output, ['my_file1.txt', 'my_file2.txt'])

Args:
    archive (zipfile.ZipFile or str): Zipfile object to extract from or
        path to the zip archive.
    destination (str): Path to the output directory.
    filenames (str or list of str or None): Path(s) to the filename(s)
        inside the zip archive that you want to extract.
[ "Unzip", "a", "zip", "archive", "into", "destination", "directory", "." ]
train
https://github.com/alefnula/tea/blob/f5a0a724a425ec4f9dd2c7fe966ef06faf3a15a3/tea/utils/compress.py#L40-L89
alefnula/tea
tea/utils/compress.py
mkzip
def mkzip(archive, items, mode="w", save_full_paths=False):
    """Recursively zip a directory.

    Args:
        archive (zipfile.ZipFile or str): ZipFile object to add to, or path
            to the output zip archive.
        items (str or list of str): Single item or list of items (files and
            directories) to be added to zipfile.
        mode (str): 'w' to create a new archive and write to it, 'a' to
            append to an existing one.
        save_full_paths (bool): Preserve full paths.
    """
    close = False
    try:
        if not isinstance(archive, zipfile.ZipFile):
            archive = zipfile.ZipFile(archive, mode, allowZip64=True)
            close = True
        logger.info("mkdzip: Creating %s, from: %s", archive.filename, items)
        if isinstance(items, str):
            items = [items]
        for item in items:
            item = os.path.abspath(item)
            basename = os.path.basename(item)
            if os.path.isdir(item):
                for root, directoires, filenames in os.walk(item):
                    for filename in filenames:
                        path = os.path.join(root, filename)
                        if save_full_paths:
                            archive_path = path.encode("utf-8")
                        else:
                            archive_path = os.path.join(
                                basename, path.replace(item, "").strip("\\/")
                            ).encode("utf-8")
                        archive.write(path, archive_path)
            elif os.path.isfile(item):
                if save_full_paths:
                    archive_name = item.encode("utf-8")
                else:
                    archive_name = basename.encode("utf-8")
                archive.write(item, archive_name)  # , zipfile.ZIP_DEFLATED)
        return True
    except Exception as e:
        logger.error("Error occurred during mkzip: %s" % e)
        return False
    finally:
        if close:
            archive.close()
python
def mkzip(archive, items, mode="w", save_full_paths=False):
    """Recursively zip a directory.

    Args:
        archive (zipfile.ZipFile or str): ZipFile object to add to, or path
            to the output zip archive.
        items (str or list of str): Single item or list of items (files and
            directories) to be added to zipfile.
        mode (str): 'w' to create a new archive and write to it, 'a' to
            append to an existing one.
        save_full_paths (bool): Preserve full paths.
    """
    close = False
    try:
        if not isinstance(archive, zipfile.ZipFile):
            archive = zipfile.ZipFile(archive, mode, allowZip64=True)
            close = True
        logger.info("mkdzip: Creating %s, from: %s", archive.filename, items)
        if isinstance(items, str):
            items = [items]
        for item in items:
            item = os.path.abspath(item)
            basename = os.path.basename(item)
            if os.path.isdir(item):
                for root, directoires, filenames in os.walk(item):
                    for filename in filenames:
                        path = os.path.join(root, filename)
                        if save_full_paths:
                            archive_path = path.encode("utf-8")
                        else:
                            archive_path = os.path.join(
                                basename, path.replace(item, "").strip("\\/")
                            ).encode("utf-8")
                        archive.write(path, archive_path)
            elif os.path.isfile(item):
                if save_full_paths:
                    archive_name = item.encode("utf-8")
                else:
                    archive_name = basename.encode("utf-8")
                archive.write(item, archive_name)  # , zipfile.ZIP_DEFLATED)
        return True
    except Exception as e:
        logger.error("Error occurred during mkzip: %s" % e)
        return False
    finally:
        if close:
            archive.close()
[ "def", "mkzip", "(", "archive", ",", "items", ",", "mode", "=", "\"w\"", ",", "save_full_paths", "=", "False", ")", ":", "close", "=", "False", "try", ":", "if", "not", "isinstance", "(", "archive", ",", "zipfile", ".", "ZipFile", ")", ":", "archive", "=", "zipfile", ".", "ZipFile", "(", "archive", ",", "mode", ",", "allowZip64", "=", "True", ")", "close", "=", "True", "logger", ".", "info", "(", "\"mkdzip: Creating %s, from: %s\"", ",", "archive", ".", "filename", ",", "items", ")", "if", "isinstance", "(", "items", ",", "str", ")", ":", "items", "=", "[", "items", "]", "for", "item", "in", "items", ":", "item", "=", "os", ".", "path", ".", "abspath", "(", "item", ")", "basename", "=", "os", ".", "path", ".", "basename", "(", "item", ")", "if", "os", ".", "path", ".", "isdir", "(", "item", ")", ":", "for", "root", ",", "directoires", ",", "filenames", "in", "os", ".", "walk", "(", "item", ")", ":", "for", "filename", "in", "filenames", ":", "path", "=", "os", ".", "path", ".", "join", "(", "root", ",", "filename", ")", "if", "save_full_paths", ":", "archive_path", "=", "path", ".", "encode", "(", "\"utf-8\"", ")", "else", ":", "archive_path", "=", "os", ".", "path", ".", "join", "(", "basename", ",", "path", ".", "replace", "(", "item", ",", "\"\"", ")", ".", "strip", "(", "\"\\\\/\"", ")", ")", ".", "encode", "(", "\"utf-8\"", ")", "archive", ".", "write", "(", "path", ",", "archive_path", ")", "elif", "os", ".", "path", ".", "isfile", "(", "item", ")", ":", "if", "save_full_paths", ":", "archive_name", "=", "item", ".", "encode", "(", "\"utf-8\"", ")", "else", ":", "archive_name", "=", "basename", ".", "encode", "(", "\"utf-8\"", ")", "archive", ".", "write", "(", "item", ",", "archive_name", ")", "# , zipfile.ZIP_DEFLATED)", "return", "True", "except", "Exception", "as", "e", ":", "logger", ".", "error", "(", "\"Error occurred during mkzip: %s\"", "%", "e", ")", "return", "False", "finally", ":", "if", "close", ":", "archive", ".", "close", "(", ")" ]
Recursively zip a directory.

Args:
    archive (zipfile.ZipFile or str): ZipFile object to add to, or path
        to the output zip archive.
    items (str or list of str): Single item or list of items (files and
        directories) to be added to zipfile.
    mode (str): 'w' to create a new archive and write to it, 'a' to
        append to an existing one.
    save_full_paths (bool): Preserve full paths.
[ "Recursively", "zip", "a", "directory", "." ]
train
https://github.com/alefnula/tea/blob/f5a0a724a425ec4f9dd2c7fe966ef06faf3a15a3/tea/utils/compress.py#L92-L137
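A short usage sketch for mkzip; the module path is taken from the row above, everything else is illustrative. The function returns True/False rather than raising, so failures must be checked explicitly:

    from tea.utils.compress import mkzip

    ok = mkzip('release.zip', ['docs', 'README.md'])    # zip a directory and a file
    if ok:
        mkzip('release.zip', 'CHANGELOG.md', mode='a')  # append to the same archive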
alefnula/tea
tea/utils/compress.py
seven_zip
def seven_zip(archive, items, self_extracting=False): """Create a 7z archive.""" if not isinstance(items, (list, tuple)): items = [items] if self_extracting: return er(_get_sz(), "a", "-ssw", "-sfx", archive, *items) else: return er(_get_sz(), "a", "-ssw", archive, *items)
python
def seven_zip(archive, items, self_extracting=False): """Create a 7z archive.""" if not isinstance(items, (list, tuple)): items = [items] if self_extracting: return er(_get_sz(), "a", "-ssw", "-sfx", archive, *items) else: return er(_get_sz(), "a", "-ssw", archive, *items)
[ "def", "seven_zip", "(", "archive", ",", "items", ",", "self_extracting", "=", "False", ")", ":", "if", "not", "isinstance", "(", "items", ",", "(", "list", ",", "tuple", ")", ")", ":", "items", "=", "[", "items", "]", "if", "self_extracting", ":", "return", "er", "(", "_get_sz", "(", ")", ",", "\"a\"", ",", "\"-ssw\"", ",", "\"-sfx\"", ",", "archive", ",", "*", "items", ")", "else", ":", "return", "er", "(", "_get_sz", "(", ")", ",", "\"a\"", ",", "\"-ssw\"", ",", "archive", ",", "*", "items", ")" ]
Create a 7z archive.
[ "Create", "a", "7z", "archive", "." ]
train
https://github.com/alefnula/tea/blob/f5a0a724a425ec4f9dd2c7fe966ef06faf3a15a3/tea/utils/compress.py#L158-L165
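seven_zip shells out to the 7z binary located by the internal _get_sz helper, so the executable must be installed and discoverable. -ssw (compress files open for writing) is always passed; self_extracting=True adds -sfx. The shape of the value returned by er is not shown in the rows, so treat it as opaque:

    result = seven_zip('backup.7z', ['data', 'notes.txt'])            # plain archive
    seven_zip('installer.exe', 'payload_dir', self_extracting=True)   # SFX build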
night-crawler/django-docker-helpers
django_docker_helpers/db.py
ensure_caches_alive
def ensure_caches_alive(max_retries: int = 100, retry_timeout: int = 5, exit_on_failure: bool = True) -> bool: """ Checks every cache backend alias in ``settings.CACHES`` until it becomes available. If any backend is still unreachable after ``max_retries`` attempts, it returns ``False``. If ``exit_on_failure`` is set it shuts down with ``exit(1)``. It sets the ``django-docker-helpers:available-check`` key for every cache backend to ensure it's receiving connections. If the check passes, the key is deleted. :param exit_on_failure: set to ``True`` if there's no sense to continue :param int max_retries: a number of attempts to reach cache backend, default is ``100`` :param int retry_timeout: a timeout in seconds between attempts, default is ``5`` :return: ``True`` if all backends are available, ``False`` if any backend check failed """ for cache_alias in settings.CACHES.keys(): cache = caches[cache_alias] wf('Checking if the cache backed is accessible for the alias `%s`... ' % cache_alias, False) for i in range(max_retries): try: cache.set('django-docker-helpers:available-check', '1') assert cache.get('django-docker-helpers:available-check') == '1' cache.delete('django-docker-helpers:available-check') wf('[+]\n') break except Exception as e: wf(str(e) + '\n') sleep(retry_timeout) else: wf('Tried %s time(s). Shutting down.\n' % max_retries) exit_on_failure and exit(1) return False return True
python
def ensure_caches_alive(max_retries: int = 100, retry_timeout: int = 5, exit_on_failure: bool = True) -> bool: """ Checks every cache backend alias in ``settings.CACHES`` until it becomes available. If any backend is still unreachable after ``max_retries`` attempts, it returns ``False``. If ``exit_on_failure`` is set it shuts down with ``exit(1)``. It sets the ``django-docker-helpers:available-check`` key for every cache backend to ensure it's receiving connections. If the check passes, the key is deleted. :param exit_on_failure: set to ``True`` if there's no sense to continue :param int max_retries: a number of attempts to reach cache backend, default is ``100`` :param int retry_timeout: a timeout in seconds between attempts, default is ``5`` :return: ``True`` if all backends are available, ``False`` if any backend check failed """ for cache_alias in settings.CACHES.keys(): cache = caches[cache_alias] wf('Checking if the cache backed is accessible for the alias `%s`... ' % cache_alias, False) for i in range(max_retries): try: cache.set('django-docker-helpers:available-check', '1') assert cache.get('django-docker-helpers:available-check') == '1' cache.delete('django-docker-helpers:available-check') wf('[+]\n') break except Exception as e: wf(str(e) + '\n') sleep(retry_timeout) else: wf('Tried %s time(s). Shutting down.\n' % max_retries) exit_on_failure and exit(1) return False return True
[ "def", "ensure_caches_alive", "(", "max_retries", ":", "int", "=", "100", ",", "retry_timeout", ":", "int", "=", "5", ",", "exit_on_failure", ":", "bool", "=", "True", ")", "->", "bool", ":", "for", "cache_alias", "in", "settings", ".", "CACHES", ".", "keys", "(", ")", ":", "cache", "=", "caches", "[", "cache_alias", "]", "wf", "(", "'Checking if the cache backed is accessible for the alias `%s`... '", "%", "cache_alias", ",", "False", ")", "for", "i", "in", "range", "(", "max_retries", ")", ":", "try", ":", "cache", ".", "set", "(", "'django-docker-helpers:available-check'", ",", "'1'", ")", "assert", "cache", ".", "get", "(", "'django-docker-helpers:available-check'", ")", "==", "'1'", "cache", ".", "delete", "(", "'django-docker-helpers:available-check'", ")", "wf", "(", "'[+]\\n'", ")", "break", "except", "Exception", "as", "e", ":", "wf", "(", "str", "(", "e", ")", "+", "'\\n'", ")", "sleep", "(", "retry_timeout", ")", "else", ":", "wf", "(", "'Tried %s time(s). Shutting down.\\n'", "%", "max_retries", ")", "exit_on_failure", "and", "exit", "(", "1", ")", "return", "False", "return", "True" ]
Checks every cache backend alias in ``settings.CACHES`` until it becomes available. If any backend is still unreachable after ``max_retries`` attempts, it returns ``False``. If ``exit_on_failure`` is set it shuts down with ``exit(1)``. It sets the ``django-docker-helpers:available-check`` key for every cache backend to ensure it's receiving connections. If the check passes, the key is deleted. :param exit_on_failure: set to ``True`` if there's no sense to continue :param int max_retries: a number of attempts to reach cache backend, default is ``100`` :param int retry_timeout: a timeout in seconds between attempts, default is ``5`` :return: ``True`` if all backends are available, ``False`` if any backend check failed
[ "Checks", "every", "cache", "backend", "alias", "in", "settings", ".", "CACHES", "until", "it", "becomes", "available", ".", "After", "max_retries", "attempts", "to", "reach", "any", "backend", "are", "failed", "it", "returns", "False", ".", "If", "exit_on_failure", "is", "set", "it", "shuts", "down", "with", "exit", "(", "1", ")", "." ]
train
https://github.com/night-crawler/django-docker-helpers/blob/b64f8009101a8eb61d3841124ba19e3ab881aa2f/django_docker_helpers/db.py#L12-L45
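A typical place for this check is a container entrypoint, before the application starts. A minimal sketch, assuming Django settings are already configured:

    from django_docker_helpers.db import ensure_caches_alive

    if not ensure_caches_alive(max_retries=10, retry_timeout=2, exit_on_failure=False):
        raise SystemExit('cache backends never became available')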
night-crawler/django-docker-helpers
django_docker_helpers/db.py
ensure_databases_alive
def ensure_databases_alive(max_retries: int = 100, retry_timeout: int = 5, exit_on_failure: bool = True) -> bool: """ Checks every database alias in ``settings.DATABASES`` until it becomes available. If any database is still unreachable after ``max_retries`` attempts, it returns ``False``. If ``exit_on_failure`` is set it shuts down with ``exit(1)``. For every database alias it tries to ``SELECT 1``. If no errors are raised it checks the next alias. :param exit_on_failure: set to ``True`` if there's no sense to continue :param int max_retries: number of attempts to reach every database; default is ``100`` :param int retry_timeout: timeout in seconds between attempts :return: ``True`` if all backends are available, ``False`` if any backend check failed """ template = """ ============================= Checking database connection `{CONNECTION}`: Engine: {ENGINE} Host: {HOST} Database: {NAME} User: {USER} Password: {PASSWORD} =============================\n""" for connection_name in connections: _db_settings = dict.fromkeys(['ENGINE', 'HOST', 'NAME', 'USER', 'PASSWORD']) _db_settings.update(settings.DATABASES[connection_name]) _db_settings['CONNECTION'] = connection_name if _db_settings.get('PASSWORD'): _db_settings['PASSWORD'] = 'set' wf(template.format(**_db_settings)) wf('Checking db connection alive... ', False) for i in range(max_retries): try: cursor = connections[connection_name].cursor() cursor.execute('SELECT 1') cursor.fetchone() wf('[+]\n') break except OperationalError as e: wf(str(e)) sleep(retry_timeout) else: wf('Tried %s time(s). Shutting down.\n' % max_retries) exit_on_failure and exit(1) return False return True
python
def ensure_databases_alive(max_retries: int = 100, retry_timeout: int = 5, exit_on_failure: bool = True) -> bool: """ Checks every database alias in ``settings.DATABASES`` until it becomes available. If any database is still unreachable after ``max_retries`` attempts, it returns ``False``. If ``exit_on_failure`` is set it shuts down with ``exit(1)``. For every database alias it tries to ``SELECT 1``. If no errors are raised it checks the next alias. :param exit_on_failure: set to ``True`` if there's no sense to continue :param int max_retries: number of attempts to reach every database; default is ``100`` :param int retry_timeout: timeout in seconds between attempts :return: ``True`` if all backends are available, ``False`` if any backend check failed """ template = """ ============================= Checking database connection `{CONNECTION}`: Engine: {ENGINE} Host: {HOST} Database: {NAME} User: {USER} Password: {PASSWORD} =============================\n""" for connection_name in connections: _db_settings = dict.fromkeys(['ENGINE', 'HOST', 'NAME', 'USER', 'PASSWORD']) _db_settings.update(settings.DATABASES[connection_name]) _db_settings['CONNECTION'] = connection_name if _db_settings.get('PASSWORD'): _db_settings['PASSWORD'] = 'set' wf(template.format(**_db_settings)) wf('Checking db connection alive... ', False) for i in range(max_retries): try: cursor = connections[connection_name].cursor() cursor.execute('SELECT 1') cursor.fetchone() wf('[+]\n') break except OperationalError as e: wf(str(e)) sleep(retry_timeout) else: wf('Tried %s time(s). Shutting down.\n' % max_retries) exit_on_failure and exit(1) return False return True
[ "def", "ensure_databases_alive", "(", "max_retries", ":", "int", "=", "100", ",", "retry_timeout", ":", "int", "=", "5", ",", "exit_on_failure", ":", "bool", "=", "True", ")", "->", "bool", ":", "template", "=", "\"\"\"\n =============================\n Checking database connection `{CONNECTION}`:\n Engine: {ENGINE}\n Host: {HOST}\n Database: {NAME}\n User: {USER}\n Password: {PASSWORD}\n =============================\\n\"\"\"", "for", "connection_name", "in", "connections", ":", "_db_settings", "=", "dict", ".", "fromkeys", "(", "[", "'ENGINE'", ",", "'HOST'", ",", "'NAME'", ",", "'USER'", ",", "'PASSWORD'", "]", ")", "_db_settings", ".", "update", "(", "settings", ".", "DATABASES", "[", "connection_name", "]", ")", "_db_settings", "[", "'CONNECTION'", "]", "=", "connection_name", "if", "_db_settings", ".", "get", "(", "'PASSWORD'", ")", ":", "_db_settings", "[", "'PASSWORD'", "]", "=", "'set'", "wf", "(", "template", ".", "format", "(", "*", "*", "_db_settings", ")", ")", "wf", "(", "'Checking db connection alive... '", ",", "False", ")", "for", "i", "in", "range", "(", "max_retries", ")", ":", "try", ":", "cursor", "=", "connections", "[", "connection_name", "]", ".", "cursor", "(", ")", "cursor", ".", "execute", "(", "'SELECT 1'", ")", "cursor", ".", "fetchone", "(", ")", "wf", "(", "'[+]\\n'", ")", "break", "except", "OperationalError", "as", "e", ":", "wf", "(", "str", "(", "e", ")", ")", "sleep", "(", "retry_timeout", ")", "else", ":", "wf", "(", "'Tried %s time(s). Shutting down.\\n'", "%", "max_retries", ")", "exit_on_failure", "and", "exit", "(", "1", ")", "return", "False", "return", "True" ]
Checks every database alias in ``settings.DATABASES`` until it becomes available. If any database is still unreachable after ``max_retries`` attempts, it returns ``False``. If ``exit_on_failure`` is set it shuts down with ``exit(1)``. For every database alias it tries to ``SELECT 1``. If no errors are raised it checks the next alias. :param exit_on_failure: set to ``True`` if there's no sense to continue :param int max_retries: number of attempts to reach every database; default is ``100`` :param int retry_timeout: timeout in seconds between attempts :return: ``True`` if all backends are available, ``False`` if any backend check failed
[ "Checks", "every", "database", "alias", "in", "settings", ".", "DATABASES", "until", "it", "becomes", "available", ".", "After", "max_retries", "attempts", "to", "reach", "any", "backend", "are", "failed", "it", "returns", "False", ".", "If", "exit_on_failure", "is", "set", "it", "shuts", "down", "with", "exit", "(", "1", ")", "." ]
train
https://github.com/night-crawler/django-docker-helpers/blob/b64f8009101a8eb61d3841124ba19e3ab881aa2f/django_docker_helpers/db.py#L49-L98
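The same pattern applies to databases; with the default exit_on_failure=True the process simply exits on failure, so no return-value handling is needed:

    from django_docker_helpers.db import ensure_databases_alive

    ensure_databases_alive(max_retries=10, retry_timeout=3)  # exit(1) on failure by default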
night-crawler/django-docker-helpers
django_docker_helpers/db.py
migrate
def migrate(*argv) -> bool: """ Runs Django migrate command. :return: always ``True`` """ wf('Applying migrations... ', False) execute_from_command_line(['./manage.py', 'migrate'] + list(argv)) wf('[+]\n') return True
python
def migrate(*argv) -> bool: """ Runs Django migrate command. :return: always ``True`` """ wf('Applying migrations... ', False) execute_from_command_line(['./manage.py', 'migrate'] + list(argv)) wf('[+]\n') return True
[ "def", "migrate", "(", "*", "argv", ")", "->", "bool", ":", "wf", "(", "'Applying migrations... '", ",", "False", ")", "execute_from_command_line", "(", "[", "'./manage.py'", ",", "'migrate'", "]", "+", "list", "(", "argv", ")", ")", "wf", "(", "'[+]\\n'", ")", "return", "True" ]
Runs Django migrate command. :return: always ``True``
[ "Runs", "Django", "migrate", "command", "." ]
train
https://github.com/night-crawler/django-docker-helpers/blob/b64f8009101a8eb61d3841124ba19e3ab881aa2f/django_docker_helpers/db.py#L102-L111
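The three helpers in the last rows compose naturally into a startup sequence: wait for the databases, wait for the caches, then apply migrations. Extra positional arguments to migrate are appended to the ./manage.py migrate argv, so Django's own flags pass straight through:

    from django_docker_helpers.db import (
        ensure_caches_alive, ensure_databases_alive, migrate,
    )

    ensure_databases_alive()
    ensure_caches_alive()
    migrate('--noinput')   # becomes ./manage.py migrate --noinput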
hufman/flask_rdf
flask_rdf/wsgi.py
Decorator.output
def output(self, output, accepts, set_http_code, set_content_type): """ Formats a response from a WSGI app to handle any RDF graphs If a view function returns a single RDF graph, serialize it based on Accept header If it's not an RDF graph, return it without any special handling """ graph = Decorator._get_graph(output) if graph is not None: # decide the format output_mimetype, output_format = self.format_selector.decide(accepts, graph.context_aware) # requested content couldn't find anything if output_mimetype is None: set_http_code("406 Not Acceptable") return ['406 Not Acceptable'.encode('utf-8')] # explicitly mark text mimetypes as utf-8 if 'text' in output_mimetype: output_mimetype = output_mimetype + '; charset=utf-8' # format the new response serialized = graph.serialize(format=output_format) set_content_type(output_mimetype) return [serialized] else: return output
python
def output(self, output, accepts, set_http_code, set_content_type): """ Formats a response from a WSGI app to handle any RDF graphs If a view function returns a single RDF graph, serialize it based on Accept header If it's not an RDF graph, return it without any special handling """ graph = Decorator._get_graph(output) if graph is not None: # decide the format output_mimetype, output_format = self.format_selector.decide(accepts, graph.context_aware) # requested content couldn't find anything if output_mimetype is None: set_http_code("406 Not Acceptable") return ['406 Not Acceptable'.encode('utf-8')] # explicitly mark text mimetypes as utf-8 if 'text' in output_mimetype: output_mimetype = output_mimetype + '; charset=utf-8' # format the new response serialized = graph.serialize(format=output_format) set_content_type(output_mimetype) return [serialized] else: return output
[ "def", "output", "(", "self", ",", "output", ",", "accepts", ",", "set_http_code", ",", "set_content_type", ")", ":", "graph", "=", "Decorator", ".", "_get_graph", "(", "output", ")", "if", "graph", "is", "not", "None", ":", "# decide the format", "output_mimetype", ",", "output_format", "=", "self", ".", "format_selector", ".", "decide", "(", "accepts", ",", "graph", ".", "context_aware", ")", "# requested content couldn't find anything", "if", "output_mimetype", "is", "None", ":", "set_http_code", "(", "\"406 Not Acceptable\"", ")", "return", "[", "'406 Not Acceptable'", ".", "encode", "(", "'utf-8'", ")", "]", "# explicitly mark text mimetypes as utf-8", "if", "'text'", "in", "output_mimetype", ":", "output_mimetype", "=", "output_mimetype", "+", "'; charset=utf-8'", "# format the new response", "serialized", "=", "graph", ".", "serialize", "(", "format", "=", "output_format", ")", "set_content_type", "(", "output_mimetype", ")", "return", "[", "serialized", "]", "else", ":", "return", "output" ]
Formats a response from a WSGI app to handle any RDF graphs If a view function returns a single RDF graph, serialize it based on Accept header If it's not an RDF graph, return it without any special handling
[ "Formats", "a", "response", "from", "a", "WSGI", "app", "to", "handle", "any", "RDF", "graphs", "If", "a", "view", "function", "returns", "a", "single", "RDF", "graph", "serialize", "it", "based", "on", "Accept", "header", "If", "it", "s", "not", "an", "RDF", "graph", "return", "it", "without", "any", "special", "handling" ]
train
https://github.com/hufman/flask_rdf/blob/9bf86023288171eb0665c15fb28070250f80310c/flask_rdf/wsgi.py#L24-L47
hufman/flask_rdf
flask_rdf/wsgi.py
Decorator.decorate
def decorate(self, app): """ Wraps a WSGI application to return formatted RDF graphs Uses content negotiation to serialize the graph to the client-preferred format Passes other content through unmodified """ from functools import wraps @wraps(app) def decorated(environ, start_response): # capture any start_response from the app app_response = {} app_response['status'] = "200 OK" app_response['headers'] = [] app_response['written'] = BytesIO() def custom_start_response(status, headers, *args, **kwargs): app_response['status'] = status app_response['headers'] = headers app_response['args'] = args app_response['kwargs'] = kwargs return app_response['written'].write returned = app(environ, custom_start_response) # callbacks from the serialization def set_http_code(status): app_response['status'] = str(status) def set_header(header, value): app_response['headers'] = [(h,v) for (h,v) in app_response['headers'] if h.lower() != header.lower()] app_response['headers'].append((header, value)) def set_content_type(content_type): set_header('Content-Type', content_type) # do the serialization accept = environ.get('HTTP_ACCEPT', '') new_return = self.output(returned, accept, set_http_code, set_content_type) # set the Vary header vary_headers = (v for (h,v) in app_response['headers'] if h.lower() == 'vary') vary_elements = list(itertools.chain(*[v.split(',') for v in vary_headers])) vary_elements = list(set([v.strip() for v in vary_elements])) if '*' not in vary_elements and 'accept' not in (v.lower() for v in vary_elements): vary_elements.append('Accept') set_header('Vary', ', '.join(vary_elements)) # pass on the result to the parent WSGI server parent_writer = start_response(app_response['status'], app_response['headers'], *app_response.get('args', []), **app_response.get('kwargs', {})) written = app_response['written'].getvalue() if len(written) > 0: parent_writer(written) return new_return return decorated
python
def decorate(self, app): """ Wraps a WSGI application to return formatted RDF graphs Uses content negotiation to serialize the graph to the client-preferred format Passes other content through unmodified """ from functools import wraps @wraps(app) def decorated(environ, start_response): # capture any start_response from the app app_response = {} app_response['status'] = "200 OK" app_response['headers'] = [] app_response['written'] = BytesIO() def custom_start_response(status, headers, *args, **kwargs): app_response['status'] = status app_response['headers'] = headers app_response['args'] = args app_response['kwargs'] = kwargs return app_response['written'].write returned = app(environ, custom_start_response) # callbacks from the serialization def set_http_code(status): app_response['status'] = str(status) def set_header(header, value): app_response['headers'] = [(h,v) for (h,v) in app_response['headers'] if h.lower() != header.lower()] app_response['headers'].append((header, value)) def set_content_type(content_type): set_header('Content-Type', content_type) # do the serialization accept = environ.get('HTTP_ACCEPT', '') new_return = self.output(returned, accept, set_http_code, set_content_type) # set the Vary header vary_headers = (v for (h,v) in app_response['headers'] if h.lower() == 'vary') vary_elements = list(itertools.chain(*[v.split(',') for v in vary_headers])) vary_elements = list(set([v.strip() for v in vary_elements])) if '*' not in vary_elements and 'accept' not in (v.lower() for v in vary_elements): vary_elements.append('Accept') set_header('Vary', ', '.join(vary_elements)) # pass on the result to the parent WSGI server parent_writer = start_response(app_response['status'], app_response['headers'], *app_response.get('args', []), **app_response.get('kwargs', {})) written = app_response['written'].getvalue() if len(written) > 0: parent_writer(written) return new_return return decorated
[ "def", "decorate", "(", "self", ",", "app", ")", ":", "from", "functools", "import", "wraps", "@", "wraps", "(", "app", ")", "def", "decorated", "(", "environ", ",", "start_response", ")", ":", "# capture any start_response from the app", "app_response", "=", "{", "}", "app_response", "[", "'status'", "]", "=", "\"200 OK\"", "app_response", "[", "'headers'", "]", "=", "[", "]", "app_response", "[", "'written'", "]", "=", "BytesIO", "(", ")", "def", "custom_start_response", "(", "status", ",", "headers", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "app_response", "[", "'status'", "]", "=", "status", "app_response", "[", "'headers'", "]", "=", "headers", "app_response", "[", "'args'", "]", "=", "args", "app_response", "[", "'kwargs'", "]", "=", "kwargs", "return", "app_response", "[", "'written'", "]", ".", "write", "returned", "=", "app", "(", "environ", ",", "custom_start_response", ")", "# callbacks from the serialization", "def", "set_http_code", "(", "status", ")", ":", "app_response", "[", "'status'", "]", "=", "str", "(", "status", ")", "def", "set_header", "(", "header", ",", "value", ")", ":", "app_response", "[", "'headers'", "]", "=", "[", "(", "h", ",", "v", ")", "for", "(", "h", ",", "v", ")", "in", "app_response", "[", "'headers'", "]", "if", "h", ".", "lower", "(", ")", "!=", "header", ".", "lower", "(", ")", "]", "app_response", "[", "'headers'", "]", ".", "append", "(", "(", "header", ",", "value", ")", ")", "def", "set_content_type", "(", "content_type", ")", ":", "set_header", "(", "'Content-Type'", ",", "content_type", ")", "# do the serialization", "accept", "=", "environ", ".", "get", "(", "'HTTP_ACCEPT'", ",", "''", ")", "new_return", "=", "self", ".", "output", "(", "returned", ",", "accept", ",", "set_http_code", ",", "set_content_type", ")", "# set the Vary header", "vary_headers", "=", "(", "v", "for", "(", "h", ",", "v", ")", "in", "app_response", "[", "'headers'", "]", "if", "h", ".", "lower", "(", ")", "==", "'vary'", ")", "vary_elements", "=", "list", "(", "itertools", ".", "chain", "(", "*", "[", "v", ".", "split", "(", "','", ")", "for", "v", "in", "vary_headers", "]", ")", ")", "vary_elements", "=", "list", "(", "set", "(", "[", "v", ".", "strip", "(", ")", "for", "v", "in", "vary_elements", "]", ")", ")", "if", "'*'", "not", "in", "vary_elements", "and", "'accept'", "not", "in", "(", "v", ".", "lower", "(", ")", "for", "v", "in", "vary_elements", ")", ":", "vary_elements", ".", "append", "(", "'Accept'", ")", "set_header", "(", "'Vary'", ",", "', '", ".", "join", "(", "vary_elements", ")", ")", "# pass on the result to the parent WSGI server", "parent_writer", "=", "start_response", "(", "app_response", "[", "'status'", "]", ",", "app_response", "[", "'headers'", "]", ",", "*", "app_response", ".", "get", "(", "'args'", ",", "[", "]", ")", ",", "*", "*", "app_response", ".", "get", "(", "'kwargs'", ",", "{", "}", ")", ")", "written", "=", "app_response", "[", "'written'", "]", ".", "getvalue", "(", ")", "if", "len", "(", "written", ")", ">", "0", ":", "parent_writer", "(", "written", ")", "return", "new_return", "return", "decorated" ]
Wraps a WSGI application to return formatted RDF graphs Uses content negotiation to serialize the graph to the client-preferred format Passes other content through unmodified
[ "Wraps", "a", "WSGI", "application", "to", "return", "formatted", "RDF", "graphs", "Uses", "content", "negotiation", "to", "serialize", "the", "graph", "to", "the", "client", "-", "preferred", "format", "Passes", "other", "content", "through", "unmodified" ]
train
https://github.com/hufman/flask_rdf/blob/9bf86023288171eb0665c15fb28070250f80310c/flask_rdf/wsgi.py#L49-L101
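Putting the two flask_rdf rows together: decorate wraps a plain WSGI app, and output serializes any rdflib graph the app returns. A sketch, assuming a no-argument Decorator constructor (not shown in the rows above):

    import rdflib
    from flask_rdf.wsgi import Decorator

    decorator = Decorator()   # constructor arguments are an assumption

    @decorator.decorate
    def app(environ, start_response):
        graph = rdflib.Graph()
        graph.add((rdflib.URIRef('http://example.org/thing'),
                   rdflib.RDFS.label,
                   rdflib.Literal('example')))
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return graph   # re-serialized to match the client's Accept header

The wrapper also appends Accept to the Vary header, so HTTP caches keep the per-format responses apart.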
metapensiero/metapensiero.signal
src/metapensiero/signal/user.py
SignalNameHandlerDecorator.is_handler
def is_handler(cls, name, value):
        """Detect a handler and return its wanted signal name."""
        signal_name = False
        config = None
        if callable(value) and hasattr(value, SPEC_CONTAINER_MEMBER_NAME):
            spec = getattr(value, SPEC_CONTAINER_MEMBER_NAME)
            if spec['kind'] == 'handler':
                signal_name = spec['name']
                config = spec['config']
        return signal_name, config
python
def is_handler(cls, name, value):
        """Detect a handler and return its wanted signal name."""
        signal_name = False
        config = None
        if callable(value) and hasattr(value, SPEC_CONTAINER_MEMBER_NAME):
            spec = getattr(value, SPEC_CONTAINER_MEMBER_NAME)
            if spec['kind'] == 'handler':
                signal_name = spec['name']
                config = spec['config']
        return signal_name, config
[ "def", "is_handler", "(", "cls", ",", "name", ",", "value", ")", ":", "signal_name", "=", "False", "config", "=", "None", "if", "callable", "(", "value", ")", "and", "hasattr", "(", "value", ",", "SPEC_CONTAINER_MEMBER_NAME", ")", ":", "spec", "=", "getattr", "(", "value", ",", "SPEC_CONTAINER_MEMBER_NAME", ")", "if", "spec", "[", "'kind'", "]", "==", "'handler'", ":", "signal_name", "=", "spec", "[", "'name'", "]", "config", "=", "spec", "[", "'config'", "]", "return", "signal_name", ",", "config" ]
Detect a handler and return its wanted signal name.
[ "Detect", "an", "handler", "and", "return", "its", "wanted", "signal", "name", "." ]
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/user.py#L36-L45
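is_handler only inspects an attribute that the package's handler decorator is expected to have stored on the method. The payload shape below is inferred from the lookups in the row above; the decorator spelling itself is an assumption:

    def on_click(self, *args):
        pass

    # what the @handler('click') decorator presumably attaches:
    setattr(on_click, SPEC_CONTAINER_MEMBER_NAME,
            {'kind': 'handler', 'name': 'click', 'config': {}})

    sig_name, config = SignalNameHandlerDecorator.is_handler('on_click', on_click)
    assert sig_name == 'click' and config == {}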
metapensiero/metapensiero.signal
src/metapensiero/signal/user.py
InheritanceToolsMeta._build_inheritance_chain
def _build_inheritance_chain(cls, bases, *names, merge=False): """For all of the names build a ChainMap containing a map for every base class.""" result = [] for name in names: maps = [] for base in bases: bmap = getattr(base, name, None) if bmap is not None: assert isinstance(bmap, (dict, ChainMap)) if len(bmap): if isinstance(bmap, ChainMap): maps.extend(bmap.maps) else: maps.append(bmap) result.append(ChainMap({}, *maps)) if merge: result = [dict(map) for map in result] if len(names) == 1: return result[0] return result
python
def _build_inheritance_chain(cls, bases, *names, merge=False): """For all of the names build a ChainMap containing a map for every base class.""" result = [] for name in names: maps = [] for base in bases: bmap = getattr(base, name, None) if bmap is not None: assert isinstance(bmap, (dict, ChainMap)) if len(bmap): if isinstance(bmap, ChainMap): maps.extend(bmap.maps) else: maps.append(bmap) result.append(ChainMap({}, *maps)) if merge: result = [dict(map) for map in result] if len(names) == 1: return result[0] return result
[ "def", "_build_inheritance_chain", "(", "cls", ",", "bases", ",", "*", "names", ",", "merge", "=", "False", ")", ":", "result", "=", "[", "]", "for", "name", "in", "names", ":", "maps", "=", "[", "]", "for", "base", "in", "bases", ":", "bmap", "=", "getattr", "(", "base", ",", "name", ",", "None", ")", "if", "bmap", "is", "not", "None", ":", "assert", "isinstance", "(", "bmap", ",", "(", "dict", ",", "ChainMap", ")", ")", "if", "len", "(", "bmap", ")", ":", "if", "isinstance", "(", "bmap", ",", "ChainMap", ")", ":", "maps", ".", "extend", "(", "bmap", ".", "maps", ")", "else", ":", "maps", ".", "append", "(", "bmap", ")", "result", ".", "append", "(", "ChainMap", "(", "{", "}", ",", "*", "maps", ")", ")", "if", "merge", ":", "result", "=", "[", "dict", "(", "map", ")", "for", "map", "in", "result", "]", "if", "len", "(", "names", ")", "==", "1", ":", "return", "result", "[", "0", "]", "return", "result" ]
For all of the names build a ChainMap containing a map for every base class.
[ "For", "all", "of", "the", "names", "build", "a", "ChainMap", "containing", "a", "map", "for", "every", "base", "class", "." ]
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/user.py#L55-L75
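The ChainMap stacking above can be shown in isolation; this standalone stdlib snippet mirrors what _build_inheritance_chain assembles for one name over two bases:

    from collections import ChainMap

    class BaseA:
        _signals = {'click': 'from_A'}

    class BaseB:
        _signals = {'click': 'shadowed', 'hover': 'from_B'}

    chain = ChainMap({}, BaseA._signals, BaseB._signals)
    assert chain['click'] == 'from_A'                             # earlier bases win
    assert dict(chain) == {'click': 'from_A', 'hover': 'from_B'}  # what merge=True collapses to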
metapensiero/metapensiero.signal
src/metapensiero/signal/user.py
SignalAndHandlerInitMeta._build_instance_handler_mapping
def _build_instance_handler_mapping(cls, instance, handle_d): """For every unbound handler, get the bound version.""" res = {} for member_name, sig_name in handle_d.items(): if sig_name in res: sig_handlers = res[sig_name] else: sig_handlers = res[sig_name] = [] sig_handlers.append(getattr(instance, member_name)) return res
python
def _build_instance_handler_mapping(cls, instance, handle_d): """For every unbound handler, get the bound version.""" res = {} for member_name, sig_name in handle_d.items(): if sig_name in res: sig_handlers = res[sig_name] else: sig_handlers = res[sig_name] = [] sig_handlers.append(getattr(instance, member_name)) return res
[ "def", "_build_instance_handler_mapping", "(", "cls", ",", "instance", ",", "handle_d", ")", ":", "res", "=", "{", "}", "for", "member_name", ",", "sig_name", "in", "handle_d", ".", "items", "(", ")", ":", "if", "sig_name", "in", "res", ":", "sig_handlers", "=", "res", "[", "sig_name", "]", "else", ":", "sig_handlers", "=", "res", "[", "sig_name", "]", "=", "[", "]", "sig_handlers", ".", "append", "(", "getattr", "(", "instance", ",", "member_name", ")", ")", "return", "res" ]
For every unbound handler, get the bound version.
[ "For", "every", "unbound", "handler", "get", "the", "bound", "version", "." ]
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/user.py#L134-L143
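The inversion performed here (member name to signal name becomes signal name to bound methods) is easy to reproduce standalone:

    class Widget:
        def on_click(self):
            pass

        def log_click(self):
            pass

    w = Widget()
    handle_d = {'on_click': 'click', 'log_click': 'click'}  # member name -> signal name

    res = {}  # the equivalent of _build_instance_handler_mapping(w, handle_d)
    for member_name, sig_name in handle_d.items():
        res.setdefault(sig_name, []).append(getattr(w, member_name))

    assert [h.__name__ for h in res['click']] == ['on_click', 'log_click']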
metapensiero/metapensiero.signal
src/metapensiero/signal/user.py
SignalAndHandlerInitMeta._check_local_handlers
def _check_local_handlers(cls, signals, handlers, namespace, configs):
        """For every marked handler, see if there is a suitable signal. If
        not, raise an error."""
        for aname, sig_name in handlers.items():
            # WARN: this code doesn't take into account the case where a new
            # method with the same name as a handler in a base class is
            # present in this class but it isn't a handler (so the handler
            # with the same name should be removed from the handlers)
            if sig_name not in signals:
                disable_check = configs[aname].get('disable_check', False)
                if not disable_check:
                    raise SignalError("Cannot find a signal named '%s'" % sig_name)
python
def _check_local_handlers(cls, signals, handlers, namespace, configs):
        """For every marked handler, see if there is a suitable signal. If
        not, raise an error."""
        for aname, sig_name in handlers.items():
            # WARN: this code doesn't take into account the case where a new
            # method with the same name as a handler in a base class is
            # present in this class but it isn't a handler (so the handler
            # with the same name should be removed from the handlers)
            if sig_name not in signals:
                disable_check = configs[aname].get('disable_check', False)
                if not disable_check:
                    raise SignalError("Cannot find a signal named '%s'" % sig_name)
[ "def", "_check_local_handlers", "(", "cls", ",", "signals", ",", "handlers", ",", "namespace", ",", "configs", ")", ":", "for", "aname", ",", "sig_name", "in", "handlers", ".", "items", "(", ")", ":", "# WARN: this code doesn't take in account the case where a new", "# method with the same name of an handler in a base class is", "# present in this class but it isn't an handler (so the handler", "# with the same name should be removed from the handlers)", "if", "sig_name", "not", "in", "signals", ":", "disable_check", "=", "configs", "[", "aname", "]", ".", "get", "(", "'disable_check'", ",", "False", ")", "if", "not", "disable_check", ":", "raise", "SignalError", "(", "\"Cannot find a signal named '%s'\"", "%", "sig_name", ")" ]
For every marked handler, see if there is a suitable signal. If not, raise an error.
[ "For", "every", "marked", "handler", "see", "if", "there", "is", "a", "suitable", "signal", ".", "If", "not", "raise", "an", "error", "." ]
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/user.py#L145-L157
metapensiero/metapensiero.signal
src/metapensiero/signal/user.py
SignalAndHandlerInitMeta._find_local_signals
def _find_local_signals(cls, signals, namespace): """Add name info to every "local" (present in the body of this class) signal and add it to the mapping. Also complete signal initialization as member of the class by injecting its name. """ from . import Signal signaller = cls._external_signaller_and_handler for aname, avalue in namespace.items(): if isinstance(avalue, Signal): if avalue.name: aname = avalue.name else: avalue.name = aname assert ((aname not in signals) or (aname in signals and avalue is not signals[aname])), \ ("The same signal {name!r} was found " "two times".format(name=aname)) if signaller: avalue.external_signaller = signaller signals[aname] = avalue
python
def _find_local_signals(cls, signals, namespace): """Add name info to every "local" (present in the body of this class) signal and add it to the mapping. Also complete signal initialization as member of the class by injecting its name. """ from . import Signal signaller = cls._external_signaller_and_handler for aname, avalue in namespace.items(): if isinstance(avalue, Signal): if avalue.name: aname = avalue.name else: avalue.name = aname assert ((aname not in signals) or (aname in signals and avalue is not signals[aname])), \ ("The same signal {name!r} was found " "two times".format(name=aname)) if signaller: avalue.external_signaller = signaller signals[aname] = avalue
[ "def", "_find_local_signals", "(", "cls", ",", "signals", ",", "namespace", ")", ":", "from", ".", "import", "Signal", "signaller", "=", "cls", ".", "_external_signaller_and_handler", "for", "aname", ",", "avalue", "in", "namespace", ".", "items", "(", ")", ":", "if", "isinstance", "(", "avalue", ",", "Signal", ")", ":", "if", "avalue", ".", "name", ":", "aname", "=", "avalue", ".", "name", "else", ":", "avalue", ".", "name", "=", "aname", "assert", "(", "(", "aname", "not", "in", "signals", ")", "or", "(", "aname", "in", "signals", "and", "avalue", "is", "not", "signals", "[", "aname", "]", ")", ")", ",", "(", "\"The same signal {name!r} was found \"", "\"two times\"", ".", "format", "(", "name", "=", "aname", ")", ")", "if", "signaller", ":", "avalue", ".", "external_signaller", "=", "signaller", "signals", "[", "aname", "]", "=", "avalue" ]
Add name info to every "local" (present in the body of this class) signal and add it to the mapping. Also complete signal initialization as member of the class by injecting its name.
[ "Add", "name", "info", "to", "every", "local", "(", "present", "in", "the", "body", "of", "this", "class", ")", "signal", "and", "add", "it", "to", "the", "mapping", ".", "Also", "complete", "signal", "initialization", "as", "member", "of", "the", "class", "by", "injecting", "its", "name", "." ]
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/user.py#L159-L178
metapensiero/metapensiero.signal
src/metapensiero/signal/user.py
SignalAndHandlerInitMeta._find_local_handlers
def _find_local_handlers(cls, handlers, namespace, configs): """Add name info to every "local" (present in the body of this class) handler and add it to the mapping. """ for aname, avalue in namespace.items(): sig_name, config = cls._is_handler(aname, avalue) if sig_name: configs[aname] = config handlers[aname] = sig_name
python
def _find_local_handlers(cls, handlers, namespace, configs): """Add name info to every "local" (present in the body of this class) handler and add it to the mapping. """ for aname, avalue in namespace.items(): sig_name, config = cls._is_handler(aname, avalue) if sig_name: configs[aname] = config handlers[aname] = sig_name
[ "def", "_find_local_handlers", "(", "cls", ",", "handlers", ",", "namespace", ",", "configs", ")", ":", "for", "aname", ",", "avalue", "in", "namespace", ".", "items", "(", ")", ":", "sig_name", ",", "config", "=", "cls", ".", "_is_handler", "(", "aname", ",", "avalue", ")", "if", "sig_name", ":", "configs", "[", "aname", "]", "=", "config", "handlers", "[", "aname", "]", "=", "sig_name" ]
Add name info to every "local" (present in the body of this class) handler and add it to the mapping.
[ "Add", "name", "info", "to", "every", "local", "(", "present", "in", "the", "body", "of", "this", "class", ")", "handler", "and", "add", "it", "to", "the", "mapping", "." ]
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/user.py#L180-L188
metapensiero/metapensiero.signal
src/metapensiero/signal/user.py
SignalAndHandlerInitMeta._get_class_handlers
def _get_class_handlers(cls, signal_name, instance): """Returns the handlers registered at class level. """ handlers = cls._signal_handlers_sorted[signal_name] return [getattr(instance, hname) for hname in handlers]
python
def _get_class_handlers(cls, signal_name, instance): """Returns the handlers registered at class level. """ handlers = cls._signal_handlers_sorted[signal_name] return [getattr(instance, hname) for hname in handlers]
[ "def", "_get_class_handlers", "(", "cls", ",", "signal_name", ",", "instance", ")", ":", "handlers", "=", "cls", ".", "_signal_handlers_sorted", "[", "signal_name", "]", "return", "[", "getattr", "(", "instance", ",", "hname", ")", "for", "hname", "in", "handlers", "]" ]
Returns the handlers registered at class level.
[ "Returns", "the", "handlers", "registered", "at", "class", "level", "." ]
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/user.py#L190-L194
metapensiero/metapensiero.signal
src/metapensiero/signal/user.py
SignalAndHandlerInitMeta._sort_handlers
def _sort_handlers(cls, signals, handlers, configs): """Sort class defined handlers to give precedence to those declared at lower level. Each ``configs`` entry can contain two keys ``begin`` or ``end`` that will further reposition the handler at the two extremes. """ def macro_precedence_sorter(flags, hname): """The default is to sort 'bottom_up', with lower level getting executed first, but sometimes you need them reversed.""" data = configs[hname] topdown_sort = SignalOptions.SORT_TOPDOWN in flags if topdown_sort: level = levels_count - 1 - data['level'] else: level = data['level'] if 'begin' in data: return (-1, level, hname) elif 'end' in data: return (1, level, hname) else: return (0, level, hname) levels_count = len(handlers.maps) per_signal = defaultdict(list) for level, m in enumerate(reversed(handlers.maps)): for hname, sig_name in m.items(): sig_handlers = per_signal[sig_name] if hname not in sig_handlers: configs[hname]['level'] = level sig_handlers.append(hname) for sig_name, sig_handlers in per_signal.items(): if sig_name in signals: # it may be on a mixin flags = signals[sig_name].flags sig_handlers.sort(key=partial(macro_precedence_sorter, flags)) return per_signal
python
def _sort_handlers(cls, signals, handlers, configs): """Sort class defined handlers to give precedence to those declared at lower level. Each ``configs`` entry can contain two keys ``begin`` or ``end`` that will further reposition the handler at the two extremes. """ def macro_precedence_sorter(flags, hname): """The default is to sort 'bottom_up', with lower level getting executed first, but sometimes you need them reversed.""" data = configs[hname] topdown_sort = SignalOptions.SORT_TOPDOWN in flags if topdown_sort: level = levels_count - 1 - data['level'] else: level = data['level'] if 'begin' in data: return (-1, level, hname) elif 'end' in data: return (1, level, hname) else: return (0, level, hname) levels_count = len(handlers.maps) per_signal = defaultdict(list) for level, m in enumerate(reversed(handlers.maps)): for hname, sig_name in m.items(): sig_handlers = per_signal[sig_name] if hname not in sig_handlers: configs[hname]['level'] = level sig_handlers.append(hname) for sig_name, sig_handlers in per_signal.items(): if sig_name in signals: # it may be on a mixin flags = signals[sig_name].flags sig_handlers.sort(key=partial(macro_precedence_sorter, flags)) return per_signal
[ "def", "_sort_handlers", "(", "cls", ",", "signals", ",", "handlers", ",", "configs", ")", ":", "def", "macro_precedence_sorter", "(", "flags", ",", "hname", ")", ":", "\"\"\"The default is to sort 'bottom_up', with lower level getting\n executed first, but sometimes you need them reversed.\"\"\"", "data", "=", "configs", "[", "hname", "]", "topdown_sort", "=", "SignalOptions", ".", "SORT_TOPDOWN", "in", "flags", "if", "topdown_sort", ":", "level", "=", "levels_count", "-", "1", "-", "data", "[", "'level'", "]", "else", ":", "level", "=", "data", "[", "'level'", "]", "if", "'begin'", "in", "data", ":", "return", "(", "-", "1", ",", "level", ",", "hname", ")", "elif", "'end'", "in", "data", ":", "return", "(", "1", ",", "level", ",", "hname", ")", "else", ":", "return", "(", "0", ",", "level", ",", "hname", ")", "levels_count", "=", "len", "(", "handlers", ".", "maps", ")", "per_signal", "=", "defaultdict", "(", "list", ")", "for", "level", ",", "m", "in", "enumerate", "(", "reversed", "(", "handlers", ".", "maps", ")", ")", ":", "for", "hname", ",", "sig_name", "in", "m", ".", "items", "(", ")", ":", "sig_handlers", "=", "per_signal", "[", "sig_name", "]", "if", "hname", "not", "in", "sig_handlers", ":", "configs", "[", "hname", "]", "[", "'level'", "]", "=", "level", "sig_handlers", ".", "append", "(", "hname", ")", "for", "sig_name", ",", "sig_handlers", "in", "per_signal", ".", "items", "(", ")", ":", "if", "sig_name", "in", "signals", ":", "# it may be on a mixin", "flags", "=", "signals", "[", "sig_name", "]", ".", "flags", "sig_handlers", ".", "sort", "(", "key", "=", "partial", "(", "macro_precedence_sorter", ",", "flags", ")", ")", "return", "per_signal" ]
Sort class defined handlers to give precedence to those declared at lower level. Each ``configs`` entry can contain two keys ``begin`` or ``end`` that will further reposition the handler at the two extremes.
[ "Sort", "class", "defined", "handlers", "to", "give", "precedence", "to", "those", "declared", "at", "lower", "level", ".", "config", "can", "contain", "two", "keys", "begin", "or", "end", "that", "will", "further", "reposition", "the", "handler", "at", "the", "two", "extremes", "." ]
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/user.py#L196-L230
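The sort key above reduces to a `(position, level, name)` tuple, where position is -1 for 'begin', 1 for 'end', and 0 otherwise. A self-contained sketch with made-up handler data (ignoring the top-down flip, which only reverses the level component):

# Illustrative reduction of macro_precedence_sorter; data is invented.
configs = {
    'on_a': {'level': 0},
    'on_b': {'level': 1},
    'setup': {'level': 1, 'begin': True},
    'teardown': {'level': 0, 'end': True},
}

def key(hname):
    data = configs[hname]
    pos = -1 if 'begin' in data else (1 if 'end' in data else 0)
    return (pos, data['level'], hname)

print(sorted(configs, key=key))
# ['setup', 'on_a', 'on_b', 'teardown']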
metapensiero/metapensiero.signal
src/metapensiero/signal/user.py
SignalAndHandlerInitMeta.instance_signals_and_handlers
def instance_signals_and_handlers(cls, instance): """Calculate per-instance signals and handlers.""" isignals = cls._signals.copy() ihandlers = cls._build_instance_handler_mapping( instance, cls._signal_handlers ) return isignals, ihandlers
python
def instance_signals_and_handlers(cls, instance): """Calculate per-instance signals and handlers.""" isignals = cls._signals.copy() ihandlers = cls._build_instance_handler_mapping( instance, cls._signal_handlers ) return isignals, ihandlers
[ "def", "instance_signals_and_handlers", "(", "cls", ",", "instance", ")", ":", "isignals", "=", "cls", ".", "_signals", ".", "copy", "(", ")", "ihandlers", "=", "cls", ".", "_build_instance_handler_mapping", "(", "instance", ",", "cls", ".", "_signal_handlers", ")", "return", "isignals", ",", "ihandlers" ]
Calculate per-instance signals and handlers.
[ "Calculate", "per", "-", "instance", "signals", "and", "handlers", "." ]
train
https://github.com/metapensiero/metapensiero.signal/blob/1cbbb2e4bff00bf4887163b08b70d278e472bfe3/src/metapensiero/signal/user.py#L232-L240
meng89/ipodshuffle
ipodshuffle/storage/log.py
Storage.add
def add(self, src): """ :param src: file path :return: checksum value """ checksum = get_checksum(src) filename = self.get_filename(checksum) if not filename: new_name = self._get_new_name() new_realpath = self._storage_dir + '/' + new_name os.makedirs(os.path.split(new_realpath)[0], exist_ok=True) shutil.copyfile(src, new_realpath) self._log[new_name] = { 'checksum': checksum, 'mtime': os.path.getmtime(new_realpath), 'size': os.path.getsize(new_realpath) } self.write_log() return checksum
python
def add(self, src): """ :param src: file path :return: checksum value """ checksum = get_checksum(src) filename = self.get_filename(checksum) if not filename: new_name = self._get_new_name() new_realpath = self._storage_dir + '/' + new_name os.makedirs(os.path.split(new_realpath)[0], exist_ok=True) shutil.copyfile(src, new_realpath) self._log[new_name] = { 'checksum': checksum, 'mtime': os.path.getmtime(new_realpath), 'size': os.path.getsize(new_realpath) } self.write_log() return checksum
[ "def", "add", "(", "self", ",", "src", ")", ":", "checksum", "=", "get_checksum", "(", "src", ")", "filename", "=", "self", ".", "get_filename", "(", "checksum", ")", "if", "not", "filename", ":", "new_name", "=", "self", ".", "_get_new_name", "(", ")", "new_realpath", "=", "self", ".", "_storage_dir", "+", "'/'", "+", "new_name", "os", ".", "makedirs", "(", "os", ".", "path", ".", "split", "(", "new_realpath", ")", "[", "0", "]", ",", "exist_ok", "=", "True", ")", "shutil", ".", "copyfile", "(", "src", ",", "new_realpath", ")", "self", ".", "_log", "[", "new_name", "]", "=", "{", "'checksum'", ":", "checksum", ",", "'mtime'", ":", "os", ".", "path", ".", "getmtime", "(", "new_realpath", ")", ",", "'size'", ":", "os", ".", "path", ".", "getsize", "(", "new_realpath", ")", "}", "self", ".", "write_log", "(", ")", "return", "checksum" ]
:param src: file path :return: checksum value
[ ":", "param", "src", ":", "file", "path", ":", "return", ":", "checksum", "value" ]
train
https://github.com/meng89/ipodshuffle/blob/c9093dbb5cdac609376ebd3b4ef1b0fc58107d96/ipodshuffle/storage/log.py#L129-L155
meng89/ipodshuffle
ipodshuffle/storage/log.py
Storage.get_filename
def get_filename(self, checksum): """ :param checksum: checksum :return: filename without the storage base part """ filename = None for _filename, metadata in self._log.items(): if metadata['checksum'] == checksum: filename = _filename break return filename
python
def get_filename(self, checksum): """ :param checksum: checksum :return: filename without the storage base part """ filename = None for _filename, metadata in self._log.items(): if metadata['checksum'] == checksum: filename = _filename break return filename
[ "def", "get_filename", "(", "self", ",", "checksum", ")", ":", "filename", "=", "None", "for", "_filename", ",", "metadata", "in", "self", ".", "_log", ".", "items", "(", ")", ":", "if", "metadata", "[", "'checksum'", "]", "==", "checksum", ":", "filename", "=", "_filename", "break", "return", "filename" ]
:param checksum: checksum :return: filename without the storage base part
[ ":", "param", "checksum", ":", "checksum", ":", "return", ":", "filename", "no", "storage", "base", "part" ]
train
https://github.com/meng89/ipodshuffle/blob/c9093dbb5cdac609376ebd3b4ef1b0fc58107d96/ipodshuffle/storage/log.py#L157-L167
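Hypothetical usage of the two methods above as a content-addressed store; the constructor arguments and on-disk layout are assumptions for illustration, not the library's documented API.

# Illustrative only: constructor args and paths are assumed.
storage = Storage('/tmp/ipod_storage')
checksum = storage.add('song.mp3')          # copies the file in, logs metadata
name = storage.get_filename(checksum)       # generated name, relative to the store
assert storage.add('song.mp3') == checksum  # same content is not stored twice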
letuananh/chirptext
chirptext/arsenal.py
JiCache.__retrieve
def __retrieve(self, key): ''' Retrieve file location from cache DB ''' with self.get_conn() as conn: try: c = conn.cursor() if key is None: c.execute("SELECT value FROM cache_entries WHERE key IS NULL") else: c.execute("SELECT value FROM cache_entries WHERE key = ?", (key,)) result = c.fetchone() if result is None or len(result) != 1: getLogger().info("There's no entry with key={key}".format(key=key)) return None else: return result[0] except: getLogger().exception("Cannot retrieve") return None
python
def __retrieve(self, key): ''' Retrieve file location from cache DB ''' with self.get_conn() as conn: try: c = conn.cursor() if key is None: c.execute("SELECT value FROM cache_entries WHERE key IS NULL") else: c.execute("SELECT value FROM cache_entries WHERE key = ?", (key,)) result = c.fetchone() if result is None or len(result) != 1: getLogger().info("There's no entry with key={key}".format(key=key)) return None else: return result[0] except: getLogger().exception("Cannot retrieve") return None
[ "def", "__retrieve", "(", "self", ",", "key", ")", ":", "with", "self", ".", "get_conn", "(", ")", "as", "conn", ":", "try", ":", "c", "=", "conn", ".", "cursor", "(", ")", "if", "key", "is", "None", ":", "c", ".", "execute", "(", "\"SELECT value FROM cache_entries WHERE key IS NULL\"", ")", "else", ":", "c", ".", "execute", "(", "\"SELECT value FROM cache_entries WHERE key = ?\"", ",", "(", "key", ",", ")", ")", "result", "=", "c", ".", "fetchone", "(", ")", "if", "result", "is", "None", "or", "len", "(", "result", ")", "!=", "1", ":", "getLogger", "(", ")", ".", "info", "(", "\"There's no entry with key={key}\"", ".", "format", "(", "key", "=", "key", ")", ")", "return", "None", "else", ":", "return", "result", "[", "0", "]", "except", ":", "getLogger", "(", ")", ".", "exception", "(", "\"Cannot retrieve\"", ")", "return", "None" ]
Retrieve file location from cache DB
[ "Retrieve", "file", "location", "from", "cache", "DB" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/arsenal.py#L85-L103
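The separate `IS NULL` branch above exists because SQL equality never matches NULL; a runnable demonstration with an in-memory SQLite database (table shape simplified from the cache schema):

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute("CREATE TABLE cache_entries (key TEXT, value TEXT)")
conn.execute("INSERT INTO cache_entries VALUES (NULL, 'anonymous')")
# Equality never matches NULL, so the parameterized form finds nothing:
assert conn.execute("SELECT value FROM cache_entries WHERE key = ?",
                    (None,)).fetchone() is None
# The explicit IS NULL form is required instead:
assert conn.execute("SELECT value FROM cache_entries WHERE key IS NULL"
                    ).fetchone() == ('anonymous',)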
letuananh/chirptext
chirptext/arsenal.py
JiCache.__insert
def __insert(self, key, value): ''' Insert a new key into the database ''' if key in self: getLogger().warning("Cache entry exists, cannot insert a new entry with key='{key}'".format(key=key)) return False with self.get_conn() as conn: try: c = conn.cursor() c.execute("INSERT INTO cache_entries (key, value) VALUES (?,?)", (key, value)) conn.commit() return True except Exception as e: # NOTE: A cache error can be forgiven, no? getLogger().debug("Cache Error: Cannot insert | Detail = %s" % (e,)) return False
python
def __insert(self, key, value): ''' Insert a new key into the database ''' if key in self: getLogger().warning("Cache entry exists, cannot insert a new entry with key='{key}'".format(key=key)) return False with self.get_conn() as conn: try: c = conn.cursor() c.execute("INSERT INTO cache_entries (key, value) VALUES (?,?)", (key, value)) conn.commit() return True except Exception as e: # NOTE: A cache error can be forgiven, no? getLogger().debug("Cache Error: Cannot insert | Detail = %s" % (e,)) return False
[ "def", "__insert", "(", "self", ",", "key", ",", "value", ")", ":", "if", "key", "in", "self", ":", "getLogger", "(", ")", ".", "warning", "(", "\"Cache entry exists, cannot insert a new entry with key='{key}'\"", ".", "format", "(", "key", "=", "key", ")", ")", "return", "False", "with", "self", ".", "get_conn", "(", ")", "as", "conn", ":", "try", ":", "c", "=", "conn", ".", "cursor", "(", ")", "c", ".", "execute", "(", "\"INSERT INTO cache_entries (key, value) VALUES (?,?)\"", ",", "(", "key", ",", "value", ")", ")", "conn", ".", "commit", "(", ")", "return", "True", "except", "Exception", "as", "e", ":", "# NOTE: A cache error can be forgiven, no?", "getLogger", "(", ")", ".", "debug", "(", "\"Cache Error: Cannot insert | Detail = %s\"", "%", "(", "e", ",", ")", ")", "return", "False" ]
Insert a new key into the database
[ "Insert", "a", "new", "key", "to", "database" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/arsenal.py#L111-L127
letuananh/chirptext
chirptext/arsenal.py
JiCache.__delete
def __delete(self, key): ''' Delete file key from database ''' with self.get_conn() as conn: try: c = conn.cursor() c.execute("DELETE FROM cache_entries WHERE key = ?", (key,)) conn.commit() except: getLogger().exception("Cannot delete") return None
python
def __delete(self, key): ''' Delete file key from database ''' with self.get_conn() as conn: try: c = conn.cursor() c.execute("DELETE FROM cache_entries WHERE key = ?", (key,)) conn.commit() except: getLogger().exception("Cannot delete") return None
[ "def", "__delete", "(", "self", ",", "key", ")", ":", "with", "self", ".", "get_conn", "(", ")", "as", "conn", ":", "try", ":", "c", "=", "conn", ".", "cursor", "(", ")", "c", ".", "execute", "(", "\"DELETE FROM cache_entries WHERE key = ?\"", ",", "(", "key", ",", ")", ")", "conn", ".", "commit", "(", ")", "except", ":", "getLogger", "(", ")", ".", "exception", "(", "\"Cannot delete\"", ")", "return", "None" ]
Delete file key from database
[ "Delete", "file", "key", "from", "database" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/arsenal.py#L129-L139
letuananh/chirptext
chirptext/arsenal.py
JiCache.__insert_internal_blob
def __insert_internal_blob(self, key, blob, compressed=True): ''' This method will insert blob data into the blob table ''' with self.get_conn() as conn: conn.isolation_level = None c = conn.cursor() try: compressed_flag = 1 if compressed else 0 if compressed: blob = zlib.compress(blob) c.execute("BEGIN") c.execute("INSERT INTO cache_entries (key, value) VALUES (?,?)", (key, JiCache.INTERNAL_BLOB)) c.execute("INSERT INTO blob_entries (key, compressed, blob_data) VALUES (?,?,?)", (key, compressed_flag, sqlite3.Binary(blob),)) c.execute("COMMIT") return True except: getLogger().debug("Cannot insert") return False
python
def __insert_internal_blob(self, key, blob, compressed=True): ''' This method will insert blob data into the blob table ''' with self.get_conn() as conn: conn.isolation_level = None c = conn.cursor() try: compressed_flag = 1 if compressed else 0 if compressed: blob = zlib.compress(blob) c.execute("BEGIN") c.execute("INSERT INTO cache_entries (key, value) VALUES (?,?)", (key, JiCache.INTERNAL_BLOB)) c.execute("INSERT INTO blob_entries (key, compressed, blob_data) VALUES (?,?,?)", (key, compressed_flag, sqlite3.Binary(blob),)) c.execute("COMMIT") return True except: getLogger().debug("Cannot insert") return False
[ "def", "__insert_internal_blob", "(", "self", ",", "key", ",", "blob", ",", "compressed", "=", "True", ")", ":", "with", "self", ".", "get_conn", "(", ")", "as", "conn", ":", "conn", ".", "isolation_level", "=", "None", "c", "=", "conn", ".", "cursor", "(", ")", "try", ":", "compressed_flag", "=", "1", "if", "compressed", "else", "0", "if", "compressed", ":", "blob", "=", "zlib", ".", "compress", "(", "blob", ")", "c", ".", "execute", "(", "\"BEGIN\"", ")", "c", ".", "execute", "(", "\"INSERT INTO cache_entries (key, value) VALUES (?,?)\"", ",", "(", "key", ",", "JiCache", ".", "INTERNAL_BLOB", ")", ")", "c", ".", "execute", "(", "\"INSERT INTO blob_entries (key, compressed, blob_data) VALUES (?,?,?)\"", ",", "(", "key", ",", "compressed_flag", ",", "sqlite3", ".", "Binary", "(", "blob", ")", ",", ")", ")", "c", ".", "execute", "(", "\"COMMIT\"", ")", "return", "True", "except", ":", "getLogger", "(", ")", ".", "debug", "(", "\"Cannot insert\"", ")", "return", "False" ]
This method will insert blob data into the blob table
[ "This", "method", "will", "insert", "blob", "data", "to", "blob", "table" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/arsenal.py#L143-L160
letuananh/chirptext
chirptext/arsenal.py
JiCache.__delete_internal_blob
def __delete_internal_blob(self, key): ''' This method will delete blob data from the blob table ''' with self.get_conn() as conn: conn.isolation_level = None try: c = conn.cursor() c.execute("BEGIN") if key is None: c.execute("DELETE FROM cache_entries WHERE key IS NULL") c.execute("DELETE FROM blob_entries WHERE KEY IS NULL") else: c.execute("DELETE FROM cache_entries WHERE key = ?", (key,)) c.execute("DELETE FROM blob_entries WHERE KEY = ?", (key,)) c.execute("COMMIT") except: getLogger().debug("Cannot delete") return False return True
python
def __delete_internal_blob(self, key): ''' This method will delete blob data from the blob table ''' with self.get_conn() as conn: conn.isolation_level = None try: c = conn.cursor() c.execute("BEGIN") if key is None: c.execute("DELETE FROM cache_entries WHERE key IS NULL") c.execute("DELETE FROM blob_entries WHERE KEY IS NULL") else: c.execute("DELETE FROM cache_entries WHERE key = ?", (key,)) c.execute("DELETE FROM blob_entries WHERE KEY = ?", (key,)) c.execute("COMMIT") except: getLogger().debug("Cannot delete") return False return True
[ "def", "__delete_internal_blob", "(", "self", ",", "key", ")", ":", "with", "self", ".", "get_conn", "(", ")", "as", "conn", ":", "conn", ".", "isolation_level", "=", "None", "try", ":", "c", "=", "conn", ".", "cursor", "(", ")", "c", ".", "execute", "(", "\"BEGIN\"", ")", "if", "key", "is", "None", ":", "c", ".", "execute", "(", "\"DELETE FROM cache_entries WHERE key IS NULL\"", ")", "c", ".", "execute", "(", "\"DELETE FROM blob_entries WHERE KEY IS NULL\"", ")", "else", ":", "c", ".", "execute", "(", "\"DELETE FROM cache_entries WHERE key = ?\"", ",", "(", "key", ",", ")", ")", "c", ".", "execute", "(", "\"DELETE FROM blob_entries WHERE KEY = ?\"", ",", "(", "key", ",", ")", ")", "c", ".", "execute", "(", "\"COMMIT\"", ")", "except", ":", "getLogger", "(", ")", ".", "debug", "(", "\"Cannot delete\"", ")", "return", "False", "return", "True" ]
This method will delete blob data from the blob table
[ "This", "method", "will", "insert", "blob", "data", "to", "blob", "table" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/arsenal.py#L162-L180
letuananh/chirptext
chirptext/arsenal.py
JiCache.__retrieve_internal_blob
def __retrieve_internal_blob(self, key): ''' Retrieve blob data from the cache DB ''' logger = getLogger() with self.get_conn() as conn: try: c = conn.cursor() if key is None: c.execute("SELECT compressed, blob_data FROM blob_entries WHERE KEY IS NULL") else: c.execute("SELECT compressed, blob_data FROM blob_entries WHERE KEY = ?", (key,)) result = c.fetchone() if not result: logger.debug("There's no blob entry with key={key}".format(key=key)) logger.debug("result = {res}".format(res=result)) return None else: compressed, blob_data = result logger.debug("retrieving internal BLOB (key={key} | len={ln} | compressed={c})".format(key=key, ln=len(blob_data), c=compressed)) return blob_data if not compressed else zlib.decompress(blob_data) except: getLogger().exception("Cannot retrieve internal blob (key={})".format(key)) return None return True
python
def __retrieve_internal_blob(self, key): ''' Retrieve blob data from the cache DB ''' logger = getLogger() with self.get_conn() as conn: try: c = conn.cursor() if key is None: c.execute("SELECT compressed, blob_data FROM blob_entries WHERE KEY IS NULL") else: c.execute("SELECT compressed, blob_data FROM blob_entries WHERE KEY = ?", (key,)) result = c.fetchone() if not result: logger.debug("There's no blob entry with key={key}".format(key=key)) logger.debug("result = {res}".format(res=result)) return None else: compressed, blob_data = result logger.debug("retrieving internal BLOB (key={key} | len={ln} | compressed={c})".format(key=key, ln=len(blob_data), c=compressed)) return blob_data if not compressed else zlib.decompress(blob_data) except: getLogger().exception("Cannot retrieve internal blob (key={})".format(key)) return None return True
[ "def", "__retrieve_internal_blob", "(", "self", ",", "key", ")", ":", "logger", "=", "getLogger", "(", ")", "with", "self", ".", "get_conn", "(", ")", "as", "conn", ":", "try", ":", "c", "=", "conn", ".", "cursor", "(", ")", "if", "key", "is", "None", ":", "c", ".", "execute", "(", "\"SELECT compressed, blob_data FROM blob_entries WHERE KEY IS NULL\"", ")", "else", ":", "c", ".", "execute", "(", "\"SELECT compressed, blob_data FROM blob_entries WHERE KEY = ?\"", ",", "(", "key", ",", ")", ")", "result", "=", "c", ".", "fetchone", "(", ")", "if", "not", "result", ":", "logger", ".", "debug", "(", "\"There's no blob entry with key={key}\"", ".", "format", "(", "key", "=", "key", ")", ")", "logger", ".", "debug", "(", "\"result = {res}\"", ".", "format", "(", "res", "=", "result", ")", ")", "return", "None", "else", ":", "compressed", ",", "blob_data", "=", "result", "logger", ".", "debug", "(", "\"retrieving internal BLOB (key={key} | len={ln} | compressed={c})\"", ".", "format", "(", "key", "=", "key", ",", "ln", "=", "len", "(", "blob_data", ")", ",", "c", "=", "compressed", ")", ")", "return", "blob_data", "if", "not", "compressed", "else", "zlib", ".", "decompress", "(", "blob_data", ")", "except", ":", "getLogger", "(", ")", ".", "exception", "(", "\"Cannot retrieve internal blob (key={})\"", ".", "format", "(", "key", ")", ")", "return", "None", "return", "True" ]
Retrieve blob data from the cache DB
[ "Retrieve", "file", "location", "from", "cache", "DB" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/arsenal.py#L182-L205
letuananh/chirptext
chirptext/arsenal.py
JiCache.retrieve_blob
def retrieve_blob(self, key, encoding=None): ''' Retrieve blob in binary format (or string format if encoding is provided) ''' blob_key = self.__retrieve(key) if blob_key is None: return None if not blob_key: raise Exception("Invalid blob_key") elif blob_key == JiCache.INTERNAL_BLOB: blob_data = self.__retrieve_internal_blob(key) return blob_data if not encoding else blob_data.decode(encoding) else: getLogger().debug("Key[{key}] -> [{blob_key}]".format(key=key, blob_key=blob_key)) blob_file = os.path.join(self.blob_location, blob_key) return FileHelper.read(blob_file)
python
def retrieve_blob(self, key, encoding=None): ''' Retrieve blob in binary format (or string format if encoding is provided) ''' blob_key = self.__retrieve(key) if blob_key is None: return None if not blob_key: raise Exception("Invalid blob_key") elif blob_key == JiCache.INTERNAL_BLOB: blob_data = self.__retrieve_internal_blob(key) return blob_data if not encoding else blob_data.decode(encoding) else: getLogger().debug("Key[{key}] -> [{blob_key}]".format(key=key, blob_key=blob_key)) blob_file = os.path.join(self.blob_location, blob_key) return FileHelper.read(blob_file)
[ "def", "retrieve_blob", "(", "self", ",", "key", ",", "encoding", "=", "None", ")", ":", "blob_key", "=", "self", ".", "__retrieve", "(", "key", ")", "if", "blob_key", "is", "None", ":", "return", "None", "if", "not", "blob_key", ":", "raise", "Exception", "(", "\"Invalid blob_key\"", ")", "elif", "blob_key", "==", "JiCache", ".", "INTERNAL_BLOB", ":", "blob_data", "=", "self", ".", "__retrieve_internal_blob", "(", "key", ")", "return", "blob_data", "if", "not", "encoding", "else", "blob_data", ".", "decode", "(", "encoding", ")", "else", ":", "getLogger", "(", ")", ".", "debug", "(", "\"Key[{key}] -> [{blob_key}]\"", ".", "format", "(", "key", "=", "key", ",", "blob_key", "=", "blob_key", ")", ")", "blob_file", "=", "os", ".", "path", ".", "join", "(", "self", ".", "blob_location", ",", "blob_key", ")", "return", "FileHelper", ".", "read", "(", "blob_file", ")" ]
Retrieve blob in binary format (or string format if encoding is provided)
[ "Retrieve", "blob", "in", "binary", "format", "(", "or", "string", "format", "if", "encoding", "is", "provided", ")" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/arsenal.py#L233-L246
pyout/pyout
pyout/summary.py
Summary.summarize
def summarize(self, rows): """Return summary rows for `rows`. Parameters ---------- rows : list of dicts Normalized rows to summarize. Returns ------- A list of summary rows. Each row is a tuple where the first item is the data and the second is a dict of keyword arguments that can be passed to StyleFields.render. """ columns = list(rows[0].keys()) agg_styles = {c: self.style[c]["aggregate"] for c in columns if "aggregate" in self.style[c]} summaries = {} for col, agg_fn in agg_styles.items(): lgr.debug("Summarizing column %r with %r", col, agg_fn) colvals = filter(lambda x: not isinstance(x, Nothing), (row[col] for row in rows)) summaries[col] = agg_fn(list(colvals)) # The rest is just restructuring the summaries into rows that are # compatible with pyout.Content. Most of the complexity below comes from # the fact that a summary function is allowed to return either a single # item or a list of items. maxlen = max(len(v) if isinstance(v, list) else 1 for v in summaries.values()) summary_rows = [] for rowidx in range(maxlen): sumrow = {} for column, values in summaries.items(): if isinstance(values, list): if rowidx >= len(values): continue sumrow[column] = values[rowidx] elif rowidx == 0: sumrow[column] = values for column in columns: if column not in sumrow: sumrow[column] = "" summary_rows.append((sumrow, {"style": self.style.get("aggregate_"), "adopt": False})) return summary_rows
python
def summarize(self, rows): """Return summary rows for `rows`. Parameters ---------- rows : list of dicts Normalized rows to summarize. Returns ------- A list of summary rows. Each row is a tuple where the first item is the data and the second is a dict of keyword arguments that can be passed to StyleFields.render. """ columns = list(rows[0].keys()) agg_styles = {c: self.style[c]["aggregate"] for c in columns if "aggregate" in self.style[c]} summaries = {} for col, agg_fn in agg_styles.items(): lgr.debug("Summarizing column %r with %r", col, agg_fn) colvals = filter(lambda x: not isinstance(x, Nothing), (row[col] for row in rows)) summaries[col] = agg_fn(list(colvals)) # The rest is just restructuring the summaries into rows that are # compatible with pyout.Content. Most of the complexity below comes from # the fact that a summary function is allowed to return either a single # item or a list of items. maxlen = max(len(v) if isinstance(v, list) else 1 for v in summaries.values()) summary_rows = [] for rowidx in range(maxlen): sumrow = {} for column, values in summaries.items(): if isinstance(values, list): if rowidx >= len(values): continue sumrow[column] = values[rowidx] elif rowidx == 0: sumrow[column] = values for column in columns: if column not in sumrow: sumrow[column] = "" summary_rows.append((sumrow, {"style": self.style.get("aggregate_"), "adopt": False})) return summary_rows
[ "def", "summarize", "(", "self", ",", "rows", ")", ":", "columns", "=", "list", "(", "rows", "[", "0", "]", ".", "keys", "(", ")", ")", "agg_styles", "=", "{", "c", ":", "self", ".", "style", "[", "c", "]", "[", "\"aggregate\"", "]", "for", "c", "in", "columns", "if", "\"aggregate\"", "in", "self", ".", "style", "[", "c", "]", "}", "summaries", "=", "{", "}", "for", "col", ",", "agg_fn", "in", "agg_styles", ".", "items", "(", ")", ":", "lgr", ".", "debug", "(", "\"Summarizing column %r with %r\"", ",", "col", ",", "agg_fn", ")", "colvals", "=", "filter", "(", "lambda", "x", ":", "not", "isinstance", "(", "x", ",", "Nothing", ")", ",", "(", "row", "[", "col", "]", "for", "row", "in", "rows", ")", ")", "summaries", "[", "col", "]", "=", "agg_fn", "(", "list", "(", "colvals", ")", ")", "# The rest is just restructuring the summaries into rows that are", "# compatible with pyout.Content. Most the complexity below comes from", "# the fact that a summary function is allowed to return either a single", "# item or a list of items.", "maxlen", "=", "max", "(", "len", "(", "v", ")", "if", "isinstance", "(", "v", ",", "list", ")", "else", "1", "for", "v", "in", "summaries", ".", "values", "(", ")", ")", "summary_rows", "=", "[", "]", "for", "rowidx", "in", "range", "(", "maxlen", ")", ":", "sumrow", "=", "{", "}", "for", "column", ",", "values", "in", "summaries", ".", "items", "(", ")", ":", "if", "isinstance", "(", "values", ",", "list", ")", ":", "if", "rowidx", ">=", "len", "(", "values", ")", ":", "continue", "sumrow", "[", "column", "]", "=", "values", "[", "rowidx", "]", "elif", "rowidx", "==", "0", ":", "sumrow", "[", "column", "]", "=", "values", "for", "column", "in", "columns", ":", "if", "column", "not", "in", "sumrow", ":", "sumrow", "[", "column", "]", "=", "\"\"", "summary_rows", ".", "append", "(", "(", "sumrow", ",", "{", "\"style\"", ":", "self", ".", "style", ".", "get", "(", "\"aggregate_\"", ")", ",", "\"adopt\"", ":", "False", "}", ")", ")", "return", "summary_rows" ]
Return summary rows for `rows`. Parameters ---------- rows : list of dicts Normalized rows to summarize. Returns ------- A list of summary rows. Each row is a tuple where the first item is the data and the second is a dict of keyword arguments that can be passed to StyleFields.render.
[ "Return", "summary", "rows", "for", "rows", "." ]
train
https://github.com/pyout/pyout/blob/d9ff954bdedb6fc70f21f4fe77ad4bf926b201b0/pyout/summary.py#L33-L82
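A standalone sketch of the restructuring described in the comment above, with plain data in place of pyout's style machinery; aggregate results may be a scalar (one summary row) or a list (several rows), and shorter lists leave blanks. Column names here are made up.

summaries = {'status': ['3 ok', '1 failed'], 'size': 4096}
columns = ['name', 'status', 'size']
maxlen = max(len(v) if isinstance(v, list) else 1 for v in summaries.values())
rows = []
for i in range(maxlen):
    row = {c: '' for c in columns}
    for col, vals in summaries.items():
        if isinstance(vals, list):
            if i < len(vals):
                row[col] = vals[i]
        elif i == 0:
            row[col] = vals
    rows.append(row)
# rows == [{'name': '', 'status': '3 ok', 'size': 4096},
#          {'name': '', 'status': '1 failed', 'size': ''}]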
pyout/pyout
pyout/interface.py
Writer._init
def _init(self, style, streamer, processors=None): """Do writer-specific setup. Parameters ---------- style : dict Style, as passed to __init__. streamer : interface.Stream A stream interface that takes __init__'s `stream` and `interactive` arguments into account. processors : field.StyleProcessors, optional A writer-specific processors instance. Defaults to field.PlainProcessors(). """ self._stream = streamer if streamer.interactive: if streamer.supports_updates: self.mode = "update" else: self.mode = "incremental" else: self.mode = "final" if style and "width_" not in style and self._stream.width: style["width_"] = self._stream.width self._content = ContentWithSummary( StyleFields(style, processors or PlainProcessors()))
python
def _init(self, style, streamer, processors=None): """Do writer-specific setup. Parameters ---------- style : dict Style, as passed to __init__. streamer : interface.Stream A stream interface that takes __init__'s `stream` and `interactive` arguments into account. processors : field.StyleProcessors, optional A writer-specific processors instance. Defaults to field.PlainProcessors(). """ self._stream = streamer if streamer.interactive: if streamer.supports_updates: self.mode = "update" else: self.mode = "incremental" else: self.mode = "final" if style and "width_" not in style and self._stream.width: style["width_"] = self._stream.width self._content = ContentWithSummary( StyleFields(style, processors or PlainProcessors()))
[ "def", "_init", "(", "self", ",", "style", ",", "streamer", ",", "processors", "=", "None", ")", ":", "self", ".", "_stream", "=", "streamer", "if", "streamer", ".", "interactive", ":", "if", "streamer", ".", "supports_updates", ":", "self", ".", "mode", "=", "\"update\"", "else", ":", "self", ".", "mode", "=", "\"incremental\"", "else", ":", "self", ".", "mode", "=", "\"final\"", "if", "style", "and", "\"width_\"", "not", "in", "style", "and", "self", ".", "_stream", ".", "width", ":", "style", "[", "\"width_\"", "]", "=", "self", ".", "_stream", ".", "width", "self", ".", "_content", "=", "ContentWithSummary", "(", "StyleFields", "(", "style", ",", "processors", "or", "PlainProcessors", "(", ")", ")", ")" ]
Do writer-specific setup. Parameters ---------- style : dict Style, as passed to __init__. streamer : interface.Stream A stream interface that takes __init__'s `stream` and `interactive` arguments into account. processors : field.StyleProcessors, optional A writer-specific processors instance. Defaults to field.PlainProcessors().
[ "Do", "writer", "-", "specific", "setup", "." ]
train
https://github.com/pyout/pyout/blob/d9ff954bdedb6fc70f21f4fe77ad4bf926b201b0/pyout/interface.py#L109-L135
pyout/pyout
pyout/interface.py
Writer.ids
def ids(self): """A list of unique IDs used to identify a row. If not explicitly set, it defaults to the first column name. """ if self._ids is None: if self._columns: if isinstance(self._columns, OrderedDict): return [list(self._columns.keys())[0]] return [self._columns[0]] else: return self._ids
python
def ids(self): """A list of unique IDs used to identify a row. If not explicitly set, it defaults to the first column name. """ if self._ids is None: if self._columns: if isinstance(self._columns, OrderedDict): return [list(self._columns.keys())[0]] return [self._columns[0]] else: return self._ids
[ "def", "ids", "(", "self", ")", ":", "if", "self", ".", "_ids", "is", "None", ":", "if", "self", ".", "_columns", ":", "if", "isinstance", "(", "self", ".", "_columns", ",", "OrderedDict", ")", ":", "return", "[", "list", "(", "self", ".", "_columns", ".", "keys", "(", ")", ")", "[", "0", "]", "]", "return", "[", "self", ".", "_columns", "[", "0", "]", "]", "else", ":", "return", "self", ".", "_ids" ]
A list of unique IDs used to identify a row. If not explicitly set, it defaults to the first column name.
[ "A", "list", "of", "unique", "IDs", "used", "to", "identify", "a", "row", "." ]
train
https://github.com/pyout/pyout/blob/d9ff954bdedb6fc70f21f4fe77ad4bf926b201b0/pyout/interface.py#L188-L199
pyout/pyout
pyout/interface.py
Writer.wait
def wait(self): """Wait for asynchronous calls to return. """ if self._pool is None: return self._pool.close() self._pool.join()
python
def wait(self): """Wait for asynchronous calls to return. """ if self._pool is None: return self._pool.close() self._pool.join()
[ "def", "wait", "(", "self", ")", ":", "if", "self", ".", "_pool", "is", "None", ":", "return", "self", ".", "_pool", ".", "close", "(", ")", "self", ".", "_pool", ".", "join", "(", ")" ]
Wait for asynchronous calls to return.
[ "Wait", "for", "asynchronous", "calls", "to", "return", "." ]
train
https://github.com/pyout/pyout/blob/d9ff954bdedb6fc70f21f4fe77ad4bf926b201b0/pyout/interface.py#L205-L211
pyout/pyout
pyout/interface.py
Writer._write_lock
def _write_lock(self): """Acquire and release the lock around output calls. This should allow multiple threads or processes to write output reliably. Code that modifies the `_content` attribute should also do so within this context. """ if self._lock: lgr.debug("Acquiring write lock") self._lock.acquire() try: yield finally: if self._lock: lgr.debug("Releasing write lock") self._lock.release()
python
def _write_lock(self): """Acquire and release the lock around output calls. This should allow multiple threads or processes to write output reliably. Code that modifies the `_content` attribute should also do so within this context. """ if self._lock: lgr.debug("Acquiring write lock") self._lock.acquire() try: yield finally: if self._lock: lgr.debug("Releasing write lock") self._lock.release()
[ "def", "_write_lock", "(", "self", ")", ":", "if", "self", ".", "_lock", ":", "lgr", ".", "debug", "(", "\"Acquiring write lock\"", ")", "self", ".", "_lock", ".", "acquire", "(", ")", "try", ":", "yield", "finally", ":", "if", "self", ".", "_lock", ":", "lgr", ".", "debug", "(", "\"Releasing write lock\"", ")", "self", ".", "_lock", ".", "release", "(", ")" ]
Acquire and release the lock around output calls. This should allow multiple threads or processes to write output reliably. Code that modifies the `_content` attribute should also do so within this context.
[ "Acquire", "and", "release", "the", "lock", "around", "output", "calls", "." ]
train
https://github.com/pyout/pyout/blob/d9ff954bdedb6fc70f21f4fe77ad4bf926b201b0/pyout/interface.py#L214-L229
pyout/pyout
pyout/interface.py
Writer._start_callables
def _start_callables(self, row, callables): """Start running `callables` asynchronously. """ id_vals = {c: row[c] for c in self.ids} def callback(tab, cols, result): if isinstance(result, Mapping): pass elif isinstance(result, tuple): result = dict(zip(cols, result)) elif len(cols) == 1: # Don't bother raising an exception if cols != 1 # because it would be lost in the thread. result = {cols[0]: result} result.update(id_vals) tab._write(result) if self._pool is None: self._pool = Pool() if self._lock is None: self._lock = multiprocessing.Lock() for cols, fn in callables: cb_func = partial(callback, self, cols) gen = None if inspect.isgeneratorfunction(fn): gen = fn() elif inspect.isgenerator(fn): gen = fn if gen: def callback_for_each(): for i in gen: cb_func(i) self._pool.apply_async(callback_for_each) else: self._pool.apply_async(fn, callback=cb_func)
python
def _start_callables(self, row, callables): """Start running `callables` asynchronously. """ id_vals = {c: row[c] for c in self.ids} def callback(tab, cols, result): if isinstance(result, Mapping): pass elif isinstance(result, tuple): result = dict(zip(cols, result)) elif len(cols) == 1: # Don't bother raising an exception if cols != 1 # because it would be lost in the thread. result = {cols[0]: result} result.update(id_vals) tab._write(result) if self._pool is None: self._pool = Pool() if self._lock is None: self._lock = multiprocessing.Lock() for cols, fn in callables: cb_func = partial(callback, self, cols) gen = None if inspect.isgeneratorfunction(fn): gen = fn() elif inspect.isgenerator(fn): gen = fn if gen: def callback_for_each(): for i in gen: cb_func(i) self._pool.apply_async(callback_for_each) else: self._pool.apply_async(fn, callback=cb_func)
[ "def", "_start_callables", "(", "self", ",", "row", ",", "callables", ")", ":", "id_vals", "=", "{", "c", ":", "row", "[", "c", "]", "for", "c", "in", "self", ".", "ids", "}", "def", "callback", "(", "tab", ",", "cols", ",", "result", ")", ":", "if", "isinstance", "(", "result", ",", "Mapping", ")", ":", "pass", "elif", "isinstance", "(", "result", ",", "tuple", ")", ":", "result", "=", "dict", "(", "zip", "(", "cols", ",", "result", ")", ")", "elif", "len", "(", "cols", ")", "==", "1", ":", "# Don't bother raising an exception if cols != 1", "# because it would be lost in the thread.", "result", "=", "{", "cols", "[", "0", "]", ":", "result", "}", "result", ".", "update", "(", "id_vals", ")", "tab", ".", "_write", "(", "result", ")", "if", "self", ".", "_pool", "is", "None", ":", "self", ".", "_pool", "=", "Pool", "(", ")", "if", "self", ".", "_lock", "is", "None", ":", "self", ".", "_lock", "=", "multiprocessing", ".", "Lock", "(", ")", "for", "cols", ",", "fn", "in", "callables", ":", "cb_func", "=", "partial", "(", "callback", ",", "self", ",", "cols", ")", "gen", "=", "None", "if", "inspect", ".", "isgeneratorfunction", "(", "fn", ")", ":", "gen", "=", "fn", "(", ")", "elif", "inspect", ".", "isgenerator", "(", "fn", ")", ":", "gen", "=", "fn", "if", "gen", ":", "def", "callback_for_each", "(", ")", ":", "for", "i", "in", "gen", ":", "cb_func", "(", "i", ")", "self", ".", "_pool", ".", "apply_async", "(", "callback_for_each", ")", "else", ":", "self", ".", "_pool", ".", "apply_async", "(", "fn", ",", "callback", "=", "cb_func", ")" ]
Start running `callables` asynchronously.
[ "Start", "running", "callables", "asynchronously", "." ]
train
https://github.com/pyout/pyout/blob/d9ff954bdedb6fc70f21f4fe77ad4bf926b201b0/pyout/interface.py#L292-L329
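A sketch of just the result-normalization rules applied inside the callback above: a mapping passes through, a tuple is zipped with the column names, and a lone value is wrapped under the single column. The helper name is invented for illustration.

from collections.abc import Mapping

def normalize(cols, result):
    if isinstance(result, Mapping):
        return dict(result)
    if isinstance(result, tuple):
        return dict(zip(cols, result))
    if len(cols) == 1:
        return {cols[0]: result}
    # The original deliberately skips this check so an exception
    # isn't lost inside a worker thread; raising here is for clarity.
    raise ValueError("ambiguous result for %d columns" % len(cols))

assert normalize(['a', 'b'], (1, 2)) == {'a': 1, 'b': 2}
assert normalize(['a'], 42) == {'a': 42}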
smnorris/pgdata
pgdata/database.py
Database.schemas
def schemas(self): """ Get a listing of all non-system schemas (those not prefixed with 'pg_') that exist in the database. """ sql = """SELECT schema_name FROM information_schema.schemata ORDER BY schema_name""" schemas = self.query(sql).fetchall() return [s[0] for s in schemas if s[0][:3] != "pg_"]
python
def schemas(self): """ Get a listing of all non-system schemas (those not prefixed with 'pg_') that exist in the database. """ sql = """SELECT schema_name FROM information_schema.schemata ORDER BY schema_name""" schemas = self.query(sql).fetchall() return [s[0] for s in schemas if s[0][:3] != "pg_"]
[ "def", "schemas", "(", "self", ")", ":", "sql", "=", "\"\"\"SELECT schema_name FROM information_schema.schemata\n ORDER BY schema_name\"\"\"", "schemas", "=", "self", ".", "query", "(", "sql", ")", ".", "fetchall", "(", ")", "return", "[", "s", "[", "0", "]", "for", "s", "in", "schemas", "if", "s", "[", "0", "]", "[", ":", "3", "]", "!=", "\"pg_\"", "]" ]
Get a listing of all non-system schemas (those not prefixed with 'pg_') that exist in the database.
[ "Get", "a", "listing", "of", "all", "non", "-", "system", "schemas", "(", "prefixed", "with", "pg_", ")", "that", "exist", "in", "the", "database", "." ]
train
https://github.com/smnorris/pgdata/blob/8b0294024d5ef30b4ae9184888e2cc7004d1784e/pgdata/database.py#L47-L55
smnorris/pgdata
pgdata/database.py
Database.tables
def tables(self): """ Get a listing of all tables - if schema specified on connect, return unqualified table names in that schema - if no schema specified on connect, return all tables, with schema prefixes """ if self.schema: return self.tables_in_schema(self.schema) else: tables = [] for schema in self.schemas: tables = tables + [ schema + "." + t for t in self.tables_in_schema(schema) ] return tables
python
def tables(self): """ Get a listing of all tables - if schema specified on connect, return unqualified table names in that schema - if no schema specified on connect, return all tables, with schema prefixes """ if self.schema: return self.tables_in_schema(self.schema) else: tables = [] for schema in self.schemas: tables = tables + [ schema + "." + t for t in self.tables_in_schema(schema) ] return tables
[ "def", "tables", "(", "self", ")", ":", "if", "self", ".", "schema", ":", "return", "self", ".", "tables_in_schema", "(", "self", ".", "schema", ")", "else", ":", "tables", "=", "[", "]", "for", "schema", "in", "self", ".", "schemas", ":", "tables", "=", "tables", "+", "[", "schema", "+", "\".\"", "+", "t", "for", "t", "in", "self", ".", "tables_in_schema", "(", "schema", ")", "]", "return", "tables" ]
Get a listing of all tables - if schema specified on connect, return unqualified table names in that schema - if no schema specified on connect, return all tables, with schema prefixes
[ "Get", "a", "listing", "of", "all", "tables", "-", "if", "schema", "specified", "on", "connect", "return", "unqualifed", "table", "names", "in", "that", "schema", "-", "in", "no", "schema", "specified", "on", "connect", "return", "all", "tables", "with", "schema", "prefixes" ]
train
https://github.com/smnorris/pgdata/blob/8b0294024d5ef30b4ae9184888e2cc7004d1784e/pgdata/database.py#L58-L74
smnorris/pgdata
pgdata/database.py
Database._valid_table_name
def _valid_table_name(self, table): """Check if the table name is obviously invalid. """ if table is None or not len(table.strip()): raise ValueError("Invalid table name: %r" % table) return table.strip()
python
def _valid_table_name(self, table): """Check if the table name is obviously invalid. """ if table is None or not len(table.strip()): raise ValueError("Invalid table name: %r" % table) return table.strip()
[ "def", "_valid_table_name", "(", "self", ",", "table", ")", ":", "if", "table", "is", "None", "or", "not", "len", "(", "table", ".", "strip", "(", ")", ")", ":", "raise", "ValueError", "(", "\"Invalid table name: %r\"", "%", "table", ")", "return", "table", ".", "strip", "(", ")" ]
Check if the table name is obviously invalid.
[ "Check", "if", "the", "table", "name", "is", "obviously", "invalid", "." ]
train
https://github.com/smnorris/pgdata/blob/8b0294024d5ef30b4ae9184888e2cc7004d1784e/pgdata/database.py#L87-L92
smnorris/pgdata
pgdata/database.py
Database.build_query
def build_query(self, sql, lookup): """ Modify table and field name variables in a sql string with a dict. This seems to be discouraged by psycopg2 docs but it makes small adjustments to large sql strings much easier, making prepped queries much more versatile. USAGE sql = 'SELECT $myInputField FROM $myInputTable' lookup = {'myInputField':'customer_id', 'myInputTable':'customers'} sql = db.build_query(sql, lookup) """ for key, val in six.iteritems(lookup): sql = sql.replace("$" + key, val) return sql
python
def build_query(self, sql, lookup): """ Modify table and field name variables in a sql string with a dict. This seems to be discouraged by psycopg2 docs but it makes small adjustments to large sql strings much easier, making prepped queries much more versatile. USAGE sql = 'SELECT $myInputField FROM $myInputTable' lookup = {'myInputField':'customer_id', 'myInputTable':'customers'} sql = db.build_query(sql, lookup) """ for key, val in six.iteritems(lookup): sql = sql.replace("$" + key, val) return sql
[ "def", "build_query", "(", "self", ",", "sql", ",", "lookup", ")", ":", "for", "key", ",", "val", "in", "six", ".", "iteritems", "(", "lookup", ")", ":", "sql", "=", "sql", ".", "replace", "(", "\"$\"", "+", "key", ",", "val", ")", "return", "sql" ]
Modify table and field name variables in a sql string with a dict. This seems to be discouraged by psycopg2 docs but it makes small adjustments to large sql strings much easier, making prepped queries much more versatile. USAGE sql = 'SELECT $myInputField FROM $myInputTable' lookup = {'myInputField':'customer_id', 'myInputTable':'customers'} sql = db.build_query(sql, lookup)
[ "Modify", "table", "and", "field", "name", "variables", "in", "a", "sql", "string", "with", "a", "dict", ".", "This", "seems", "to", "be", "discouraged", "by", "psycopg2", "docs", "but", "it", "makes", "small", "adjustments", "to", "large", "sql", "strings", "much", "easier", "making", "prepped", "queries", "much", "more", "versatile", "." ]
train
https://github.com/smnorris/pgdata/blob/8b0294024d5ef30b4ae9184888e2cc7004d1784e/pgdata/database.py#L94-L109
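The templating above is plain string substitution; a self-contained, runnable version of the docstring's own usage example:

# Standalone version of the $-placeholder substitution shown above.
def build_query(sql, lookup):
    for key, val in lookup.items():
        sql = sql.replace("$" + key, val)
    return sql

sql = build_query("SELECT $myInputField FROM $myInputTable",
                  {"myInputField": "customer_id", "myInputTable": "customers"})
assert sql == "SELECT customer_id FROM customers"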
smnorris/pgdata
pgdata/database.py
Database.tables_in_schema
def tables_in_schema(self, schema): """Get a listing of all tables in given schema """ sql = """SELECT table_name FROM information_schema.tables WHERE table_schema = %s""" return [t[0] for t in self.query(sql, (schema,)).fetchall()]
python
def tables_in_schema(self, schema): """Get a listing of all tables in given schema """ sql = """SELECT table_name FROM information_schema.tables WHERE table_schema = %s""" return [t[0] for t in self.query(sql, (schema,)).fetchall()]
[ "def", "tables_in_schema", "(", "self", ",", "schema", ")", ":", "sql", "=", "\"\"\"SELECT table_name\n FROM information_schema.tables\n WHERE table_schema = %s\"\"\"", "return", "[", "t", "[", "0", "]", "for", "t", "in", "self", ".", "query", "(", "sql", ",", "(", "schema", ",", ")", ")", ".", "fetchall", "(", ")", "]" ]
Get a listing of all tables in given schema
[ "Get", "a", "listing", "of", "all", "tables", "in", "given", "schema" ]
train
https://github.com/smnorris/pgdata/blob/8b0294024d5ef30b4ae9184888e2cc7004d1784e/pgdata/database.py#L111-L117
smnorris/pgdata
pgdata/database.py
Database.parse_table_name
def parse_table_name(self, table): """Parse schema qualified table name """ if "." in table: schema, table = table.split(".") else: schema = None return (schema, table)
python
def parse_table_name(self, table): """Parse schema qualified table name """ if "." in table: schema, table = table.split(".") else: schema = None return (schema, table)
[ "def", "parse_table_name", "(", "self", ",", "table", ")", ":", "if", "\".\"", "in", "table", ":", "schema", ",", "table", "=", "table", ".", "split", "(", "\".\"", ")", "else", ":", "schema", "=", "None", "return", "(", "schema", ",", "table", ")" ]
Parse schema qualified table name
[ "Parse", "schema", "qualified", "table", "name" ]
train
https://github.com/smnorris/pgdata/blob/8b0294024d5ef30b4ae9184888e2cc7004d1784e/pgdata/database.py#L119-L126
smnorris/pgdata
pgdata/database.py
Database.load_table
def load_table(self, table): """Loads a table. Returns None if the table does not already exist in db """ table = self._valid_table_name(table) schema, table = self.parse_table_name(table) if not schema: schema = self.schema tables = self.tables else: tables = self.tables_in_schema(schema) if table in tables: return Table(self, schema, table) else: return None
python
def load_table(self, table): """Loads a table. Returns None if the table does not already exist in db """ table = self._valid_table_name(table) schema, table = self.parse_table_name(table) if not schema: schema = self.schema tables = self.tables else: tables = self.tables_in_schema(schema) if table in tables: return Table(self, schema, table) else: return None
[ "def", "load_table", "(", "self", ",", "table", ")", ":", "table", "=", "self", ".", "_valid_table_name", "(", "table", ")", "schema", ",", "table", "=", "self", ".", "parse_table_name", "(", "table", ")", "if", "not", "schema", ":", "schema", "=", "self", ".", "schema", "tables", "=", "self", ".", "tables", "else", ":", "tables", "=", "self", ".", "tables_in_schema", "(", "schema", ")", "if", "table", "in", "tables", ":", "return", "Table", "(", "self", ",", "schema", ",", "table", ")", "else", ":", "return", "None" ]
Loads a table. Returns None if the table does not already exist in db
[ "Loads", "a", "table", ".", "Returns", "None", "if", "the", "table", "does", "not", "already", "exist", "in", "db" ]
train
https://github.com/smnorris/pgdata/blob/8b0294024d5ef30b4ae9184888e2cc7004d1784e/pgdata/database.py#L128-L141
smnorris/pgdata
pgdata/database.py
Database.mogrify
def mogrify(self, sql, params): """Return the query string with parameters added """ conn = self.engine.raw_connection() cursor = conn.cursor() return cursor.mogrify(sql, params)
python
def mogrify(self, sql, params): """Return the query string with parameters added """ conn = self.engine.raw_connection() cursor = conn.cursor() return cursor.mogrify(sql, params)
[ "def", "mogrify", "(", "self", ",", "sql", ",", "params", ")", ":", "conn", "=", "self", ".", "engine", ".", "raw_connection", "(", ")", "cursor", "=", "conn", ".", "cursor", "(", ")", "return", "cursor", ".", "mogrify", "(", "sql", ",", "params", ")" ]
Return the query string with parameters added
[ "Return", "the", "query", "string", "with", "parameters", "added" ]
train
https://github.com/smnorris/pgdata/blob/8b0294024d5ef30b4ae9184888e2cc7004d1784e/pgdata/database.py#L143-L148
smnorris/pgdata
pgdata/database.py
Database.execute
def execute(self, sql, params=None): """Just a pointer to engine.execute """ # wrap in a transaction to ensure things are committed # https://github.com/smnorris/pgdata/issues/3 with self.engine.begin() as conn: result = conn.execute(sql, params) return result
python
def execute(self, sql, params=None): """Just a pointer to engine.execute """ # wrap in a transaction to ensure things are committed # https://github.com/smnorris/pgdata/issues/3 with self.engine.begin() as conn: result = conn.execute(sql, params) return result
[ "def", "execute", "(", "self", ",", "sql", ",", "params", "=", "None", ")", ":", "# wrap in a transaction to ensure things are committed", "# https://github.com/smnorris/pgdata/issues/3", "with", "self", ".", "engine", ".", "begin", "(", ")", "as", "conn", ":", "result", "=", "conn", ".", "execute", "(", "sql", ",", "params", ")", "return", "result" ]
Just a pointer to engine.execute
[ "Just", "a", "pointer", "to", "engine", ".", "execute" ]
train
https://github.com/smnorris/pgdata/blob/8b0294024d5ef30b4ae9184888e2cc7004d1784e/pgdata/database.py#L150-L157
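The `engine.begin()` context manager used above is standard SQLAlchemy: it opens a transaction, commits on a clean exit, and rolls back if the block raises, which is the behavior the linked issue was after. A sketch with a placeholder DSN:

from sqlalchemy import create_engine, text

engine = create_engine("postgresql://user:secret@localhost/mydb")  # placeholder DSN
with engine.begin() as conn:  # commits on success, rolls back on error
    conn.execute(text("CREATE TABLE IF NOT EXISTS counters (n integer)"))
    conn.execute(text("INSERT INTO counters VALUES (1)"))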
smnorris/pgdata
pgdata/database.py
Database.query_one
def query_one(self, sql, params=None):
    """Grab just one record
    """
    r = self.engine.execute(sql, params)
    return r.fetchone()
python
def query_one(self, sql, params=None):
    """Grab just one record
    """
    r = self.engine.execute(sql, params)
    return r.fetchone()
[ "def", "query_one", "(", "self", ",", "sql", ",", "params", "=", "None", ")", ":", "r", "=", "self", ".", "engine", ".", "execute", "(", "sql", ",", "params", ")", "return", "r", ".", "fetchone", "(", ")" ]
Grab just one record
[ "Grab", "just", "one", "record" ]
train
https://github.com/smnorris/pgdata/blob/8b0294024d5ef30b4ae9184888e2cc7004d1784e/pgdata/database.py#L169-L173
smnorris/pgdata
pgdata/database.py
Database.create_schema
def create_schema(self, schema):
    """Create specified schema if it does not already exist
    """
    if schema not in self.schemas:
        sql = "CREATE SCHEMA " + schema
        self.execute(sql)
python
def create_schema(self, schema):
    """Create specified schema if it does not already exist
    """
    if schema not in self.schemas:
        sql = "CREATE SCHEMA " + schema
        self.execute(sql)
[ "def", "create_schema", "(", "self", ",", "schema", ")", ":", "if", "schema", "not", "in", "self", ".", "schemas", ":", "sql", "=", "\"CREATE SCHEMA \"", "+", "schema", "self", ".", "execute", "(", "sql", ")" ]
Create specified schema if it does not already exist
[ "Create", "specified", "schema", "if", "it", "does", "not", "already", "exist" ]
train
https://github.com/smnorris/pgdata/blob/8b0294024d5ef30b4ae9184888e2cc7004d1784e/pgdata/database.py#L175-L180
smnorris/pgdata
pgdata/database.py
Database.drop_schema
def drop_schema(self, schema, cascade=False):
    """Drop specified schema
    """
    if schema in self.schemas:
        sql = "DROP SCHEMA " + schema
        if cascade:
            sql = sql + " CASCADE"
        self.execute(sql)
python
def drop_schema(self, schema, cascade=False):
    """Drop specified schema
    """
    if schema in self.schemas:
        sql = "DROP SCHEMA " + schema
        if cascade:
            sql = sql + " CASCADE"
        self.execute(sql)
[ "def", "drop_schema", "(", "self", ",", "schema", ",", "cascade", "=", "False", ")", ":", "if", "schema", "in", "self", ".", "schemas", ":", "sql", "=", "\"DROP SCHEMA \"", "+", "schema", "if", "cascade", ":", "sql", "=", "sql", "+", "\" CASCADE\"", "self", ".", "execute", "(", "sql", ")" ]
Drop specified schema
[ "Drop", "specified", "schema" ]
train
https://github.com/smnorris/pgdata/blob/8b0294024d5ef30b4ae9184888e2cc7004d1784e/pgdata/database.py#L182-L189
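Both schema helpers are guarded by membership checks on self.schemas, so they are safe to call repeatedly; a round-trip sketch (schema name is illustrative):

db.create_schema("staging")               # no-op if "staging" already exists
db.drop_schema("staging", cascade=True)   # CASCADE also drops contained objects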
smnorris/pgdata
pgdata/database.py
Database.create_table
def create_table(self, table, columns):
    """Creates a table
    """
    schema, table = self.parse_table_name(table)
    table = self._valid_table_name(table)
    if not schema:
        schema = self.schema
    if table in self.tables:
        return Table(self, schema, table)
    else:
        return Table(self, schema, table, columns)
python
def create_table(self, table, columns):
    """Creates a table
    """
    schema, table = self.parse_table_name(table)
    table = self._valid_table_name(table)
    if not schema:
        schema = self.schema
    if table in self.tables:
        return Table(self, schema, table)
    else:
        return Table(self, schema, table, columns)
[ "def", "create_table", "(", "self", ",", "table", ",", "columns", ")", ":", "schema", ",", "table", "=", "self", ".", "parse_table_name", "(", "table", ")", "table", "=", "self", ".", "_valid_table_name", "(", "table", ")", "if", "not", "schema", ":", "schema", "=", "self", ".", "schema", "if", "table", "in", "self", ".", "tables", ":", "return", "Table", "(", "self", ",", "schema", ",", "table", ")", "else", ":", "return", "Table", "(", "self", ",", "schema", ",", "table", ",", "columns", ")" ]
Creates a table
[ "Creates", "a", "table" ]
train
https://github.com/smnorris/pgdata/blob/8b0294024d5ef30b4ae9184888e2cc7004d1784e/pgdata/database.py#L197-L207
smnorris/pgdata
pgdata/database.py
Database.ogr2pg
def ogr2pg(
    self,
    in_file,
    in_layer=None,
    out_layer=None,
    schema="public",
    s_srs=None,
    t_srs="EPSG:3005",
    sql=None,
    dim=2,
    cmd_only=False,
    index=True
):
    """
    Load a layer to provided pgdata database connection using OGR2OGR

    -sql option is like an ESRI where_clause or the ogr2ogr -where option,
    but to increase flexibility, it is in SQLITE dialect:
    SELECT * FROM <in_layer> WHERE <sql>
    """
    # if not provided a layer name, use the name of the input file
    if not in_layer:
        in_layer = os.path.splitext(os.path.basename(in_file))[0]
    if not out_layer:
        out_layer = in_layer.lower()
    command = [
        "ogr2ogr",
        "-t_srs",
        t_srs,
        "-f",
        "PostgreSQL",
        "PG:host={h} user={u} dbname={db} password={pwd}".format(
            h=self.host, u=self.user, db=self.database, pwd=self.password
        ),
        "-lco", "OVERWRITE=YES",
        "-overwrite",
        "-lco", "SCHEMA={schema}".format(schema=schema),
        "-lco", "GEOMETRY_NAME=geom",
        "-dim", "{d}".format(d=dim),
        "-nlt", "PROMOTE_TO_MULTI",
        "-nln", out_layer,
        in_file
    ]
    if sql:
        command.insert(len(command), "-sql")
        command.insert(
            len(command), "SELECT * FROM {} WHERE {}".format(in_layer, sql)
        )
        command.insert(len(command), "-dialect")
        command.insert(len(command), "SQLITE")
    # only add output layer name if sql not included (it gets ignored)
    if not sql:
        command.insert(len(command), in_layer)
    if s_srs:
        command.insert(len(command), "-s_srs")
        command.insert(len(command), s_srs)
    if not index:
        command.insert(len(command), "-lco")
        command.insert(len(command), "SPATIAL_INDEX=NO")
    if cmd_only:
        return " ".join(command)
    else:
        subprocess.run(command)
python
def ogr2pg(
    self,
    in_file,
    in_layer=None,
    out_layer=None,
    schema="public",
    s_srs=None,
    t_srs="EPSG:3005",
    sql=None,
    dim=2,
    cmd_only=False,
    index=True
):
    """
    Load a layer to provided pgdata database connection using OGR2OGR

    -sql option is like an ESRI where_clause or the ogr2ogr -where option,
    but to increase flexibility, it is in SQLITE dialect:
    SELECT * FROM <in_layer> WHERE <sql>
    """
    # if not provided a layer name, use the name of the input file
    if not in_layer:
        in_layer = os.path.splitext(os.path.basename(in_file))[0]
    if not out_layer:
        out_layer = in_layer.lower()
    command = [
        "ogr2ogr",
        "-t_srs",
        t_srs,
        "-f",
        "PostgreSQL",
        "PG:host={h} user={u} dbname={db} password={pwd}".format(
            h=self.host, u=self.user, db=self.database, pwd=self.password
        ),
        "-lco", "OVERWRITE=YES",
        "-overwrite",
        "-lco", "SCHEMA={schema}".format(schema=schema),
        "-lco", "GEOMETRY_NAME=geom",
        "-dim", "{d}".format(d=dim),
        "-nlt", "PROMOTE_TO_MULTI",
        "-nln", out_layer,
        in_file
    ]
    if sql:
        command.insert(len(command), "-sql")
        command.insert(
            len(command), "SELECT * FROM {} WHERE {}".format(in_layer, sql)
        )
        command.insert(len(command), "-dialect")
        command.insert(len(command), "SQLITE")
    # only add output layer name if sql not included (it gets ignored)
    if not sql:
        command.insert(len(command), in_layer)
    if s_srs:
        command.insert(len(command), "-s_srs")
        command.insert(len(command), s_srs)
    if not index:
        command.insert(len(command), "-lco")
        command.insert(len(command), "SPATIAL_INDEX=NO")
    if cmd_only:
        return " ".join(command)
    else:
        subprocess.run(command)
[ "def", "ogr2pg", "(", "self", ",", "in_file", ",", "in_layer", "=", "None", ",", "out_layer", "=", "None", ",", "schema", "=", "\"public\"", ",", "s_srs", "=", "None", ",", "t_srs", "=", "\"EPSG:3005\"", ",", "sql", "=", "None", ",", "dim", "=", "2", ",", "cmd_only", "=", "False", ",", "index", "=", "True", ")", ":", "# if not provided a layer name, use the name of the input file", "if", "not", "in_layer", ":", "in_layer", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "in_file", ")", ")", "[", "0", "]", "if", "not", "out_layer", ":", "out_layer", "=", "in_layer", ".", "lower", "(", ")", "command", "=", "[", "\"ogr2ogr\"", ",", "\"-t_srs\"", ",", "t_srs", ",", "\"-f\"", ",", "\"PostgreSQL\"", ",", "\"PG:host={h} user={u} dbname={db} password={pwd}\"", ".", "format", "(", "h", "=", "self", ".", "host", ",", "u", "=", "self", ".", "user", ",", "db", "=", "self", ".", "database", ",", "pwd", "=", "self", ".", "password", ")", ",", "\"-lco\"", ",", "\"OVERWRITE=YES\"", ",", "\"-overwrite\"", ",", "\"-lco\"", ",", "\"SCHEMA={schema}\"", ".", "format", "(", "schema", "=", "schema", ")", ",", "\"-lco\"", ",", "\"GEOMETRY_NAME=geom\"", ",", "\"-dim\"", ",", "\"{d}\"", ".", "format", "(", "d", "=", "dim", ")", ",", "\"-nlt\"", ",", "\"PROMOTE_TO_MULTI\"", ",", "\"-nln\"", ",", "out_layer", ",", "in_file", "]", "if", "sql", ":", "command", ".", "insert", "(", "len", "(", "command", ")", ",", "\"-sql\"", ")", "command", ".", "insert", "(", "len", "(", "command", ")", ",", "\"SELECT * FROM {} WHERE {}\"", ".", "format", "(", "in_layer", ",", "sql", ")", ")", "command", ".", "insert", "(", "len", "(", "command", ")", ",", "\"-dialect\"", ")", "command", ".", "insert", "(", "len", "(", "command", ")", ",", "\"SQLITE\"", ")", "# only add output layer name if sql not included (it gets ignored)", "if", "not", "sql", ":", "command", ".", "insert", "(", "len", "(", "command", ")", ",", "in_layer", ")", "if", "s_srs", ":", "command", ".", "insert", "(", "len", "(", "command", ")", ",", "\"-s_srs\"", ")", "command", ".", "insert", "(", "len", "(", "command", ")", ",", "s_srs", ")", "if", "not", "index", ":", "command", ".", "insert", "(", "len", "(", "command", ")", ",", "\"-lco\"", ")", "command", ".", "insert", "(", "len", "(", "command", ")", ",", "\"SPATIAL_INDEX=NO\"", ")", "if", "cmd_only", ":", "return", "\" \"", ".", "join", "(", "command", ")", "else", ":", "subprocess", ".", "run", "(", "command", ")" ]
Load a layer to provided pgdata database connection using OGR2OGR

-sql option is like an ESRI where_clause or the ogr2ogr -where option,
but to increase flexibility, it is in SQLITE dialect:
SELECT * FROM <in_layer> WHERE <sql>
[ "Load", "a", "layer", "to", "provided", "pgdata", "database", "connection", "using", "OGR2OGR" ]
train
https://github.com/smnorris/pgdata/blob/8b0294024d5ef30b4ae9184888e2cc7004d1784e/pgdata/database.py#L209-L284
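With cmd_only=True the method returns the assembled ogr2ogr command string instead of running it, which is handy for checking the SQLITE-dialect -sql clause before loading; the input file and column below are hypothetical:

cmd = db.ogr2pg(
    "parks.shp",              # hypothetical input file
    schema="staging",
    sql="area_ha > 1000",     # becomes: SELECT * FROM parks WHERE area_ha > 1000
    cmd_only=True,
)
print(cmd)                    # inspect the generated ogr2ogr invocation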
smnorris/pgdata
pgdata/database.py
Database.pg2ogr
def pg2ogr(
    self,
    sql,
    driver,
    outfile,
    outlayer=None,
    column_remap=None,
    s_srs="EPSG:3005",
    t_srs=None,
    geom_type=None,
    append=False,
):
    """
    A wrapper around ogr2ogr, for quickly dumping a postgis query to file.
    Supported formats are ["ESRI Shapefile", "GeoJSON", "FileGDB", "GPKG"]
    - for GeoJSON, transforms to EPSG:4326
    - for Shapefile, consider supplying a column_remap dict
    - for FileGDB, geom_type is required
      (https://trac.osgeo.org/gdal/ticket/4186)
    """
    if driver == "FileGDB" and geom_type is None:
        raise ValueError("Specify geom_type when writing to FileGDB")
    filename, ext = os.path.splitext(os.path.basename(outfile))
    if not outlayer:
        outlayer = filename
    u = urlparse(self.url)
    pgcred = "host={h} user={u} dbname={db} password={p}".format(
        h=u.hostname, u=u.username, db=u.path[1:], p=u.password
    )
    # use a VRT so we can remap columns if a lookup is provided
    if column_remap:
        # if specifying output field names, all fields have to be specified
        # rather than try and parse the input sql, just do a test run of the
        # query and grab column names from that
        columns = [c for c in self.query(sql).keys() if c != "geom"]
        # make sure all columns are represented in the remap
        for c in columns:
            if c not in column_remap.keys():
                column_remap[c] = c
        field_remap_xml = " \n".join(
            ['<Field name="' + column_remap[c] + '" src="' + c + '"/>'
             for c in columns]
        )
    else:
        field_remap_xml = ""
    vrt = """<OGRVRTDataSource>
        <OGRVRTLayer name="{layer}">
            <SrcDataSource>PG:{pgcred}</SrcDataSource>
            <SrcSQL>{sql}</SrcSQL>
            {fieldremap}
        </OGRVRTLayer>
    </OGRVRTDataSource>
    """.format(
        layer=outlayer,
        sql=escape(sql.replace("\n", " ")),
        pgcred=pgcred,
        fieldremap=field_remap_xml,
    )
    vrtpath = os.path.join(tempfile.gettempdir(), filename + ".vrt")
    if os.path.exists(vrtpath):
        os.remove(vrtpath)
    with open(vrtpath, "w") as vrtfile:
        vrtfile.write(vrt)
    # GeoJSON writes to EPSG:4326
    if driver == 'GeoJSON' and not t_srs:
        t_srs = "EPSG:4326"
    # otherwise, default to BC Albers
    else:
        t_srs = "EPSG:3005"
    command = [
        "ogr2ogr",
        "-s_srs", s_srs,
        "-t_srs", t_srs,
        "-progress",
        "-f", driver,
        outfile,
        vrtpath
    ]
    # if writing to gdb, specify geom type
    if driver == "FileGDB":
        command.insert(len(command), "-nlt")
        command.insert(len(command), geom_type)
    # automatically update existing multilayer outputs
    if driver in ("FileGDB", "GPKG") and os.path.exists(outfile):
        command.insert(len(command), "-update")
    # if specified, append to existing output
    if append:
        command.insert(len(command), "-append")
    subprocess.run(command)
python
def pg2ogr(
    self,
    sql,
    driver,
    outfile,
    outlayer=None,
    column_remap=None,
    s_srs="EPSG:3005",
    t_srs=None,
    geom_type=None,
    append=False,
):
    """
    A wrapper around ogr2ogr, for quickly dumping a postgis query to file.
    Supported formats are ["ESRI Shapefile", "GeoJSON", "FileGDB", "GPKG"]
    - for GeoJSON, transforms to EPSG:4326
    - for Shapefile, consider supplying a column_remap dict
    - for FileGDB, geom_type is required
      (https://trac.osgeo.org/gdal/ticket/4186)
    """
    if driver == "FileGDB" and geom_type is None:
        raise ValueError("Specify geom_type when writing to FileGDB")
    filename, ext = os.path.splitext(os.path.basename(outfile))
    if not outlayer:
        outlayer = filename
    u = urlparse(self.url)
    pgcred = "host={h} user={u} dbname={db} password={p}".format(
        h=u.hostname, u=u.username, db=u.path[1:], p=u.password
    )
    # use a VRT so we can remap columns if a lookup is provided
    if column_remap:
        # if specifying output field names, all fields have to be specified
        # rather than try and parse the input sql, just do a test run of the
        # query and grab column names from that
        columns = [c for c in self.query(sql).keys() if c != "geom"]
        # make sure all columns are represented in the remap
        for c in columns:
            if c not in column_remap.keys():
                column_remap[c] = c
        field_remap_xml = " \n".join(
            ['<Field name="' + column_remap[c] + '" src="' + c + '"/>'
             for c in columns]
        )
    else:
        field_remap_xml = ""
    vrt = """<OGRVRTDataSource>
        <OGRVRTLayer name="{layer}">
            <SrcDataSource>PG:{pgcred}</SrcDataSource>
            <SrcSQL>{sql}</SrcSQL>
            {fieldremap}
        </OGRVRTLayer>
    </OGRVRTDataSource>
    """.format(
        layer=outlayer,
        sql=escape(sql.replace("\n", " ")),
        pgcred=pgcred,
        fieldremap=field_remap_xml,
    )
    vrtpath = os.path.join(tempfile.gettempdir(), filename + ".vrt")
    if os.path.exists(vrtpath):
        os.remove(vrtpath)
    with open(vrtpath, "w") as vrtfile:
        vrtfile.write(vrt)
    # GeoJSON writes to EPSG:4326
    if driver == 'GeoJSON' and not t_srs:
        t_srs = "EPSG:4326"
    # otherwise, default to BC Albers
    else:
        t_srs = "EPSG:3005"
    command = [
        "ogr2ogr",
        "-s_srs", s_srs,
        "-t_srs", t_srs,
        "-progress",
        "-f", driver,
        outfile,
        vrtpath
    ]
    # if writing to gdb, specify geom type
    if driver == "FileGDB":
        command.insert(len(command), "-nlt")
        command.insert(len(command), geom_type)
    # automatically update existing multilayer outputs
    if driver in ("FileGDB", "GPKG") and os.path.exists(outfile):
        command.insert(len(command), "-update")
    # if specified, append to existing output
    if append:
        command.insert(len(command), "-append")
    subprocess.run(command)
[ "def", "pg2ogr", "(", "self", ",", "sql", ",", "driver", ",", "outfile", ",", "outlayer", "=", "None", ",", "column_remap", "=", "None", ",", "s_srs", "=", "\"EPSG:3005\"", ",", "t_srs", "=", "None", ",", "geom_type", "=", "None", ",", "append", "=", "False", ",", ")", ":", "if", "driver", "==", "\"FileGDB\"", "and", "geom_type", "is", "None", ":", "raise", "ValueError", "(", "\"Specify geom_type when writing to FileGDB\"", ")", "filename", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "outfile", ")", ")", "if", "not", "outlayer", ":", "outlayer", "=", "filename", "u", "=", "urlparse", "(", "self", ".", "url", ")", "pgcred", "=", "\"host={h} user={u} dbname={db} password={p}\"", ".", "format", "(", "h", "=", "u", ".", "hostname", ",", "u", "=", "u", ".", "username", ",", "db", "=", "u", ".", "path", "[", "1", ":", "]", ",", "p", "=", "u", ".", "password", ")", "# use a VRT so we can remap columns if a lookoup is provided", "if", "column_remap", ":", "# if specifiying output field names, all fields have to be specified", "# rather than try and parse the input sql, just do a test run of the", "# query and grab column names from that", "columns", "=", "[", "c", "for", "c", "in", "self", ".", "query", "(", "sql", ")", ".", "keys", "(", ")", "if", "c", "!=", "\"geom\"", "]", "# make sure all columns are represented in the remap", "for", "c", "in", "columns", ":", "if", "c", "not", "in", "column_remap", ".", "keys", "(", ")", ":", "column_remap", "[", "c", "]", "=", "c", "field_remap_xml", "=", "\" \\n\"", ".", "join", "(", "[", "'<Field name=\"'", "+", "column_remap", "[", "c", "]", "+", "'\" src=\"'", "+", "c", "+", "'\"/>'", "for", "c", "in", "columns", "]", ")", "else", ":", "field_remap_xml", "=", "\"\"", "vrt", "=", "\"\"\"<OGRVRTDataSource>\n <OGRVRTLayer name=\"{layer}\">\n <SrcDataSource>PG:{pgcred}</SrcDataSource>\n <SrcSQL>{sql}</SrcSQL>\n {fieldremap}\n </OGRVRTLayer>\n </OGRVRTDataSource>\n \"\"\"", ".", "format", "(", "layer", "=", "outlayer", ",", "sql", "=", "escape", "(", "sql", ".", "replace", "(", "\"\\n\"", ",", "\" \"", ")", ")", ",", "pgcred", "=", "pgcred", ",", "fieldremap", "=", "field_remap_xml", ",", ")", "vrtpath", "=", "os", ".", "path", ".", "join", "(", "tempfile", ".", "gettempdir", "(", ")", ",", "filename", "+", "\".vrt\"", ")", "if", "os", ".", "path", ".", "exists", "(", "vrtpath", ")", ":", "os", ".", "remove", "(", "vrtpath", ")", "with", "open", "(", "vrtpath", ",", "\"w\"", ")", "as", "vrtfile", ":", "vrtfile", ".", "write", "(", "vrt", ")", "# GeoJSON writes to EPSG:4326", "if", "driver", "==", "'GeoJSON'", "and", "not", "t_srs", ":", "t_srs", "=", "\"EPSG:4326\"", "# otherwise, default to BC Albers", "else", ":", "t_srs", "=", "\"EPSG:3005\"", "command", "=", "[", "\"ogr2ogr\"", ",", "\"-s_srs\"", ",", "s_srs", ",", "\"-t_srs\"", ",", "t_srs", ",", "\"-progress\"", ",", "\"-f\"", ",", "driver", ",", "outfile", ",", "vrtpath", "]", "# if writing to gdb, specify geom type", "if", "driver", "==", "\"FileGDB\"", ":", "command", ".", "insert", "(", "len", "(", "command", ")", ",", "\"-nlt\"", ")", "command", ".", "insert", "(", "len", "(", "command", ")", ",", "geom_type", ")", "# automatically update existing multilayer outputs", "if", "driver", "in", "(", "\"FileGDB\"", ",", "\"GPKG\"", ")", "and", "os", ".", "path", ".", "exists", "(", "outfile", ")", ":", "command", ".", "insert", "(", "len", "(", "command", ")", ",", "\"-update\"", ")", "# if specified, append to existing output", "if", "append", 
":", "command", ".", "insert", "(", "len", "(", "command", ")", ",", "\"-append\"", ")", "subprocess", ".", "run", "(", "command", ")" ]
A wrapper around ogr2ogr, for quickly dumping a postgis query to file.
Supported formats are ["ESRI Shapefile", "GeoJSON", "FileGDB", "GPKG"]
- for GeoJSON, transforms to EPSG:4326
- for Shapefile, consider supplying a column_remap dict
- for FileGDB, geom_type is required
  (https://trac.osgeo.org/gdal/ticket/4186)
[ "A", "wrapper", "around", "ogr2ogr", "for", "quickly", "dumping", "a", "postgis", "query", "to", "file", ".", "Suppported", "formats", "are", "[", "ESRI", "Shapefile", "GeoJSON", "FileGDB", "GPKG", "]", "-", "for", "GeoJSON", "transforms", "to", "EPSG", ":", "4326", "-", "for", "Shapefile", "consider", "supplying", "a", "column_remap", "dict", "-", "for", "FileGDB", "geom_type", "is", "required", "(", "https", ":", "//", "trac", ".", "osgeo", ".", "org", "/", "gdal", "/", "ticket", "/", "4186", ")" ]
train
https://github.com/smnorris/pgdata/blob/8b0294024d5ef30b4ae9184888e2cc7004d1784e/pgdata/database.py#L286-L391
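A sketch of dumping a query to GeoPackage with a renamed output field; any column missing from column_remap is passed through unchanged, and a FileGDB target would additionally require geom_type (query and names below are hypothetical):

db.pg2ogr(
    "SELECT park_id, name, geom FROM staging.parks",  # hypothetical query
    driver="GPKG",
    outfile="parks.gpkg",
    column_remap={"name": "park_name"},  # park_id keeps its name automatically
)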
letuananh/chirptext
chirptext/cli.py
setup_logging
def setup_logging(filename, log_dir=None, force_setup=False):
    ''' Try to load logging configuration from a file. Set level to INFO if failed.
    '''
    if not force_setup and ChirpCLI.SETUP_COMPLETED:
        logging.debug("Master logging has been setup. This call will be ignored.")
        return
    if log_dir and not os.path.exists(log_dir):
        os.makedirs(log_dir)
    if os.path.isfile(filename):
        with open(filename) as config_file:
            try:
                config = json.load(config_file)
                logging.config.dictConfig(config)
                logging.info("logging was setup using {}".format(filename))
                ChirpCLI.SETUP_COMPLETED = True
            except Exception as e:
                logging.exception("Could not load logging config")
                # default logging config
                logging.basicConfig(level=logging.INFO)
    else:
        logging.basicConfig(level=logging.INFO)
python
def setup_logging(filename, log_dir=None, force_setup=False):
    ''' Try to load logging configuration from a file. Set level to INFO if failed.
    '''
    if not force_setup and ChirpCLI.SETUP_COMPLETED:
        logging.debug("Master logging has been setup. This call will be ignored.")
        return
    if log_dir and not os.path.exists(log_dir):
        os.makedirs(log_dir)
    if os.path.isfile(filename):
        with open(filename) as config_file:
            try:
                config = json.load(config_file)
                logging.config.dictConfig(config)
                logging.info("logging was setup using {}".format(filename))
                ChirpCLI.SETUP_COMPLETED = True
            except Exception as e:
                logging.exception("Could not load logging config")
                # default logging config
                logging.basicConfig(level=logging.INFO)
    else:
        logging.basicConfig(level=logging.INFO)
[ "def", "setup_logging", "(", "filename", ",", "log_dir", "=", "None", ",", "force_setup", "=", "False", ")", ":", "if", "not", "force_setup", "and", "ChirpCLI", ".", "SETUP_COMPLETED", ":", "logging", ".", "debug", "(", "\"Master logging has been setup. This call will be ignored.\"", ")", "return", "if", "log_dir", "and", "not", "os", ".", "path", ".", "exists", "(", "log_dir", ")", ":", "os", ".", "makedirs", "(", "log_dir", ")", "if", "os", ".", "path", ".", "isfile", "(", "filename", ")", ":", "with", "open", "(", "filename", ")", "as", "config_file", ":", "try", ":", "config", "=", "json", ".", "load", "(", "config_file", ")", "logging", ".", "config", ".", "dictConfig", "(", "config", ")", "logging", ".", "info", "(", "\"logging was setup using {}\"", ".", "format", "(", "filename", ")", ")", "ChirpCLI", ".", "SETUP_COMPLETED", "=", "True", "except", "Exception", "as", "e", ":", "logging", ".", "exception", "(", "\"Could not load logging config\"", ")", "# default logging config", "logging", ".", "basicConfig", "(", "level", "=", "logging", ".", "INFO", ")", "else", ":", "logging", ".", "basicConfig", "(", "level", "=", "logging", ".", "INFO", ")" ]
Try to load logging configuration from a file. Set level to INFO if failed.
[ "Try", "to", "load", "logging", "configuration", "from", "a", "file", ".", "Set", "level", "to", "INFO", "if", "failed", "." ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/cli.py#L35-L55
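The file handed to setup_logging is parsed with json.load and passed to logging.config.dictConfig, so any dictConfig-style document works; a minimal sketch that writes such a config and loads it (the file name is illustrative, and the config contents are an assumption, not from this record):

import json

config = {
    "version": 1,
    "handlers": {"console": {"class": "logging.StreamHandler"}},
    "root": {"level": "INFO", "handlers": ["console"]},
}
with open("logging.json", "w") as f:
    json.dump(config, f)

setup_logging("logging.json", log_dir="logs")  # creates ./logs if missing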
letuananh/chirptext
chirptext/cli.py
config_logging
def config_logging(args):
    ''' Override root logger's level '''
    if args.quiet:
        logging.getLogger().setLevel(logging.CRITICAL)
    elif args.verbose:
        logging.getLogger().setLevel(logging.DEBUG)
python
def config_logging(args):
    ''' Override root logger's level '''
    if args.quiet:
        logging.getLogger().setLevel(logging.CRITICAL)
    elif args.verbose:
        logging.getLogger().setLevel(logging.DEBUG)
[ "def", "config_logging", "(", "args", ")", ":", "if", "args", ".", "quiet", ":", "logging", ".", "getLogger", "(", ")", ".", "setLevel", "(", "logging", ".", "CRITICAL", ")", "elif", "args", ".", "verbose", ":", "logging", ".", "getLogger", "(", ")", ".", "setLevel", "(", "logging", ".", "DEBUG", ")" ]
Override root logger's level
[ "Override", "root", "logger", "s", "level" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/cli.py#L58-L63
letuananh/chirptext
chirptext/cli.py
CLIApp.add_task
def add_task(self, task, func=None, **kwargs):
    ''' Add a task parser '''
    if not self.__tasks:
        raise Exception("Tasks subparsers is disabled")
    if 'help' not in kwargs:
        if func.__doc__:
            kwargs['help'] = func.__doc__
    task_parser = self.__tasks.add_parser(task, **kwargs)
    if self.__add_vq:
        self.add_vq(task_parser)
    if func is not None:
        task_parser.set_defaults(func=func)
    return task_parser
python
def add_task(self, task, func=None, **kwargs):
    ''' Add a task parser '''
    if not self.__tasks:
        raise Exception("Tasks subparsers is disabled")
    if 'help' not in kwargs:
        if func.__doc__:
            kwargs['help'] = func.__doc__
    task_parser = self.__tasks.add_parser(task, **kwargs)
    if self.__add_vq:
        self.add_vq(task_parser)
    if func is not None:
        task_parser.set_defaults(func=func)
    return task_parser
[ "def", "add_task", "(", "self", ",", "task", ",", "func", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "__tasks", ":", "raise", "Exception", "(", "\"Tasks subparsers is disabled\"", ")", "if", "'help'", "not", "in", "kwargs", ":", "if", "func", ".", "__doc__", ":", "kwargs", "[", "'help'", "]", "=", "func", ".", "__doc__", "task_parser", "=", "self", ".", "__tasks", ".", "add_parser", "(", "task", ",", "*", "*", "kwargs", ")", "if", "self", ".", "__add_vq", ":", "self", ".", "add_vq", "(", "task_parser", ")", "if", "func", "is", "not", "None", ":", "task_parser", ".", "set_defaults", "(", "func", "=", "func", ")", "return", "task_parser" ]
Add a task parser
[ "Add", "a", "task", "parser" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/cli.py#L92-L104
letuananh/chirptext
chirptext/cli.py
CLIApp.add_vq
def add_vq(self, parser):
    ''' Add verbose & quiet options '''
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-v", "--verbose", action="store_true")
    group.add_argument("-q", "--quiet", action="store_true")
python
def add_vq(self, parser):
    ''' Add verbose & quiet options '''
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-v", "--verbose", action="store_true")
    group.add_argument("-q", "--quiet", action="store_true")
[ "def", "add_vq", "(", "self", ",", "parser", ")", ":", "group", "=", "parser", ".", "add_mutually_exclusive_group", "(", ")", "group", ".", "add_argument", "(", "\"-v\"", ",", "\"--verbose\"", ",", "action", "=", "\"store_true\"", ")", "group", ".", "add_argument", "(", "\"-q\"", ",", "\"--quiet\"", ",", "action", "=", "\"store_true\"", ")" ]
Add verbose & quiet options
[ "Add", "verbose", "&", "quiet", "options" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/cli.py#L106-L110
letuananh/chirptext
chirptext/cli.py
CLIApp.add_version_func
def add_version_func(self, show_version):
    ''' Enable --version and -V to show version information '''
    if callable(show_version):
        self.__show_version_func = show_version
    else:
        self.__show_version_func = lambda cli, args: print(show_version)
    self.parser.add_argument("-V", "--version", action="store_true")
python
def add_version_func(self, show_version):
    ''' Enable --version and -V to show version information '''
    if callable(show_version):
        self.__show_version_func = show_version
    else:
        self.__show_version_func = lambda cli, args: print(show_version)
    self.parser.add_argument("-V", "--version", action="store_true")
[ "def", "add_version_func", "(", "self", ",", "show_version", ")", ":", "if", "callable", "(", "show_version", ")", ":", "self", ".", "__show_version_func", "=", "show_version", "else", ":", "self", ".", "__show_version_func", "=", "lambda", "cli", ",", "args", ":", "print", "(", "show_version", ")", "self", ".", "parser", ".", "add_argument", "(", "\"-V\"", ",", "\"--version\"", ",", "action", "=", "\"store_true\"", ")" ]
Enable --version and -V to show version information
[ "Enable", "--", "version", "and", "-", "V", "to", "show", "version", "information" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/cli.py#L112-L118
letuananh/chirptext
chirptext/cli.py
CLIApp.logger
def logger(self):
    ''' Lazy logger '''
    if self.__logger is None:
        self.__logger = logging.getLogger(self.__name)
    return self.__logger
python
def logger(self):
    ''' Lazy logger '''
    if self.__logger is None:
        self.__logger = logging.getLogger(self.__name)
    return self.__logger
[ "def", "logger", "(", "self", ")", ":", "if", "self", ".", "__logger", "is", "None", ":", "self", ".", "__logger", "=", "logging", ".", "getLogger", "(", "self", ".", "__name", ")", "return", "self", ".", "__logger" ]
Lazy logger
[ "Lazy", "logger" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/cli.py#L121-L125
letuananh/chirptext
chirptext/cli.py
CLIApp.run
def run(self, func=None):
    ''' Run the app '''
    args = self.parser.parse_args()
    if self.__add_vq is not None and self.__config_logging:
        self.__config_logging(args)
    if self.__show_version_func and args.version and callable(self.__show_version_func):
        self.__show_version_func(self, args)
    elif args.func is not None:
        args.func(self, args)
    elif func is not None:
        func(self, args)
    else:
        self.parser.print_help()
python
def run(self, func=None):
    ''' Run the app '''
    args = self.parser.parse_args()
    if self.__add_vq is not None and self.__config_logging:
        self.__config_logging(args)
    if self.__show_version_func and args.version and callable(self.__show_version_func):
        self.__show_version_func(self, args)
    elif args.func is not None:
        args.func(self, args)
    elif func is not None:
        func(self, args)
    else:
        self.parser.print_help()
[ "def", "run", "(", "self", ",", "func", "=", "None", ")", ":", "args", "=", "self", ".", "parser", ".", "parse_args", "(", ")", "if", "self", ".", "__add_vq", "is", "not", "None", "and", "self", ".", "__config_logging", ":", "self", ".", "__config_logging", "(", "args", ")", "if", "self", ".", "__show_version_func", "and", "args", ".", "version", "and", "callable", "(", "self", ".", "__show_version_func", ")", ":", "self", ".", "__show_version_func", "(", "self", ",", "args", ")", "elif", "args", ".", "func", "is", "not", "None", ":", "args", ".", "func", "(", "self", ",", "args", ")", "elif", "func", "is", "not", "None", ":", "func", "(", "self", ",", "args", ")", "else", ":", "self", ".", "parser", ".", "print_help", "(", ")" ]
Run the app
[ "Run", "the", "app" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/cli.py#L127-L139
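How the pieces fit together in a minimal application; the CLIApp constructor arguments are an assumption, since only its methods appear in these records:

from chirptext.cli import CLIApp  # module path taken from the records above

def say_hello(cli, args):
    ''' Print a greeting '''      # becomes the task's help text via add_task
    print("hello")

app = CLIApp("demo")              # constructor signature is an assumption
app.add_task('hello', func=say_hello)
app.run()                         # e.g. `python demo.py hello -v` runs say_hello verbosely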
letuananh/chirptext
chirptext/leutile.py
header
def header(*msg, level='h1', separator=" ", print_out=print): ''' Print header block in text mode ''' out_string = separator.join(str(x) for x in msg) if level == 'h0': # box_len = 80 if len(msg) < 80 else len(msg) box_len = 80 print_out('+' + '-' * (box_len + 2)) print_out("| %s" % out_string) print_out('+' + '-' * (box_len + 2)) elif level == 'h1': print_out("") print_out(out_string) print_out('-' * 60) elif level == 'h2': print_out('\t%s' % out_string) print_out('\t' + ('-' * 40)) else: print_out('\t\t%s' % out_string) print_out('\t\t' + ('-' * 20))
python
def header(*msg, level='h1', separator=" ", print_out=print): ''' Print header block in text mode ''' out_string = separator.join(str(x) for x in msg) if level == 'h0': # box_len = 80 if len(msg) < 80 else len(msg) box_len = 80 print_out('+' + '-' * (box_len + 2)) print_out("| %s" % out_string) print_out('+' + '-' * (box_len + 2)) elif level == 'h1': print_out("") print_out(out_string) print_out('-' * 60) elif level == 'h2': print_out('\t%s' % out_string) print_out('\t' + ('-' * 40)) else: print_out('\t\t%s' % out_string) print_out('\t\t' + ('-' * 20))
[ "def", "header", "(", "*", "msg", ",", "level", "=", "'h1'", ",", "separator", "=", "\" \"", ",", "print_out", "=", "print", ")", ":", "out_string", "=", "separator", ".", "join", "(", "str", "(", "x", ")", "for", "x", "in", "msg", ")", "if", "level", "==", "'h0'", ":", "# box_len = 80 if len(msg) < 80 else len(msg)", "box_len", "=", "80", "print_out", "(", "'+'", "+", "'-'", "*", "(", "box_len", "+", "2", ")", ")", "print_out", "(", "\"| %s\"", "%", "out_string", ")", "print_out", "(", "'+'", "+", "'-'", "*", "(", "box_len", "+", "2", ")", ")", "elif", "level", "==", "'h1'", ":", "print_out", "(", "\"\"", ")", "print_out", "(", "out_string", ")", "print_out", "(", "'-'", "*", "60", ")", "elif", "level", "==", "'h2'", ":", "print_out", "(", "'\\t%s'", "%", "out_string", ")", "print_out", "(", "'\\t'", "+", "(", "'-'", "*", "40", ")", ")", "else", ":", "print_out", "(", "'\\t\\t%s'", "%", "out_string", ")", "print_out", "(", "'\\t\\t'", "+", "(", "'-'", "*", "20", ")", ")" ]
Print header block in text mode
[ "Print", "header", "block", "in", "text", "mode" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/leutile.py#L49-L68
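The levels produce progressively smaller headings (h0 is a boxed banner, h1 a 60-dash rule, h2 and below tab-indented); for example:

header("Results", level='h1')                    # blank line, title, 60-dash rule
header("Precision", "Recall", level='h2', separator=" / ")  # tab-indented, 40 dashes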
letuananh/chirptext
chirptext/leutile.py
piter.fetch
def fetch(self, value_obj=None):
    ''' Fetch the next value '''
    val = None
    try:
        val = next(self.__iterable)
    except StopIteration:
        return None
    if value_obj is None:
        value_obj = Value(value=val)
    else:
        value_obj.value = val
    return value_obj
python
def fetch(self, value_obj=None):
    ''' Fetch the next value '''
    val = None
    try:
        val = next(self.__iterable)
    except StopIteration:
        return None
    if value_obj is None:
        value_obj = Value(value=val)
    else:
        value_obj.value = val
    return value_obj
[ "def", "fetch", "(", "self", ",", "value_obj", "=", "None", ")", ":", "val", "=", "None", "try", ":", "val", "=", "next", "(", "self", ".", "__iterable", ")", "except", "StopIteration", ":", "return", "None", "if", "value_obj", "is", "None", ":", "value_obj", "=", "Value", "(", "value", "=", "val", ")", "else", ":", "value_obj", ".", "value", "=", "val", "return", "value_obj" ]
Fetch the next value
[ "Fetch", "the", "next", "two", "values" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/leutile.py#L109-L120
letuananh/chirptext
chirptext/leutile.py
Counter.get_report_order
def get_report_order(self):
    ''' Keys are sorted based on report order (i.e. some keys to be shown first)
        Related: see sorted_by_count
    '''
    order_list = []
    for x in self.__priority:
        order_list.append([x, self[x]])
    for x in sorted(list(self.keys())):
        if x not in self.__priority:
            order_list.append([x, self[x]])
    return order_list
python
def get_report_order(self):
    ''' Keys are sorted based on report order (i.e. some keys to be shown first)
        Related: see sorted_by_count
    '''
    order_list = []
    for x in self.__priority:
        order_list.append([x, self[x]])
    for x in sorted(list(self.keys())):
        if x not in self.__priority:
            order_list.append([x, self[x]])
    return order_list
[ "def", "get_report_order", "(", "self", ")", ":", "order_list", "=", "[", "]", "for", "x", "in", "self", ".", "__priority", ":", "order_list", ".", "append", "(", "[", "x", ",", "self", "[", "x", "]", "]", ")", "for", "x", "in", "sorted", "(", "list", "(", "self", ".", "keys", "(", ")", ")", ")", ":", "if", "x", "not", "in", "self", ".", "__priority", ":", "order_list", ".", "append", "(", "[", "x", ",", "self", "[", "x", "]", "]", ")", "return", "order_list" ]
Keys are sorted based on report order (i.e. some keys to be shown first) Related: see sorted_by_count
[ "Keys", "are", "sorted", "based", "on", "report", "order", "(", "i", ".", "e", ".", "some", "keys", "to", "be", "shown", "first", ")", "Related", ":", "see", "sorted_by_count" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/leutile.py#L157-L167
letuananh/chirptext
chirptext/leutile.py
TextReport.content
def content(self):
    ''' Return report content as a string if mode == STRINGIO else an empty string '''
    if isinstance(self.__report_file, io.StringIO):
        return self.__report_file.getvalue()
    else:
        return ''
python
def content(self):
    ''' Return report content as a string if mode == STRINGIO else an empty string '''
    if isinstance(self.__report_file, io.StringIO):
        return self.__report_file.getvalue()
    else:
        return ''
[ "def", "content", "(", "self", ")", ":", "if", "isinstance", "(", "self", ".", "__report_file", ",", "io", ".", "StringIO", ")", ":", "return", "self", ".", "__report_file", ".", "getvalue", "(", ")", "else", ":", "return", "''" ]
Return report content as a string if mode == STRINGIO else an empty string
[ "Return", "report", "content", "as", "a", "string", "if", "mode", "==", "STRINGIO", "else", "an", "empty", "string" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/leutile.py#L317-L322
letuananh/chirptext
chirptext/leutile.py
Table.format
def format(self):
    ''' Format table to print out '''
    self.max_lengths = []
    for row in self.rows:
        if len(self.max_lengths) < len(row):
            self.max_lengths += [0] * (len(row) - len(self.max_lengths))
        for idx, val in enumerate(row):
            len_cell = len(str(val)) if val else 0
            if self.max_lengths[idx] < len_cell:
                self.max_lengths[idx] = len_cell
    return self.max_lengths
python
def format(self):
    ''' Format table to print out '''
    self.max_lengths = []
    for row in self.rows:
        if len(self.max_lengths) < len(row):
            self.max_lengths += [0] * (len(row) - len(self.max_lengths))
        for idx, val in enumerate(row):
            len_cell = len(str(val)) if val else 0
            if self.max_lengths[idx] < len_cell:
                self.max_lengths[idx] = len_cell
    return self.max_lengths
[ "def", "format", "(", "self", ")", ":", "self", ".", "max_lengths", "=", "[", "]", "for", "row", "in", "self", ".", "rows", ":", "if", "len", "(", "self", ".", "max_lengths", ")", "<", "len", "(", "row", ")", ":", "self", ".", "max_lengths", "+=", "[", "0", "]", "*", "(", "len", "(", "row", ")", "-", "len", "(", "self", ".", "max_lengths", ")", ")", "for", "idx", ",", "val", "in", "enumerate", "(", "row", ")", ":", "len_cell", "=", "len", "(", "str", "(", "val", ")", ")", "if", "val", "else", "0", "if", "self", ".", "max_lengths", "[", "idx", "]", "<", "len_cell", ":", "self", ".", "max_lengths", "[", "idx", "]", "=", "len_cell", "return", "self", ".", "max_lengths" ]
Format table to print out
[ "Format", "table", "to", "print", "out" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/leutile.py#L446-L457
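format() only computes column widths: each entry is the length of the longest stringified cell in that column. A worked sketch, assuming rows is a plain list of lists as the loop suggests (the constructor call is an assumption):

t = Table()                        # constructor is an assumption
t.rows.append(["id", "name"])
t.rows.append([1, "Stanley Park"])
print(t.format())                  # -> [2, 12]: len("id")=2, len("Stanley Park")=12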
letuananh/chirptext
chirptext/leutile.py
FileHelper.getfullfilename
def getfullfilename(file_path):
    ''' Get full filename (with extension) '''
    warnings.warn("getfullfilename() is deprecated and will be removed in near future. Use chirptext.io.write_file() instead", DeprecationWarning)
    if file_path:
        return os.path.basename(file_path)
    else:
        return ''
python
def getfullfilename(file_path):
    ''' Get full filename (with extension) '''
    warnings.warn("getfullfilename() is deprecated and will be removed in near future. Use chirptext.io.write_file() instead", DeprecationWarning)
    if file_path:
        return os.path.basename(file_path)
    else:
        return ''
[ "def", "getfullfilename", "(", "file_path", ")", ":", "warnings", ".", "warn", "(", "\"getfullfilename() is deprecated and will be removed in near future. Use chirptext.io.write_file() instead\"", ",", "DeprecationWarning", ")", "if", "file_path", ":", "return", "os", ".", "path", ".", "basename", "(", "file_path", ")", "else", ":", "return", "''" ]
Get full filename (with extension)
[ "Get", "full", "filename", "(", "with", "extension", ")" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/leutile.py#L499-L506
letuananh/chirptext
chirptext/leutile.py
FileHelper.replace_ext
def replace_ext(file_path, ext):
    ''' Change extension of a file_path to something else (provide None to remove) '''
    if not file_path:
        raise Exception("File path cannot be empty")
    dirname = os.path.dirname(file_path)
    filename = FileHelper.getfilename(file_path)
    if ext:
        filename = filename + '.' + ext
    return os.path.join(dirname, filename)
python
def replace_ext(file_path, ext):
    ''' Change extension of a file_path to something else (provide None to remove) '''
    if not file_path:
        raise Exception("File path cannot be empty")
    dirname = os.path.dirname(file_path)
    filename = FileHelper.getfilename(file_path)
    if ext:
        filename = filename + '.' + ext
    return os.path.join(dirname, filename)
[ "def", "replace_ext", "(", "file_path", ",", "ext", ")", ":", "if", "not", "file_path", ":", "raise", "Exception", "(", "\"File path cannot be empty\"", ")", "dirname", "=", "os", ".", "path", ".", "dirname", "(", "file_path", ")", "filename", "=", "FileHelper", ".", "getfilename", "(", "file_path", ")", "if", "ext", ":", "filename", "=", "filename", "+", "'.'", "+", "ext", "return", "os", ".", "path", ".", "join", "(", "dirname", ",", "filename", ")" ]
Change extension of a file_path to something else (provide None to remove)
[ "Change", "extension", "of", "a", "file_path", "to", "something", "else", "(", "provide", "None", "to", "remove", ")" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/leutile.py#L509-L517
letuananh/chirptext
chirptext/leutile.py
FileHelper.replace_name
def replace_name(file_path, new_name): ''' Change the file name in a path but keep the extension ''' if not file_path: raise Exception("File path cannot be empty") elif not new_name: raise Exception("New name cannot be empty") dirname = os.path.dirname(file_path) ext = os.path.splitext(os.path.basename(file_path))[1] return os.path.join(dirname, new_name + ext)
python
def replace_name(file_path, new_name):
    ''' Change the file name in a path but keep the extension '''
    if not file_path:
        raise Exception("File path cannot be empty")
    elif not new_name:
        raise Exception("New name cannot be empty")
    dirname = os.path.dirname(file_path)
    ext = os.path.splitext(os.path.basename(file_path))[1]
    return os.path.join(dirname, new_name + ext)
[ "def", "replace_name", "(", "file_path", ",", "new_name", ")", ":", "if", "not", "file_path", ":", "raise", "Exception", "(", "\"File path cannot be empty\"", ")", "elif", "not", "new_name", ":", "raise", "Exception", "(", "\"New name cannot be empty\"", ")", "dirname", "=", "os", ".", "path", ".", "dirname", "(", "file_path", ")", "ext", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "file_path", ")", ")", "[", "1", "]", "return", "os", ".", "path", ".", "join", "(", "dirname", ",", "new_name", "+", "ext", ")" ]
Change the file name in a path but keep the extension
[ "Change", "the", "file", "name", "in", "a", "path", "but", "keep", "the", "extension" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/leutile.py#L520-L528
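Worked examples for the two path helpers above (the paths are illustrative):

FileHelper.replace_ext("/tmp/data.csv", "json")     # -> "/tmp/data.json"
FileHelper.replace_ext("/tmp/data.csv", None)       # -> "/tmp/data" (extension removed)
FileHelper.replace_name("/tmp/data.csv", "backup")  # -> "/tmp/backup.csv"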
letuananh/chirptext
chirptext/leutile.py
FileHelper.get_child_folders
def get_child_folders(path):
    ''' Get all child folders of a folder '''
    path = FileHelper.abspath(path)
    return [dirname for dirname in os.listdir(path)
            if os.path.isdir(os.path.join(path, dirname))]
python
def get_child_folders(path):
    ''' Get all child folders of a folder '''
    path = FileHelper.abspath(path)
    return [dirname for dirname in os.listdir(path)
            if os.path.isdir(os.path.join(path, dirname))]
[ "def", "get_child_folders", "(", "path", ")", ":", "path", "=", "FileHelper", ".", "abspath", "(", "path", ")", "return", "[", "dirname", "for", "dirname", "in", "os", ".", "listdir", "(", "path", ")", "if", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "path", ",", "dirname", ")", ")", "]" ]
Get all child folders of a folder
[ "Get", "all", "child", "folders", "of", "a", "folder" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/leutile.py#L544-L547
letuananh/chirptext
chirptext/leutile.py
FileHelper.get_child_files
def get_child_files(path):
    ''' Get all child files of a folder '''
    path = FileHelper.abspath(path)
    return [filename for filename in os.listdir(path)
            if os.path.isfile(os.path.join(path, filename))]
python
def get_child_files(path):
    ''' Get all child files of a folder '''
    path = FileHelper.abspath(path)
    return [filename for filename in os.listdir(path)
            if os.path.isfile(os.path.join(path, filename))]
[ "def", "get_child_files", "(", "path", ")", ":", "path", "=", "FileHelper", ".", "abspath", "(", "path", ")", "return", "[", "filename", "for", "filename", "in", "os", ".", "listdir", "(", "path", ")", "if", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "path", ",", "filename", ")", ")", "]" ]
Get all child files of a folder
[ "Get", "all", "child", "files", "of", "a", "folder" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/leutile.py#L550-L553
letuananh/chirptext
chirptext/leutile.py
FileHelper.remove_file
def remove_file(filepath):
    ''' Delete a file '''
    try:
        os.remove(os.path.abspath(os.path.expanduser(filepath)))
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise
python
def remove_file(filepath):
    ''' Delete a file '''
    try:
        os.remove(os.path.abspath(os.path.expanduser(filepath)))
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise
[ "def", "remove_file", "(", "filepath", ")", ":", "try", ":", "os", ".", "remove", "(", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "expanduser", "(", "filepath", ")", ")", ")", "except", "OSError", "as", "e", ":", "if", "e", ".", "errno", "!=", "errno", ".", "ENOENT", ":", "raise" ]
Delete a file
[ "Delete", "a", "file" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/leutile.py#L556-L562
letuananh/chirptext
chirptext/leutile.py
AppConfig._ptn2fn
def _ptn2fn(self, pattern):
    ''' Pattern to filename '''
    return [pattern.format(wd=self.working_dir, n=self.__name, mode=self.__mode),
            pattern.format(wd=self.working_dir, n='{}.{}'.format(self.__name, self.__mode), mode=self.__mode)]
python
def _ptn2fn(self, pattern):
    ''' Pattern to filename '''
    return [pattern.format(wd=self.working_dir, n=self.__name, mode=self.__mode),
            pattern.format(wd=self.working_dir, n='{}.{}'.format(self.__name, self.__mode), mode=self.__mode)]
[ "def", "_ptn2fn", "(", "self", ",", "pattern", ")", ":", "return", "[", "pattern", ".", "format", "(", "wd", "=", "self", ".", "working_dir", ",", "n", "=", "self", ".", "__name", ",", "mode", "=", "self", ".", "__mode", ")", ",", "pattern", ".", "format", "(", "wd", "=", "self", ".", "working_dir", ",", "n", "=", "'{}.{}'", ".", "format", "(", "self", ".", "__name", ",", "self", ".", "__mode", ")", ",", "mode", "=", "self", ".", "__mode", ")", "]" ]
Pattern to filename
[ "Pattern", "to", "filename" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/leutile.py#L601-L604
letuananh/chirptext
chirptext/leutile.py
AppConfig.add_potential
def add_potential(self, *patterns):
    ''' Add a potential config file pattern '''
    for ptn in patterns:
        self.__potential.extend(self._ptn2fn(ptn))
python
def add_potential(self, *patterns):
    ''' Add a potential config file pattern '''
    for ptn in patterns:
        self.__potential.extend(self._ptn2fn(ptn))
[ "def", "add_potential", "(", "self", ",", "*", "patterns", ")", ":", "for", "ptn", "in", "patterns", ":", "self", ".", "__potential", ".", "extend", "(", "self", ".", "_ptn2fn", "(", "ptn", ")", ")" ]
Add a potential config file pattern
[ "Add", "a", "potential", "config", "file", "pattern" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/leutile.py#L606-L609
letuananh/chirptext
chirptext/leutile.py
AppConfig.locate_config
def locate_config(self):
    ''' Locate config file '''
    for f in self.__potential:
        f = FileHelper.abspath(f)
        if os.path.isfile(f):
            return f
    return None
python
def locate_config(self):
    ''' Locate config file '''
    for f in self.__potential:
        f = FileHelper.abspath(f)
        if os.path.isfile(f):
            return f
    return None
[ "def", "locate_config", "(", "self", ")", ":", "for", "f", "in", "self", ".", "__potential", ":", "f", "=", "FileHelper", ".", "abspath", "(", "f", ")", "if", "os", ".", "path", ".", "isfile", "(", "f", ")", ":", "return", "f", "return", "None" ]
Locate config file
[ "Locate", "config", "file" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/leutile.py#L611-L617
letuananh/chirptext
chirptext/leutile.py
AppConfig.config
def config(self):
    ''' Read config automatically if required '''
    if self.__config is None:
        config_path = self.locate_config()
        if config_path:
            self.__config = self.read_file(config_path)
            self.__config_path = config_path
    return self.__config
python
def config(self):
    ''' Read config automatically if required '''
    if self.__config is None:
        config_path = self.locate_config()
        if config_path:
            self.__config = self.read_file(config_path)
            self.__config_path = config_path
    return self.__config
[ "def", "config", "(", "self", ")", ":", "if", "self", ".", "__config", "is", "None", ":", "config_path", "=", "self", ".", "locate_config", "(", ")", "if", "config_path", ":", "self", ".", "__config", "=", "self", ".", "read_file", "(", "config_path", ")", "self", ".", "__config_path", "=", "config_path", "return", "self", ".", "__config" ]
Read config automatically if required
[ "Read", "config", "automatically", "if", "required" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/leutile.py#L620-L627
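Putting the AppConfig pieces together: patterns registered via add_potential are expanded by _ptn2fn (with {wd}, {n} and {mode} placeholders), and the first pattern that resolves to an existing file wins on first access to config. A sketch; the constructor arguments are an assumption, since only the methods appear in these records:

cfg = AppConfig("myapp", mode=AppConfig.INI)        # constructor args assumed
cfg.add_potential("{wd}/{n}.ini", "~/.{n}/config")  # each pattern expands twice
print(cfg.config)  # first access locates and parses the file, or returns None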
letuananh/chirptext
chirptext/leutile.py
AppConfig.read_file
def read_file(self, file_path):
    ''' Read a configuration file and return configuration data '''
    getLogger().info("Loading app config from {} file: {}".format(self.__mode, file_path))
    if self.__mode == AppConfig.JSON:
        return json.loads(FileHelper.read(file_path), object_pairs_hook=OrderedDict)
    elif self.__mode == AppConfig.INI:
        config = configparser.ConfigParser(allow_no_value=True)
        config.read(file_path)
        return config
python
def read_file(self, file_path):
    ''' Read a configuration file and return configuration data '''
    getLogger().info("Loading app config from {} file: {}".format(self.__mode, file_path))
    if self.__mode == AppConfig.JSON:
        return json.loads(FileHelper.read(file_path), object_pairs_hook=OrderedDict)
    elif self.__mode == AppConfig.INI:
        config = configparser.ConfigParser(allow_no_value=True)
        config.read(file_path)
        return config
[ "def", "read_file", "(", "self", ",", "file_path", ")", ":", "getLogger", "(", ")", ".", "info", "(", "\"Loading app config from {} file: {}\"", ".", "format", "(", "self", ".", "__mode", ",", "file_path", ")", ")", "if", "self", ".", "__mode", "==", "AppConfig", ".", "JSON", ":", "return", "json", ".", "loads", "(", "FileHelper", ".", "read", "(", "file_path", ")", ",", "object_pairs_hook", "=", "OrderedDict", ")", "elif", "self", ".", "__mode", "==", "AppConfig", ".", "INI", ":", "config", "=", "configparser", ".", "ConfigParser", "(", "allow_no_value", "=", "True", ")", "config", ".", "read", "(", "file_path", ")", "return", "config" ]
Read a configuration file and return configuration data
[ "Read", "a", "configuration", "file", "and", "return", "configuration", "data" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/leutile.py#L629-L637
letuananh/chirptext
chirptext/leutile.py
AppConfig.load
def load(self, file_path):
    ''' Load configuration from a specific file '''
    self.clear()
    self.__config = self.read_file(file_path)
python
def load(self, file_path):
    ''' Load configuration from a specific file '''
    self.clear()
    self.__config = self.read_file(file_path)
[ "def", "load", "(", "self", ",", "file_path", ")", ":", "self", ".", "clear", "(", ")", "self", ".", "__config", "=", "self", ".", "read_file", "(", "file_path", ")" ]
Load configuration from a specific file
[ "Load", "configuration", "from", "a", "specific", "file" ]
train
https://github.com/letuananh/chirptext/blob/ce60b47257b272a587c8703ea1f86cd1a45553a7/chirptext/leutile.py#L639-L642
alefnula/tea
tea/process/wrapper.py
get_processes
def get_processes(sort_by_name=True):
    """Retrieve a list of processes sorted by name.

    Args:
        sort_by_name (bool): Sort the list by name or by process ID's.

    Returns:
        list of (int, str) or list of (int, str, str): List of process id,
            process name and optional cmdline tuples.
    """
    if sort_by_name:
        return sorted(
            _list_processes(),
            key=cmp_to_key(
                lambda p1, p2: (cmp(p1.name, p2.name) or cmp(p1.pid, p2.pid))
            ),
        )
    else:
        return sorted(
            _list_processes(),
            key=cmp_to_key(
                lambda p1, p2: (cmp(p1.pid, p2.pid) or cmp(p1.name, p2.name))
            ),
        )
python
def get_processes(sort_by_name=True):
    """Retrieve a list of processes sorted by name.

    Args:
        sort_by_name (bool): Sort the list by name or by process ID's.

    Returns:
        list of (int, str) or list of (int, str, str): List of process id,
            process name and optional cmdline tuples.
    """
    if sort_by_name:
        return sorted(
            _list_processes(),
            key=cmp_to_key(
                lambda p1, p2: (cmp(p1.name, p2.name) or cmp(p1.pid, p2.pid))
            ),
        )
    else:
        return sorted(
            _list_processes(),
            key=cmp_to_key(
                lambda p1, p2: (cmp(p1.pid, p2.pid) or cmp(p1.name, p2.name))
            ),
        )
[ "def", "get_processes", "(", "sort_by_name", "=", "True", ")", ":", "if", "sort_by_name", ":", "return", "sorted", "(", "_list_processes", "(", ")", ",", "key", "=", "cmp_to_key", "(", "lambda", "p1", ",", "p2", ":", "(", "cmp", "(", "p1", ".", "name", ",", "p2", ".", "name", ")", "or", "cmp", "(", "p1", ".", "pid", ",", "p2", ".", "pid", ")", ")", ")", ",", ")", "else", ":", "return", "sorted", "(", "_list_processes", "(", ")", ",", "key", "=", "cmp_to_key", "(", "lambda", "p1", ",", "p2", ":", "(", "cmp", "(", "p1", ".", "pid", ",", "p2", ".", "pid", ")", "or", "cmp", "(", "p1", ".", "name", ",", "p2", ".", "name", ")", ")", ")", ",", ")" ]
Retrieve a list of processes sorted by name.

Args:
    sort_by_name (bool): Sort the list by name or by process ID's.

Returns:
    list of (int, str) or list of (int, str, str): List of process id,
        process name and optional cmdline tuples.
[ "Retrieve", "a", "list", "of", "processes", "sorted", "by", "name", ".", "Args", ":", "sort_by_name", "(", "bool", ")", ":", "Sort", "the", "list", "by", "name", "or", "by", "process", "ID", "s", ".", "Returns", ":", "list", "of", "(", "int", "str", ")", "or", "list", "of", "(", "int", "str", "str", ")", ":", "List", "of", "process", "id", "process", "name", "and", "optional", "cmdline", "tuples", "." ]
train
https://github.com/alefnula/tea/blob/f5a0a724a425ec4f9dd2c7fe966ef06faf3a15a3/tea/process/wrapper.py#L36-L59
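The comparator chains name-then-pid (or pid-then-name), so ties are broken deterministically; a quick listing sketch using the .pid and .name attributes the comparators rely on:

for p in get_processes()[:5]:                    # five alphabetically-first processes
    print(p.pid, p.name)
for p in get_processes(sort_by_name=False)[:5]:  # lowest pids first
    print(p.pid, p.name)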
alefnula/tea
tea/process/wrapper.py
find
def find(name, arg=None):
    """Find process by name or by argument in command line.

    Args:
        name (str): Process name to search for.
        arg (str): Command line argument for a process to search for.

    Returns:
        tea.process.base.IProcess: Process object if found.
    """
    for p in get_processes():
        if p.name.lower().find(name.lower()) != -1:
            if arg is not None:
                for a in p.cmdline or []:
                    if a.lower().find(arg.lower()) != -1:
                        return p
            else:
                return p
    return None
python
def find(name, arg=None):
    """Find process by name or by argument in command line.

    Args:
        name (str): Process name to search for.
        arg (str): Command line argument for a process to search for.

    Returns:
        tea.process.base.IProcess: Process object if found.
    """
    for p in get_processes():
        if p.name.lower().find(name.lower()) != -1:
            if arg is not None:
                for a in p.cmdline or []:
                    if a.lower().find(arg.lower()) != -1:
                        return p
            else:
                return p
    return None
[ "def", "find", "(", "name", ",", "arg", "=", "None", ")", ":", "for", "p", "in", "get_processes", "(", ")", ":", "if", "p", ".", "name", ".", "lower", "(", ")", ".", "find", "(", "name", ".", "lower", "(", ")", ")", "!=", "-", "1", ":", "if", "arg", "is", "not", "None", ":", "for", "a", "in", "p", ".", "cmdline", "or", "[", "]", ":", "if", "a", ".", "lower", "(", ")", ".", "find", "(", "arg", ".", "lower", "(", ")", ")", "!=", "-", "1", ":", "return", "p", "else", ":", "return", "p", "return", "None" ]
Find process by name or by argument in command line.

Args:
    name (str): Process name to search for.
    arg (str): Command line argument for a process to search for.

Returns:
    tea.process.base.IProcess: Process object if found.
[ "Find", "process", "by", "name", "or", "by", "argument", "in", "command", "line", ".", "Args", ":", "name", "(", "str", ")", ":", "Process", "name", "to", "search", "for", ".", "arg", "(", "str", ")", ":", "Command", "line", "argument", "for", "a", "process", "to", "search", "for", ".", "Returns", ":", "tea", ".", "process", ".", "base", ".", "IProcess", ":", "Process", "object", "if", "found", "." ]
train
https://github.com/alefnula/tea/blob/f5a0a724a425ec4f9dd2c7fe966ef06faf3a15a3/tea/process/wrapper.py#L62-L80
alefnula/tea
tea/process/wrapper.py
execute
def execute(command, *args, **kwargs):
    """Execute a command with arguments and wait for output.

    Arguments should not be quoted!

    Keyword arguments:
        env (dict): Dictionary of additional environment variables.
        wait (bool): Wait for the process to finish.

    Example::

        >>> code = "import sys;sys.stdout.write('out');sys.exit(0)"
        >>> status, out, err = execute('python', '-c', code)
        >>> print('status: %s, output: %s, error: %s' % (status, out, err))
        status: 0, output: out, error:
        >>> code = "import sys;sys.stderr.write('err');sys.exit(1)"
        >>> status, out, err = execute('python', '-c', code)
        >>> print('status: %s, output: %s, error: %s' % (status, out, err))
        status: 1, output: , error: err
    """
    wait = kwargs.pop("wait", True)
    process = Process(command, args, env=kwargs.pop("env", None))
    process.start()
    if not wait:
        return process
    process.wait()
    return process.exit_code, process.read(), process.eread()
python
def execute(command, *args, **kwargs):
    """Execute a command with arguments and wait for output.

    Arguments should not be quoted!

    Keyword arguments:
        env (dict): Dictionary of additional environment variables.
        wait (bool): Wait for the process to finish.

    Example::

        >>> code = "import sys;sys.stdout.write('out');sys.exit(0)"
        >>> status, out, err = execute('python', '-c', code)
        >>> print('status: %s, output: %s, error: %s' % (status, out, err))
        status: 0, output: out, error:
        >>> code = "import sys;sys.stderr.write('err');sys.exit(1)"
        >>> status, out, err = execute('python', '-c', code)
        >>> print('status: %s, output: %s, error: %s' % (status, out, err))
        status: 1, output: , error: err
    """
    wait = kwargs.pop("wait", True)
    process = Process(command, args, env=kwargs.pop("env", None))
    process.start()
    if not wait:
        return process
    process.wait()
    return process.exit_code, process.read(), process.eread()
[ "def", "execute", "(", "command", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "wait", "=", "kwargs", ".", "pop", "(", "\"wait\"", ",", "True", ")", "process", "=", "Process", "(", "command", ",", "args", ",", "env", "=", "kwargs", ".", "pop", "(", "\"env\"", ",", "None", ")", ")", "process", ".", "start", "(", ")", "if", "not", "wait", ":", "return", "process", "process", ".", "wait", "(", ")", "return", "process", ".", "exit_code", ",", "process", ".", "read", "(", ")", ",", "process", ".", "eread", "(", ")" ]
Execute a command with arguments and wait for output.

    Arguments should not be quoted!

    Keyword arguments:
        env (dict): Dictionary of additional environment variables.
        wait (bool): Wait for the process to finish.

    Example::

        >>> code = "import sys;sys.stdout.write('out');sys.exit(0)"
        >>> status, out, err = execute('python', '-c', code)
        >>> print('status: %s, output: %s, error: %s' % (status, out, err))
        status: 0, output: out, error:
        >>> code = "import sys;sys.stderr.write('err');sys.exit(1)"
        >>> status, out, err = execute('python', '-c', code)
        >>> print('status: %s, output: %s, error: %s' % (status, out, err))
        status: 1, output: , error: err
[ "Execute", "a", "command", "with", "arguments", "and", "wait", "for", "output", ".", "Arguments", "should", "not", "be", "quoted!", "Keyword", "arguments", ":", "env", "(", "dict", ")", ":", "Dictionary", "of", "additional", "environment", "variables", ".", "wait", "(", "bool", ")", ":", "Wait", "for", "the", "process", "to", "finish", ".", "Example", "::", ">>>", "code", "=", "import", "sys", ";", "sys", ".", "stdout", ".", "write", "(", "out", ")", ";", "sys", ".", "exit", "(", "0", ")", ">>>", "status", "out", "err", "=", "execute", "(", "python", "-", "c", "code", ")", ">>>", "print", "(", "status", ":", "%s", "output", ":", "%s", "error", ":", "%s", "%", "(", "status", "out", "err", "))", "status", ":", "0", "output", ":", "out", "error", ":", ">>>", "code", "=", "import", "sys", ";", "sys", ".", "stderr", ".", "write", "(", "out", ")", ";", "sys", ".", "exit", "(", "1", ")", ">>>", "status", "out", "err", "=", "execute", "(", "python", "-", "c", "code", ")", ">>>", "print", "(", "status", ":", "%s", "output", ":", "%s", "error", ":", "%s", "%", "(", "status", "out", "err", "))", "status", ":", "1", "output", ":", "error", ":", "err" ]
train
https://github.com/alefnula/tea/blob/f5a0a724a425ec4f9dd2c7fe966ef06faf3a15a3/tea/process/wrapper.py#L83-L109
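The docstring demonstrates the blocking path; a short sketch of the non-blocking path follows, since `wait=False` changes the return value from a `(exit_code, stdout, stderr)` tuple to the started `Process` object. This mirrors the body above but is an illustration, not documented API:

from tea.process.wrapper import execute  # import path taken from func_code_url

# Blocking: returns (exit_code, stdout, stderr) once the process finishes.
status, out, err = execute('python', '-c', "print('hello')")

# Non-blocking: returns the Process object immediately; wait explicitly later.
proc = execute('sleep', '5', wait=False)
proc.wait()
print(proc.exit_code, proc.read(), proc.eread())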
alefnula/tea
tea/process/wrapper.py
execute_and_report
def execute_and_report(command, *args, **kwargs):
    """Execute a command with arguments and wait for output.

    If execution was successful, the function will return True; if not,
    it will log the output using standard logging and return False.
    """
    logging.info("Execute: %s %s" % (command, " ".join(args)))
    try:
        status, out, err = execute(command, *args, **kwargs)
        if status == 0:
            logging.info(
                "%s Finished successfully. Exit Code: 0.",
                os.path.basename(command),
            )
            return True
        else:
            try:
                logging.error(
                    "%s failed! Exit Code: %s\nOut: %s\nError: %s",
                    os.path.basename(command),
                    status,
                    out,
                    err,
                )
            except Exception as e:
                # This fails when some non ASCII characters are returned
                # from the application
                logging.error(
                    "%s failed [%s]! Exit Code: %s\nOut: %s\nError: %s",
                    e,
                    os.path.basename(command),
                    status,
                    repr(out),
                    repr(err),
                )
            return False
    except Exception:
        logging.exception(
            "%s failed! Exception thrown!", os.path.basename(command)
        )
        return False
python
def execute_and_report(command, *args, **kwargs):
    """Execute a command with arguments and wait for output.

    If execution was successful, the function will return True; if not,
    it will log the output using standard logging and return False.
    """
    logging.info("Execute: %s %s" % (command, " ".join(args)))
    try:
        status, out, err = execute(command, *args, **kwargs)
        if status == 0:
            logging.info(
                "%s Finished successfully. Exit Code: 0.",
                os.path.basename(command),
            )
            return True
        else:
            try:
                logging.error(
                    "%s failed! Exit Code: %s\nOut: %s\nError: %s",
                    os.path.basename(command),
                    status,
                    out,
                    err,
                )
            except Exception as e:
                # This fails when some non ASCII characters are returned
                # from the application
                logging.error(
                    "%s failed [%s]! Exit Code: %s\nOut: %s\nError: %s",
                    e,
                    os.path.basename(command),
                    status,
                    repr(out),
                    repr(err),
                )
            return False
    except Exception:
        logging.exception(
            "%s failed! Exception thrown!", os.path.basename(command)
        )
        return False
[ "def", "execute_and_report", "(", "command", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "logging", ".", "info", "(", "\"Execute: %s %s\"", "%", "(", "command", ",", "\" \"", ".", "join", "(", "args", ")", ")", ")", "try", ":", "status", ",", "out", ",", "err", "=", "execute", "(", "command", ",", "*", "args", ",", "*", "*", "kwargs", ")", "if", "status", "==", "0", ":", "logging", ".", "info", "(", "\"%s Finished successfully. Exit Code: 0.\"", ",", "os", ".", "path", ".", "basename", "(", "command", ")", ",", ")", "return", "True", "else", ":", "try", ":", "logging", ".", "error", "(", "\"%s failed! Exit Code: %s\\nOut: %s\\nError: %s\"", ",", "os", ".", "path", ".", "basename", "(", "command", ")", ",", "status", ",", "out", ",", "err", ",", ")", "except", "Exception", "as", "e", ":", "# This fails when some non ASCII characters are returned\r", "# from the application\r", "logging", ".", "error", "(", "\"%s failed [%s]! Exit Code: %s\\nOut: %s\\nError: %s\"", ",", "e", ",", "os", ".", "path", ".", "basename", "(", "command", ")", ",", "status", ",", "repr", "(", "out", ")", ",", "repr", "(", "err", ")", ",", ")", "return", "False", "except", "Exception", ":", "logging", ".", "exception", "(", "\"%s failed! Exception thrown!\"", ",", "os", ".", "path", ".", "basename", "(", "command", ")", ")", "return", "False" ]
Execute a command with arguments and wait for output.

    If execution was successful, the function will return True; if not,
    it will log the output using standard logging and return False.
[ "Execute", "a", "command", "with", "arguments", "and", "wait", "for", "output", ".", "If", "execution", "was", "successful", "function", "will", "return", "True", "if", "not", "it", "will", "log", "the", "output", "using", "standard", "logging", "and", "return", "False", "." ]
train
https://github.com/alefnula/tea/blob/f5a0a724a425ec4f9dd2c7fe966ef06faf3a15a3/tea/process/wrapper.py#L112-L152
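A minimal sketch of the intended call pattern; the command ('make', 'build') is a placeholder, and logging must be configured for the reported output to be visible:

import logging

from tea.process.wrapper import execute_and_report  # path from func_code_url

logging.basicConfig(level=logging.INFO)

# True on exit code 0; on failure the output is logged and False is returned.
if not execute_and_report('make', 'build'):
    raise SystemExit('build step failed; see log output above')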
jonhadfield/creds
lib/creds/ssh.py
read_authorized_keys
def read_authorized_keys(username=None):
    """Read public keys from specified user's authorized_keys file.

    args:
        username (str): username.

    returns:
        list: Authorised keys for the specified user.
    """
    authorized_keys_path = '{0}/.ssh/authorized_keys'.format(os.path.expanduser('~{0}'.format(username)))
    rnd_chars = random_string(length=RANDOM_FILE_EXT_LENGTH)
    tmp_authorized_keys_path = '/tmp/authorized_keys_{0}_{1}'.format(username, rnd_chars)
    authorized_keys = list()
    copy_result = execute_command(
        shlex.split(str('{0} cp {1} {2}'.format(sudo_check(), authorized_keys_path, tmp_authorized_keys_path))))
    result_message = copy_result[0][1].decode('UTF-8')
    if 'you must have a tty to run sudo' in result_message:  # pragma: no cover
        raise OSError("/etc/sudoers is blocking sudo. Remove entry: 'Defaults requiretty'.")
    elif 'No such file or directory' not in result_message:
        execute_command(shlex.split(str('{0} chmod 755 {1}'.format(sudo_check(), tmp_authorized_keys_path))))
        with open(tmp_authorized_keys_path) as keys_file:
            for key in keys_file:
                authorized_keys.append(PublicKey(raw=key))
        execute_command(shlex.split(str('{0} rm {1}'.format(sudo_check(), tmp_authorized_keys_path))))
    return authorized_keys
python
def read_authorized_keys(username=None):
    """Read public keys from specified user's authorized_keys file.

    args:
        username (str): username.

    returns:
        list: Authorised keys for the specified user.
    """
    authorized_keys_path = '{0}/.ssh/authorized_keys'.format(os.path.expanduser('~{0}'.format(username)))
    rnd_chars = random_string(length=RANDOM_FILE_EXT_LENGTH)
    tmp_authorized_keys_path = '/tmp/authorized_keys_{0}_{1}'.format(username, rnd_chars)
    authorized_keys = list()
    copy_result = execute_command(
        shlex.split(str('{0} cp {1} {2}'.format(sudo_check(), authorized_keys_path, tmp_authorized_keys_path))))
    result_message = copy_result[0][1].decode('UTF-8')
    if 'you must have a tty to run sudo' in result_message:  # pragma: no cover
        raise OSError("/etc/sudoers is blocking sudo. Remove entry: 'Defaults requiretty'.")
    elif 'No such file or directory' not in result_message:
        execute_command(shlex.split(str('{0} chmod 755 {1}'.format(sudo_check(), tmp_authorized_keys_path))))
        with open(tmp_authorized_keys_path) as keys_file:
            for key in keys_file:
                authorized_keys.append(PublicKey(raw=key))
        execute_command(shlex.split(str('{0} rm {1}'.format(sudo_check(), tmp_authorized_keys_path))))
    return authorized_keys
[ "def", "read_authorized_keys", "(", "username", "=", "None", ")", ":", "authorized_keys_path", "=", "'{0}/.ssh/authorized_keys'", ".", "format", "(", "os", ".", "path", ".", "expanduser", "(", "'~{0}'", ".", "format", "(", "username", ")", ")", ")", "rnd_chars", "=", "random_string", "(", "length", "=", "RANDOM_FILE_EXT_LENGTH", ")", "tmp_authorized_keys_path", "=", "'/tmp/authorized_keys_{0}_{1}'", ".", "format", "(", "username", ",", "rnd_chars", ")", "authorized_keys", "=", "list", "(", ")", "copy_result", "=", "execute_command", "(", "shlex", ".", "split", "(", "str", "(", "'{0} cp {1} {2}'", ".", "format", "(", "sudo_check", "(", ")", ",", "authorized_keys_path", ",", "tmp_authorized_keys_path", ")", ")", ")", ")", "result_message", "=", "copy_result", "[", "0", "]", "[", "1", "]", ".", "decode", "(", "'UTF-8'", ")", "if", "'you must have a tty to run sudo'", "in", "result_message", ":", "# pragma: no cover", "raise", "OSError", "(", "\"/etc/sudoers is blocked sudo. Remove entry: 'Defaults requiretty'.\"", ")", "elif", "'No such file or directory'", "not", "in", "result_message", ":", "execute_command", "(", "shlex", ".", "split", "(", "str", "(", "'{0} chmod 755 {1}'", ".", "format", "(", "sudo_check", "(", ")", ",", "tmp_authorized_keys_path", ")", ")", ")", ")", "with", "open", "(", "tmp_authorized_keys_path", ")", "as", "keys_file", ":", "for", "key", "in", "keys_file", ":", "authorized_keys", ".", "append", "(", "PublicKey", "(", "raw", "=", "key", ")", ")", "execute_command", "(", "shlex", ".", "split", "(", "str", "(", "'{0} rm {1}'", ".", "format", "(", "sudo_check", "(", ")", ",", "tmp_authorized_keys_path", ")", ")", ")", ")", "return", "authorized_keys" ]
Read public keys from specified user's authorized_keys file.

    args:
        username (str): username.

    returns:
        list: Authorised keys for the specified user.
[ "Read", "public", "keys", "from", "specified", "user", "s", "authorized_keys", "file", "." ]
train
https://github.com/jonhadfield/creds/blob/b2053b43516cf742c6e4c2b79713bc625592f47c/lib/creds/ssh.py#L63-L87
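A hedged usage sketch: the import path assumes the repository's `lib/creds` directory is installed as the `creds` package (an assumption), and the call requires passwordless sudo because the implementation copies the file via `sudo cp` before reading it; the username 'deploy' is a placeholder:

from creds.ssh import read_authorized_keys  # import path is an assumption

# One PublicKey per line of ~deploy/.ssh/authorized_keys.
for key in read_authorized_keys(username='deploy'):
    print(key.b64encoded)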
jonhadfield/creds
lib/creds/ssh.py
write_authorized_keys
def write_authorized_keys(user=None):
    """Write public keys back to authorized_keys file.

    Create keys directory if it doesn't already exist.

    args:
        user (User): Instance of User containing keys.
    """
    authorized_keys = list()
    authorized_keys_dir = '{0}/.ssh'.format(os.path.expanduser('~{0}'.format(user.name)))
    rnd_chars = random_string(length=RANDOM_FILE_EXT_LENGTH)
    authorized_keys_path = '{0}/authorized_keys'.format(authorized_keys_dir)
    tmp_authorized_keys_path = '/tmp/authorized_keys_{0}_{1}'.format(user.name, rnd_chars)
    if not os.path.isdir(authorized_keys_dir):
        execute_command(shlex.split(str('{0} mkdir -p {1}'.format(sudo_check(), authorized_keys_dir))))
    for key in user.public_keys:
        authorized_keys.append('{0}\n'.format(key.raw))
    with open(tmp_authorized_keys_path, mode=text_type('w+')) as keys_file:
        keys_file.writelines(authorized_keys)
    execute_command(
        shlex.split(str('{0} cp {1} {2}'.format(sudo_check(), tmp_authorized_keys_path, authorized_keys_path))))
    execute_command(shlex.split(str('{0} chown -R {1} {2}'.format(sudo_check(), user.name, authorized_keys_dir))))
    execute_command(shlex.split(str('{0} chmod 700 {1}'.format(sudo_check(), authorized_keys_dir))))
    execute_command(shlex.split(str('{0} chmod 600 {1}'.format(sudo_check(), authorized_keys_path))))
    execute_command(shlex.split(str('{0} rm {1}'.format(sudo_check(), tmp_authorized_keys_path))))
python
def write_authorized_keys(user=None):
    """Write public keys back to authorized_keys file.

    Create keys directory if it doesn't already exist.

    args:
        user (User): Instance of User containing keys.
    """
    authorized_keys = list()
    authorized_keys_dir = '{0}/.ssh'.format(os.path.expanduser('~{0}'.format(user.name)))
    rnd_chars = random_string(length=RANDOM_FILE_EXT_LENGTH)
    authorized_keys_path = '{0}/authorized_keys'.format(authorized_keys_dir)
    tmp_authorized_keys_path = '/tmp/authorized_keys_{0}_{1}'.format(user.name, rnd_chars)
    if not os.path.isdir(authorized_keys_dir):
        execute_command(shlex.split(str('{0} mkdir -p {1}'.format(sudo_check(), authorized_keys_dir))))
    for key in user.public_keys:
        authorized_keys.append('{0}\n'.format(key.raw))
    with open(tmp_authorized_keys_path, mode=text_type('w+')) as keys_file:
        keys_file.writelines(authorized_keys)
    execute_command(
        shlex.split(str('{0} cp {1} {2}'.format(sudo_check(), tmp_authorized_keys_path, authorized_keys_path))))
    execute_command(shlex.split(str('{0} chown -R {1} {2}'.format(sudo_check(), user.name, authorized_keys_dir))))
    execute_command(shlex.split(str('{0} chmod 700 {1}'.format(sudo_check(), authorized_keys_dir))))
    execute_command(shlex.split(str('{0} chmod 600 {1}'.format(sudo_check(), authorized_keys_path))))
    execute_command(shlex.split(str('{0} rm {1}'.format(sudo_check(), tmp_authorized_keys_path))))
[ "def", "write_authorized_keys", "(", "user", "=", "None", ")", ":", "authorized_keys", "=", "list", "(", ")", "authorized_keys_dir", "=", "'{0}/.ssh'", ".", "format", "(", "os", ".", "path", ".", "expanduser", "(", "'~{0}'", ".", "format", "(", "user", ".", "name", ")", ")", ")", "rnd_chars", "=", "random_string", "(", "length", "=", "RANDOM_FILE_EXT_LENGTH", ")", "authorized_keys_path", "=", "'{0}/authorized_keys'", ".", "format", "(", "authorized_keys_dir", ")", "tmp_authorized_keys_path", "=", "'/tmp/authorized_keys_{0}_{1}'", ".", "format", "(", "user", ".", "name", ",", "rnd_chars", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "authorized_keys_dir", ")", ":", "execute_command", "(", "shlex", ".", "split", "(", "str", "(", "'{0} mkdir -p {1}'", ".", "format", "(", "sudo_check", "(", ")", ",", "authorized_keys_dir", ")", ")", ")", ")", "for", "key", "in", "user", ".", "public_keys", ":", "authorized_keys", ".", "append", "(", "'{0}\\n'", ".", "format", "(", "key", ".", "raw", ")", ")", "with", "open", "(", "tmp_authorized_keys_path", ",", "mode", "=", "text_type", "(", "'w+'", ")", ")", "as", "keys_file", ":", "keys_file", ".", "writelines", "(", "authorized_keys", ")", "execute_command", "(", "shlex", ".", "split", "(", "str", "(", "'{0} cp {1} {2}'", ".", "format", "(", "sudo_check", "(", ")", ",", "tmp_authorized_keys_path", ",", "authorized_keys_path", ")", ")", ")", ")", "execute_command", "(", "shlex", ".", "split", "(", "str", "(", "'{0} chown -R {1} {2}'", ".", "format", "(", "sudo_check", "(", ")", ",", "user", ".", "name", ",", "authorized_keys_dir", ")", ")", ")", ")", "execute_command", "(", "shlex", ".", "split", "(", "str", "(", "'{0} chmod 700 {1}'", ".", "format", "(", "sudo_check", "(", ")", ",", "authorized_keys_dir", ")", ")", ")", ")", "execute_command", "(", "shlex", ".", "split", "(", "str", "(", "'{0} chmod 600 {1}'", ".", "format", "(", "sudo_check", "(", ")", ",", "authorized_keys_path", ")", ")", ")", ")", "execute_command", "(", "shlex", ".", "split", "(", "str", "(", "'{0} rm {1}'", ".", "format", "(", "sudo_check", "(", ")", ",", "tmp_authorized_keys_path", ")", ")", ")", ")" ]
Write public keys back to authorized_keys file.

    Create keys directory if it doesn't already exist.

    args:
        user (User): Instance of User containing keys.
[ "Write", "public", "keys", "back", "to", "authorized_keys", "file", ".", "Create", "keys", "directory", "if", "it", "doesn", "t", "already", "exist", "." ]
train
https://github.com/jonhadfield/creds/blob/b2053b43516cf742c6e4c2b79713bc625592f47c/lib/creds/ssh.py#L90-L116
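A round-trip sketch built only on what the two functions show: read a user's keys, then write them back. The `User` stand-in below is an assumption; the real class presumably lives elsewhere in the library and exposes at least `name` and `public_keys`:

from creds.ssh import read_authorized_keys, write_authorized_keys  # paths assumed

class User(object):
    # Minimal stand-in exposing the two attributes the function reads.
    def __init__(self, name, public_keys):
        self.name = name
        self.public_keys = public_keys

keys = read_authorized_keys(username='deploy')
write_authorized_keys(user=User(name='deploy', public_keys=keys))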
jonhadfield/creds
lib/creds/ssh.py
PublicKey.b64encoded
def b64encoded(self):
    """Return a base64 encoding of the key.

    returns:
        str: base64 encoding of the public key
    """
    if self._b64encoded:
        return text_type(self._b64encoded).strip("\r\n")
    else:
        return base64encode(self.raw)
python
def b64encoded(self):
    """Return a base64 encoding of the key.

    returns:
        str: base64 encoding of the public key
    """
    if self._b64encoded:
        return text_type(self._b64encoded).strip("\r\n")
    else:
        return base64encode(self.raw)
[ "def", "b64encoded", "(", "self", ")", ":", "if", "self", ".", "_b64encoded", ":", "return", "text_type", "(", "self", ".", "_b64encoded", ")", ".", "strip", "(", "\"\\r\\n\"", ")", "else", ":", "return", "base64encode", "(", "self", ".", "raw", ")" ]
Return a base64 encoding of the key.

    returns:
        str: base64 encoding of the public key
[ "Return", "a", "base64", "encoding", "of", "the", "key", "." ]
train
https://github.com/jonhadfield/creds/blob/b2053b43516cf742c6e4c2b79713bc625592f47c/lib/creds/ssh.py#L31-L40
jonhadfield/creds
lib/creds/ssh.py
PublicKey.raw
def raw(self):
    """Return raw key.

    returns:
        str: raw key
    """
    if self._raw:
        return text_type(self._raw).strip("\r\n")
    else:
        return text_type(base64decode(self._b64encoded)).strip("\r\n")
python
def raw(self):
    """Return raw key.

    returns:
        str: raw key
    """
    if self._raw:
        return text_type(self._raw).strip("\r\n")
    else:
        return text_type(base64decode(self._b64encoded)).strip("\r\n")
[ "def", "raw", "(", "self", ")", ":", "if", "self", ".", "_raw", ":", "return", "text_type", "(", "self", ".", "_raw", ")", ".", "strip", "(", "\"\\r\\n\"", ")", "else", ":", "return", "text_type", "(", "base64decode", "(", "self", ".", "_b64encoded", ")", ")", ".", "strip", "(", "\"\\r\\n\"", ")" ]
Return raw key.

    returns:
        str: raw key
[ "Return", "raw", "key", "." ]
train
https://github.com/jonhadfield/creds/blob/b2053b43516cf742c6e4c2b79713bc625592f47c/lib/creds/ssh.py#L43-L52
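Taken together, the `b64encoded` and `raw` properties above suggest that `PublicKey` stores whichever representation it was given and derives the other on demand. A sketch of the round trip; the `b64encoded=` constructor keyword is inferred from the `_b64encoded` attribute and is an assumption, as is the sample key material:

from creds.ssh import PublicKey  # import path assumed

key = PublicKey(raw='ssh-rsa AAAAB3NzaC1yc2E... user@host')
encoded = key.b64encoded          # base64 of the raw key line

# Construct from the encoded form and recover the same raw key.
same = PublicKey(b64encoded=encoded)
assert same.raw == key.raw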
night-crawler/django-docker-helpers
django_docker_helpers/config/backends/consul_parser.py
ConsulParser.inner_parser
def inner_parser(self) -> BaseParser:
    """
    Prepares inner config parser for config stored at ``endpoint``.

    :return: an instance of :class:`~django_docker_helpers.config.backends.base.BaseParser`
    :raises config.exceptions.KVStorageKeyDoestNotExist: if specified ``endpoint`` does not exist
    :raises config.exceptions.KVStorageValueIsEmpty: if specified ``endpoint`` does not contain a config
    """
    if self._inner_parser is not None:
        return self._inner_parser

    __index, response_config = self.client.kv.get(self.endpoint, **self.kv_get_opts)
    if not response_config:
        raise KVStorageKeyDoestNotExist('Key does not exist: `{0}`'.format(self.endpoint))

    config = response_config['Value']
    if not config or config is self.sentinel:
        raise KVStorageValueIsEmpty('Read empty config by key `{0}`'.format(self.endpoint))

    config = config.decode()

    self._inner_parser = self.inner_parser_class(
        config=io.StringIO(config),
        path_separator=self.path_separator,
        scope=None
    )
    return self._inner_parser
python
def inner_parser(self) -> BaseParser:
    """
    Prepares inner config parser for config stored at ``endpoint``.

    :return: an instance of :class:`~django_docker_helpers.config.backends.base.BaseParser`
    :raises config.exceptions.KVStorageKeyDoestNotExist: if specified ``endpoint`` does not exist
    :raises config.exceptions.KVStorageValueIsEmpty: if specified ``endpoint`` does not contain a config
    """
    if self._inner_parser is not None:
        return self._inner_parser

    __index, response_config = self.client.kv.get(self.endpoint, **self.kv_get_opts)
    if not response_config:
        raise KVStorageKeyDoestNotExist('Key does not exist: `{0}`'.format(self.endpoint))

    config = response_config['Value']
    if not config or config is self.sentinel:
        raise KVStorageValueIsEmpty('Read empty config by key `{0}`'.format(self.endpoint))

    config = config.decode()

    self._inner_parser = self.inner_parser_class(
        config=io.StringIO(config),
        path_separator=self.path_separator,
        scope=None
    )
    return self._inner_parser
[ "def", "inner_parser", "(", "self", ")", "->", "BaseParser", ":", "if", "self", ".", "_inner_parser", "is", "not", "None", ":", "return", "self", ".", "_inner_parser", "__index", ",", "response_config", "=", "self", ".", "client", ".", "kv", ".", "get", "(", "self", ".", "endpoint", ",", "*", "*", "self", ".", "kv_get_opts", ")", "if", "not", "response_config", ":", "raise", "KVStorageKeyDoestNotExist", "(", "'Key does not exist: `{0}`'", ".", "format", "(", "self", ".", "endpoint", ")", ")", "config", "=", "response_config", "[", "'Value'", "]", "if", "not", "config", "or", "config", "is", "self", ".", "sentinel", ":", "raise", "KVStorageValueIsEmpty", "(", "'Read empty config by key `{0}`'", ".", "format", "(", "self", ".", "endpoint", ")", ")", "config", "=", "config", ".", "decode", "(", ")", "self", ".", "_inner_parser", "=", "self", ".", "inner_parser_class", "(", "config", "=", "io", ".", "StringIO", "(", "config", ")", ",", "path_separator", "=", "self", ".", "path_separator", ",", "scope", "=", "None", ")", "return", "self", ".", "_inner_parser" ]
Prepares inner config parser for config stored at ``endpoint``.

    :return: an instance of :class:`~django_docker_helpers.config.backends.base.BaseParser`
    :raises config.exceptions.KVStorageKeyDoestNotExist: if specified ``endpoint`` does not exist
    :raises config.exceptions.KVStorageValueIsEmpty: if specified ``endpoint`` does not contain a config
[ "Prepares", "inner", "config", "parser", "for", "config", "stored", "at", "endpoint", "." ]
train
https://github.com/night-crawler/django-docker-helpers/blob/b64f8009101a8eb61d3841124ba19e3ab881aa2f/django_docker_helpers/config/backends/consul_parser.py#L80-L108
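A hedged sketch of how this cached property is typically reached; the constructor arguments and the import path are assumptions based on the file path in func_code_url, while the lazy-caching behaviour follows directly from the body above:

from django_docker_helpers.config.backends.consul_parser import ConsulParser  # path assumed

# Assumes a config document (e.g. YAML) stored at this Consul KV key;
# the key name is illustrative, and constructor kwargs are assumptions.
parser = ConsulParser(endpoint='myproject/config.yml')

# First access performs client.kv.get and builds the inner parser;
# later accesses return the cached instance.
inner = parser.inner_parser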