repo (string, 7-55 chars) | path (string, 4-127 chars) | func_name (string, 1-88 chars) | original_string (string, 75-19.8k chars) | language (1 value: python) | code (string, 75-19.8k chars) | code_tokens (list) | docstring (string, 3-17.3k chars) | docstring_tokens (list) | sha (string, 40 chars) | url (string, 87-242 chars) | partition (1 value: train) |
---|---|---|---|---|---|---|---|---|---|---|---|
tamasgal/km3pipe | km3pipe/controlhost.py | Client._reconnect | def _reconnect(self):
"""Reconnect to JLigier and subscribe to the tags."""
log.debug("Reconnecting to JLigier...")
self._disconnect()
self._connect()
self._update_subscriptions() | python | def _reconnect(self):
"""Reconnect to JLigier and subscribe to the tags."""
log.debug("Reconnecting to JLigier...")
self._disconnect()
self._connect()
self._update_subscriptions() | [
"def",
"_reconnect",
"(",
"self",
")",
":",
"log",
".",
"debug",
"(",
"\"Reconnecting to JLigier...\"",
")",
"self",
".",
"_disconnect",
"(",
")",
"self",
".",
"_connect",
"(",
")",
"self",
".",
"_update_subscriptions",
"(",
")"
] | Reconnect to JLigier and subscribe to the tags. | [
"Reconnect",
"to",
"JLigier",
"and",
"subscribe",
"to",
"the",
"tags",
"."
] | 7a9b59ac899a28775b5bdc5d391d9a5340d08040 | https://github.com/tamasgal/km3pipe/blob/7a9b59ac899a28775b5bdc5d391d9a5340d08040/km3pipe/controlhost.py#L136-L141 | train |
tamasgal/km3pipe | km3pipe/controlhost.py | Tag.data | def data(self, value):
"""Set the byte data and fill up the bytes to fit the size."""
if not value:
value = b''
if len(value) > self.SIZE:
raise ValueError("The maximum tag size is {0}".format(self.SIZE))
self._data = value
while len(self._data) < self.SIZE:
self._data += b'\x00' | python | def data(self, value):
"""Set the byte data and fill up the bytes to fit the size."""
if not value:
value = b''
if len(value) > self.SIZE:
raise ValueError("The maximum tag size is {0}".format(self.SIZE))
self._data = value
while len(self._data) < self.SIZE:
self._data += b'\x00' | [
"def",
"data",
"(",
"self",
",",
"value",
")",
":",
"if",
"not",
"value",
":",
"value",
"=",
"b''",
"if",
"len",
"(",
"value",
")",
">",
"self",
".",
"SIZE",
":",
"raise",
"ValueError",
"(",
"\"The maximum tag size is {0}\"",
".",
"format",
"(",
"self",
".",
"SIZE",
")",
")",
"self",
".",
"_data",
"=",
"value",
"while",
"len",
"(",
"self",
".",
"_data",
")",
"<",
"self",
".",
"SIZE",
":",
"self",
".",
"_data",
"+=",
"b'\\x00'"
] | Set the byte data and fill up the bytes to fit the size. | [
"Set",
"the",
"byte",
"data",
"and",
"fill",
"up",
"the",
"bytes",
"to",
"fit",
"the",
"size",
"."
] | 7a9b59ac899a28775b5bdc5d391d9a5340d08040 | https://github.com/tamasgal/km3pipe/blob/7a9b59ac899a28775b5bdc5d391d9a5340d08040/km3pipe/controlhost.py#L177-L185 | train |
valdergallo/pyconst | pyconst/const.py | Const.add | def add(self, name, attr=None, value=None):
"Set values in constant"
if isinstance(name, tuple) or isinstance(name, list):
name, attr, value = self.__set_iter_value(name)
if attr is None:
attr = name
if value is None:
value = attr
self.__data += (self.get_const_string(name=name, value=value),)
# set attribute as slugfiy
self.__dict__[s_attr(attr)] = self.__data[-1] | python | def add(self, name, attr=None, value=None):
"Set values in constant"
if isinstance(name, tuple) or isinstance(name, list):
name, attr, value = self.__set_iter_value(name)
if attr is None:
attr = name
if value is None:
value = attr
self.__data += (self.get_const_string(name=name, value=value),)
# set attribute as slugfiy
self.__dict__[s_attr(attr)] = self.__data[-1] | [
"def",
"add",
"(",
"self",
",",
"name",
",",
"attr",
"=",
"None",
",",
"value",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"name",
",",
"tuple",
")",
"or",
"isinstance",
"(",
"name",
",",
"list",
")",
":",
"name",
",",
"attr",
",",
"value",
"=",
"self",
".",
"__set_iter_value",
"(",
"name",
")",
"if",
"attr",
"is",
"None",
":",
"attr",
"=",
"name",
"if",
"value",
"is",
"None",
":",
"value",
"=",
"attr",
"self",
".",
"__data",
"+=",
"(",
"self",
".",
"get_const_string",
"(",
"name",
"=",
"name",
",",
"value",
"=",
"value",
")",
",",
")",
"# set attribute as slugfiy",
"self",
".",
"__dict__",
"[",
"s_attr",
"(",
"attr",
")",
"]",
"=",
"self",
".",
"__data",
"[",
"-",
"1",
"]"
] | Set values in constant | [
"Set",
"values",
"in",
"constant"
] | af4cbc8d91ffab601ac5e45e5480f20c5462064d | https://github.com/valdergallo/pyconst/blob/af4cbc8d91ffab601ac5e45e5480f20c5462064d/pyconst/const.py#L54-L68 | train |
breerly/threadloop | threadloop/threadloop.py | ThreadLoop.start | def start(self):
"""Start IOLoop in daemonized thread."""
assert self._thread is None, 'thread already started'
# configure thread
self._thread = Thread(target=self._start_io_loop)
self._thread.daemon = True
# begin thread and block until ready
self._thread.start()
self._ready.wait() | python | def start(self):
"""Start IOLoop in daemonized thread."""
assert self._thread is None, 'thread already started'
# configure thread
self._thread = Thread(target=self._start_io_loop)
self._thread.daemon = True
# begin thread and block until ready
self._thread.start()
self._ready.wait() | [
"def",
"start",
"(",
"self",
")",
":",
"assert",
"self",
".",
"_thread",
"is",
"None",
",",
"'thread already started'",
"# configure thread",
"self",
".",
"_thread",
"=",
"Thread",
"(",
"target",
"=",
"self",
".",
"_start_io_loop",
")",
"self",
".",
"_thread",
".",
"daemon",
"=",
"True",
"# begin thread and block until ready",
"self",
".",
"_thread",
".",
"start",
"(",
")",
"self",
".",
"_ready",
".",
"wait",
"(",
")"
] | Start IOLoop in daemonized thread. | [
"Start",
"IOLoop",
"in",
"daemonized",
"thread",
"."
] | 9d4f83660fd79fe15d741b831be9ee28dccacc30 | https://github.com/breerly/threadloop/blob/9d4f83660fd79fe15d741b831be9ee28dccacc30/threadloop/threadloop.py#L56-L66 | train |
breerly/threadloop | threadloop/threadloop.py | ThreadLoop._start_io_loop | def _start_io_loop(self):
"""Start IOLoop then set ready threading.Event."""
def mark_as_ready():
self._ready.set()
if not self._io_loop:
self._io_loop = ioloop.IOLoop()
self._io_loop.add_callback(mark_as_ready)
self._io_loop.start() | python | def _start_io_loop(self):
"""Start IOLoop then set ready threading.Event."""
def mark_as_ready():
self._ready.set()
if not self._io_loop:
self._io_loop = ioloop.IOLoop()
self._io_loop.add_callback(mark_as_ready)
self._io_loop.start() | [
"def",
"_start_io_loop",
"(",
"self",
")",
":",
"def",
"mark_as_ready",
"(",
")",
":",
"self",
".",
"_ready",
".",
"set",
"(",
")",
"if",
"not",
"self",
".",
"_io_loop",
":",
"self",
".",
"_io_loop",
"=",
"ioloop",
".",
"IOLoop",
"(",
")",
"self",
".",
"_io_loop",
".",
"add_callback",
"(",
"mark_as_ready",
")",
"self",
".",
"_io_loop",
".",
"start",
"(",
")"
] | Start IOLoop then set ready threading.Event. | [
"Start",
"IOLoop",
"then",
"set",
"ready",
"threading",
".",
"Event",
"."
] | 9d4f83660fd79fe15d741b831be9ee28dccacc30 | https://github.com/breerly/threadloop/blob/9d4f83660fd79fe15d741b831be9ee28dccacc30/threadloop/threadloop.py#L68-L78 | train |
breerly/threadloop | threadloop/threadloop.py | ThreadLoop.is_ready | def is_ready(self):
"""Is thread & ioloop ready.
:returns bool:
"""
if not self._thread:
return False
if not self._ready.is_set():
return False
return True | python | def is_ready(self):
"""Is thread & ioloop ready.
:returns bool:
"""
if not self._thread:
return False
if not self._ready.is_set():
return False
return True | [
"def",
"is_ready",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_thread",
":",
"return",
"False",
"if",
"not",
"self",
".",
"_ready",
".",
"is_set",
"(",
")",
":",
"return",
"False",
"return",
"True"
] | Is thread & ioloop ready.
:returns bool: | [
"Is",
"thread",
"&",
"ioloop",
"ready",
"."
] | 9d4f83660fd79fe15d741b831be9ee28dccacc30 | https://github.com/breerly/threadloop/blob/9d4f83660fd79fe15d741b831be9ee28dccacc30/threadloop/threadloop.py#L80-L91 | train |
breerly/threadloop | threadloop/threadloop.py | ThreadLoop.submit | def submit(self, fn, *args, **kwargs):
"""Submit Tornado Coroutine to IOLoop in daemonized thread.
:param fn: Tornado Coroutine to execute
:param args: Args to pass to coroutine
:param kwargs: Kwargs to pass to coroutine
:returns concurrent.futures.Future: future result of coroutine
"""
if not self.is_ready():
raise ThreadNotStartedError(
"The thread has not been started yet, "
"make sure you call start() first"
)
future = Future()
def execute():
"""Executes fn on the IOLoop."""
try:
result = gen.maybe_future(fn(*args, **kwargs))
except Exception:
# The function we ran didn't return a future and instead raised
# an exception. Let's pretend that it returned this dummy
# future with our stack trace.
f = gen.Future()
f.set_exc_info(sys.exc_info())
on_done(f)
else:
result.add_done_callback(on_done)
def on_done(f):
"""Sets tornado.Future results to the concurrent.Future."""
if not f.exception():
future.set_result(f.result())
return
# if f is a tornado future, then it has exc_info()
if hasattr(f, 'exc_info'):
exception, traceback = f.exc_info()[1:]
# else it's a concurrent.future
else:
# python2's concurrent.future has exception_info()
if hasattr(f, 'exception_info'):
exception, traceback = f.exception_info()
# python3's concurrent.future just has exception()
else:
exception = f.exception()
traceback = None
# python2 needs exc_info set explicitly
if _FUTURE_HAS_EXC_INFO:
future.set_exception_info(exception, traceback)
return
# python3 just needs the exception, exc_info works fine
future.set_exception(exception)
self._io_loop.add_callback(execute)
return future | python | def submit(self, fn, *args, **kwargs):
"""Submit Tornado Coroutine to IOLoop in daemonized thread.
:param fn: Tornado Coroutine to execute
:param args: Args to pass to coroutine
:param kwargs: Kwargs to pass to coroutine
:returns concurrent.futures.Future: future result of coroutine
"""
if not self.is_ready():
raise ThreadNotStartedError(
"The thread has not been started yet, "
"make sure you call start() first"
)
future = Future()
def execute():
"""Executes fn on the IOLoop."""
try:
result = gen.maybe_future(fn(*args, **kwargs))
except Exception:
# The function we ran didn't return a future and instead raised
# an exception. Let's pretend that it returned this dummy
# future with our stack trace.
f = gen.Future()
f.set_exc_info(sys.exc_info())
on_done(f)
else:
result.add_done_callback(on_done)
def on_done(f):
"""Sets tornado.Future results to the concurrent.Future."""
if not f.exception():
future.set_result(f.result())
return
# if f is a tornado future, then it has exc_info()
if hasattr(f, 'exc_info'):
exception, traceback = f.exc_info()[1:]
# else it's a concurrent.future
else:
# python2's concurrent.future has exception_info()
if hasattr(f, 'exception_info'):
exception, traceback = f.exception_info()
# python3's concurrent.future just has exception()
else:
exception = f.exception()
traceback = None
# python2 needs exc_info set explicitly
if _FUTURE_HAS_EXC_INFO:
future.set_exception_info(exception, traceback)
return
# python3 just needs the exception, exc_info works fine
future.set_exception(exception)
self._io_loop.add_callback(execute)
return future | [
"def",
"submit",
"(",
"self",
",",
"fn",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"self",
".",
"is_ready",
"(",
")",
":",
"raise",
"ThreadNotStartedError",
"(",
"\"The thread has not been started yet, \"",
"\"make sure you call start() first\"",
")",
"future",
"=",
"Future",
"(",
")",
"def",
"execute",
"(",
")",
":",
"\"\"\"Executes fn on the IOLoop.\"\"\"",
"try",
":",
"result",
"=",
"gen",
".",
"maybe_future",
"(",
"fn",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
")",
"except",
"Exception",
":",
"# The function we ran didn't return a future and instead raised",
"# an exception. Let's pretend that it returned this dummy",
"# future with our stack trace.",
"f",
"=",
"gen",
".",
"Future",
"(",
")",
"f",
".",
"set_exc_info",
"(",
"sys",
".",
"exc_info",
"(",
")",
")",
"on_done",
"(",
"f",
")",
"else",
":",
"result",
".",
"add_done_callback",
"(",
"on_done",
")",
"def",
"on_done",
"(",
"f",
")",
":",
"\"\"\"Sets tornado.Future results to the concurrent.Future.\"\"\"",
"if",
"not",
"f",
".",
"exception",
"(",
")",
":",
"future",
".",
"set_result",
"(",
"f",
".",
"result",
"(",
")",
")",
"return",
"# if f is a tornado future, then it has exc_info()",
"if",
"hasattr",
"(",
"f",
",",
"'exc_info'",
")",
":",
"exception",
",",
"traceback",
"=",
"f",
".",
"exc_info",
"(",
")",
"[",
"1",
":",
"]",
"# else it's a concurrent.future",
"else",
":",
"# python2's concurrent.future has exception_info()",
"if",
"hasattr",
"(",
"f",
",",
"'exception_info'",
")",
":",
"exception",
",",
"traceback",
"=",
"f",
".",
"exception_info",
"(",
")",
"# python3's concurrent.future just has exception()",
"else",
":",
"exception",
"=",
"f",
".",
"exception",
"(",
")",
"traceback",
"=",
"None",
"# python2 needs exc_info set explicitly",
"if",
"_FUTURE_HAS_EXC_INFO",
":",
"future",
".",
"set_exception_info",
"(",
"exception",
",",
"traceback",
")",
"return",
"# python3 just needs the exception, exc_info works fine",
"future",
".",
"set_exception",
"(",
"exception",
")",
"self",
".",
"_io_loop",
".",
"add_callback",
"(",
"execute",
")",
"return",
"future"
] | Submit Tornado Coroutine to IOLoop in daemonized thread.
:param fn: Tornado Coroutine to execute
:param args: Args to pass to coroutine
:param kwargs: Kwargs to pass to coroutine
:returns concurrent.futures.Future: future result of coroutine | [
"Submit",
"Tornado",
"Coroutine",
"to",
"IOLoop",
"in",
"daemonized",
"thread",
"."
] | 9d4f83660fd79fe15d741b831be9ee28dccacc30 | https://github.com/breerly/threadloop/blob/9d4f83660fd79fe15d741b831be9ee28dccacc30/threadloop/threadloop.py#L98-L160 | train |
tamasgal/km3pipe | km3pipe/sys.py | peak_memory_usage | def peak_memory_usage():
"""Return peak memory usage in MB"""
if sys.platform.startswith('win'):
p = psutil.Process()
return p.memory_info().peak_wset / 1024 / 1024
mem = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
factor_mb = 1 / 1024
if sys.platform == 'darwin':
factor_mb = 1 / (1024 * 1024)
return mem * factor_mb | python | def peak_memory_usage():
"""Return peak memory usage in MB"""
if sys.platform.startswith('win'):
p = psutil.Process()
return p.memory_info().peak_wset / 1024 / 1024
mem = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
factor_mb = 1 / 1024
if sys.platform == 'darwin':
factor_mb = 1 / (1024 * 1024)
return mem * factor_mb | [
"def",
"peak_memory_usage",
"(",
")",
":",
"if",
"sys",
".",
"platform",
".",
"startswith",
"(",
"'win'",
")",
":",
"p",
"=",
"psutil",
".",
"Process",
"(",
")",
"return",
"p",
".",
"memory_info",
"(",
")",
".",
"peak_wset",
"/",
"1024",
"/",
"1024",
"mem",
"=",
"resource",
".",
"getrusage",
"(",
"resource",
".",
"RUSAGE_SELF",
")",
".",
"ru_maxrss",
"factor_mb",
"=",
"1",
"/",
"1024",
"if",
"sys",
".",
"platform",
"==",
"'darwin'",
":",
"factor_mb",
"=",
"1",
"/",
"(",
"1024",
"*",
"1024",
")",
"return",
"mem",
"*",
"factor_mb"
] | Return peak memory usage in MB | [
"Return",
"peak",
"memory",
"usage",
"in",
"MB"
] | 7a9b59ac899a28775b5bdc5d391d9a5340d08040 | https://github.com/tamasgal/km3pipe/blob/7a9b59ac899a28775b5bdc5d391d9a5340d08040/km3pipe/sys.py#L41-L51 | train |
PrefPy/prefpy | prefpy/profile.py | Profile.getPreferenceCounts | def getPreferenceCounts(self):
"""
Returns a list of the number of times each preference is given.
"""
preferenceCounts = []
for preference in self.preferences:
preferenceCounts.append(preference.count)
return preferenceCounts | python | def getPreferenceCounts(self):
"""
Returns a list of the number of times each preference is given.
"""
preferenceCounts = []
for preference in self.preferences:
preferenceCounts.append(preference.count)
return preferenceCounts | [
"def",
"getPreferenceCounts",
"(",
"self",
")",
":",
"preferenceCounts",
"=",
"[",
"]",
"for",
"preference",
"in",
"self",
".",
"preferences",
":",
"preferenceCounts",
".",
"append",
"(",
"preference",
".",
"count",
")",
"return",
"preferenceCounts"
] | Returns a list of the number of times each preference is given. | [
"Returns",
"a",
"list",
"of",
"the",
"number",
"of",
"times",
"each",
"preference",
"is",
"given",
"."
] | f395ba3782f05684fa5de0cece387a6da9391d02 | https://github.com/PrefPy/prefpy/blob/f395ba3782f05684fa5de0cece387a6da9391d02/prefpy/profile.py#L64-L72 | train |
PrefPy/prefpy | prefpy/profile.py | Profile.getRankMaps | def getRankMaps(self):
"""
Returns a list of dictionaries, one for each preference, that associates the integer
representation of each candidate with its position in the ranking, starting from 1 and
returns a list of the number of times each preference is given.
"""
rankMaps = []
for preference in self.preferences:
rankMaps.append(preference.getRankMap())
return rankMaps | python | def getRankMaps(self):
"""
Returns a list of dictionaries, one for each preference, that associates the integer
representation of each candidate with its position in the ranking, starting from 1 and
returns a list of the number of times each preference is given.
"""
rankMaps = []
for preference in self.preferences:
rankMaps.append(preference.getRankMap())
return rankMaps | [
"def",
"getRankMaps",
"(",
"self",
")",
":",
"rankMaps",
"=",
"[",
"]",
"for",
"preference",
"in",
"self",
".",
"preferences",
":",
"rankMaps",
".",
"append",
"(",
"preference",
".",
"getRankMap",
"(",
")",
")",
"return",
"rankMaps"
] | Returns a list of dictionaries, one for each preference, that associates the integer
representation of each candidate with its position in the ranking, starting from 1 and
returns a list of the number of times each preference is given. | [
"Returns",
"a",
"list",
"of",
"dictionaries",
"one",
"for",
"each",
"preference",
"that",
"associates",
"the",
"integer",
"representation",
"of",
"each",
"candidate",
"with",
"its",
"position",
"in",
"the",
"ranking",
"starting",
"from",
"1",
"and",
"returns",
"a",
"list",
"of",
"the",
"number",
"of",
"times",
"each",
"preference",
"is",
"given",
"."
] | f395ba3782f05684fa5de0cece387a6da9391d02 | https://github.com/PrefPy/prefpy/blob/f395ba3782f05684fa5de0cece387a6da9391d02/prefpy/profile.py#L74-L84 | train |
PrefPy/prefpy | prefpy/profile.py | Profile.getReverseRankMaps | def getReverseRankMaps(self):
"""
Returns a list of dictionaries, one for each preference, that associates each position in
the ranking with a list of integer representations of the candidates ranked at that
position and returns a list of the number of times each preference is given.
"""
reverseRankMaps = []
for preference in self.preferences:
reverseRankMaps.append(preference.getReverseRankMap())
return reverseRankMaps | python | def getReverseRankMaps(self):
"""
Returns a list of dictionaries, one for each preference, that associates each position in
the ranking with a list of integer representations of the candidates ranked at that
position and returns a list of the number of times each preference is given.
"""
reverseRankMaps = []
for preference in self.preferences:
reverseRankMaps.append(preference.getReverseRankMap())
return reverseRankMaps | [
"def",
"getReverseRankMaps",
"(",
"self",
")",
":",
"reverseRankMaps",
"=",
"[",
"]",
"for",
"preference",
"in",
"self",
".",
"preferences",
":",
"reverseRankMaps",
".",
"append",
"(",
"preference",
".",
"getReverseRankMap",
"(",
")",
")",
"return",
"reverseRankMaps"
] | Returns a list of dictionaries, one for each preference, that associates each position in
the ranking with a list of integer representations of the candidates ranked at that
position and returns a list of the number of times each preference is given. | [
"Returns",
"a",
"list",
"of",
"dictionaries",
"one",
"for",
"each",
"preference",
"that",
"associates",
"each",
"position",
"in",
"the",
"ranking",
"with",
"a",
"list",
"of",
"integer",
"representations",
"of",
"the",
"candidates",
"ranked",
"at",
"that",
"position",
"and",
"returns",
"a",
"list",
"of",
"the",
"number",
"of",
"times",
"each",
"preference",
"is",
"given",
"."
] | f395ba3782f05684fa5de0cece387a6da9391d02 | https://github.com/PrefPy/prefpy/blob/f395ba3782f05684fa5de0cece387a6da9391d02/prefpy/profile.py#L85-L95 | train |
PrefPy/prefpy | prefpy/profile.py | Profile.exportPreflibFile | def exportPreflibFile(self, fileName):
"""
Exports a preflib format file that contains all the information of the current Profile.
:ivar str fileName: The name of the output file to be exported.
"""
elecType = self.getElecType()
if elecType != "soc" and elecType != "toc" and elecType != "soi" and elecType != "toi":
print("ERROR: printing current type to preflib format is not supported")
exit()
# Generate a list of reverse rankMaps, one for each vote. This will allow us to easiliy
# identify ties.
reverseRankMaps = self.getReverseRankMaps()
outfileObj = open(fileName, 'w')
# Print the number of candidates and the integer representation and name of each candidate.
outfileObj.write(str(self.numCands))
for candInt, cand in self.candMap.items():
outfileObj.write("\n" + str(candInt) + "," + cand)
# Sum up the number of preferences that are represented.
preferenceCount = 0
for preference in self.preferences:
preferenceCount += preference.count
# Print the number of voters, the sum of vote count, and the number of unique orders.
outfileObj.write("\n" + str(self.numVoters) + "," + str(preferenceCount) + "," + str(len(self.preferences)))
for i in range(0, len(reverseRankMaps)):
# First, print the number of times the preference appears.
outfileObj.write("\n" + str(self.preferences[i].count))
reverseRankMap = reverseRankMaps[i]
# We sort the positions in increasing order and print the candidates at each position
# in order.
sortedKeys = sorted(reverseRankMap.keys())
for key in sortedKeys:
cands = reverseRankMap[key]
# If only one candidate is in a particular position, we assume there is no tie.
if len(cands) == 1:
outfileObj.write("," + str(cands[0]))
# If more than one candidate is in a particular position, they are tied. We print
# brackets around the candidates.
elif len(cands) > 1:
outfileObj.write(",{" + str(cands[0]))
for j in range(1, len(cands)):
outfileObj.write("," + str(cands[j]))
outfileObj.write("}")
outfileObj.close() | python | def exportPreflibFile(self, fileName):
"""
Exports a preflib format file that contains all the information of the current Profile.
:ivar str fileName: The name of the output file to be exported.
"""
elecType = self.getElecType()
if elecType != "soc" and elecType != "toc" and elecType != "soi" and elecType != "toi":
print("ERROR: printing current type to preflib format is not supported")
exit()
# Generate a list of reverse rankMaps, one for each vote. This will allow us to easiliy
# identify ties.
reverseRankMaps = self.getReverseRankMaps()
outfileObj = open(fileName, 'w')
# Print the number of candidates and the integer representation and name of each candidate.
outfileObj.write(str(self.numCands))
for candInt, cand in self.candMap.items():
outfileObj.write("\n" + str(candInt) + "," + cand)
# Sum up the number of preferences that are represented.
preferenceCount = 0
for preference in self.preferences:
preferenceCount += preference.count
# Print the number of voters, the sum of vote count, and the number of unique orders.
outfileObj.write("\n" + str(self.numVoters) + "," + str(preferenceCount) + "," + str(len(self.preferences)))
for i in range(0, len(reverseRankMaps)):
# First, print the number of times the preference appears.
outfileObj.write("\n" + str(self.preferences[i].count))
reverseRankMap = reverseRankMaps[i]
# We sort the positions in increasing order and print the candidates at each position
# in order.
sortedKeys = sorted(reverseRankMap.keys())
for key in sortedKeys:
cands = reverseRankMap[key]
# If only one candidate is in a particular position, we assume there is no tie.
if len(cands) == 1:
outfileObj.write("," + str(cands[0]))
# If more than one candidate is in a particular position, they are tied. We print
# brackets around the candidates.
elif len(cands) > 1:
outfileObj.write(",{" + str(cands[0]))
for j in range(1, len(cands)):
outfileObj.write("," + str(cands[j]))
outfileObj.write("}")
outfileObj.close() | [
"def",
"exportPreflibFile",
"(",
"self",
",",
"fileName",
")",
":",
"elecType",
"=",
"self",
".",
"getElecType",
"(",
")",
"if",
"elecType",
"!=",
"\"soc\"",
"and",
"elecType",
"!=",
"\"toc\"",
"and",
"elecType",
"!=",
"\"soi\"",
"and",
"elecType",
"!=",
"\"toi\"",
":",
"print",
"(",
"\"ERROR: printing current type to preflib format is not supported\"",
")",
"exit",
"(",
")",
"# Generate a list of reverse rankMaps, one for each vote. This will allow us to easiliy",
"# identify ties.",
"reverseRankMaps",
"=",
"self",
".",
"getReverseRankMaps",
"(",
")",
"outfileObj",
"=",
"open",
"(",
"fileName",
",",
"'w'",
")",
"# Print the number of candidates and the integer representation and name of each candidate.",
"outfileObj",
".",
"write",
"(",
"str",
"(",
"self",
".",
"numCands",
")",
")",
"for",
"candInt",
",",
"cand",
"in",
"self",
".",
"candMap",
".",
"items",
"(",
")",
":",
"outfileObj",
".",
"write",
"(",
"\"\\n\"",
"+",
"str",
"(",
"candInt",
")",
"+",
"\",\"",
"+",
"cand",
")",
"# Sum up the number of preferences that are represented.",
"preferenceCount",
"=",
"0",
"for",
"preference",
"in",
"self",
".",
"preferences",
":",
"preferenceCount",
"+=",
"preference",
".",
"count",
"# Print the number of voters, the sum of vote count, and the number of unique orders.",
"outfileObj",
".",
"write",
"(",
"\"\\n\"",
"+",
"str",
"(",
"self",
".",
"numVoters",
")",
"+",
"\",\"",
"+",
"str",
"(",
"preferenceCount",
")",
"+",
"\",\"",
"+",
"str",
"(",
"len",
"(",
"self",
".",
"preferences",
")",
")",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"reverseRankMaps",
")",
")",
":",
"# First, print the number of times the preference appears.",
"outfileObj",
".",
"write",
"(",
"\"\\n\"",
"+",
"str",
"(",
"self",
".",
"preferences",
"[",
"i",
"]",
".",
"count",
")",
")",
"reverseRankMap",
"=",
"reverseRankMaps",
"[",
"i",
"]",
"# We sort the positions in increasing order and print the candidates at each position",
"# in order.",
"sortedKeys",
"=",
"sorted",
"(",
"reverseRankMap",
".",
"keys",
"(",
")",
")",
"for",
"key",
"in",
"sortedKeys",
":",
"cands",
"=",
"reverseRankMap",
"[",
"key",
"]",
"# If only one candidate is in a particular position, we assume there is no tie.",
"if",
"len",
"(",
"cands",
")",
"==",
"1",
":",
"outfileObj",
".",
"write",
"(",
"\",\"",
"+",
"str",
"(",
"cands",
"[",
"0",
"]",
")",
")",
"# If more than one candidate is in a particular position, they are tied. We print",
"# brackets around the candidates.",
"elif",
"len",
"(",
"cands",
")",
">",
"1",
":",
"outfileObj",
".",
"write",
"(",
"\",{\"",
"+",
"str",
"(",
"cands",
"[",
"0",
"]",
")",
")",
"for",
"j",
"in",
"range",
"(",
"1",
",",
"len",
"(",
"cands",
")",
")",
":",
"outfileObj",
".",
"write",
"(",
"\",\"",
"+",
"str",
"(",
"cands",
"[",
"j",
"]",
")",
")",
"outfileObj",
".",
"write",
"(",
"\"}\"",
")",
"outfileObj",
".",
"close",
"(",
")"
] | Exports a preflib format file that contains all the information of the current Profile.
:ivar str fileName: The name of the output file to be exported. | [
"Exports",
"a",
"preflib",
"format",
"file",
"that",
"contains",
"all",
"the",
"information",
"of",
"the",
"current",
"Profile",
"."
] | f395ba3782f05684fa5de0cece387a6da9391d02 | https://github.com/PrefPy/prefpy/blob/f395ba3782f05684fa5de0cece387a6da9391d02/prefpy/profile.py#L201-L259 | train |
PrefPy/prefpy | prefpy/profile.py | Profile.importPreflibFile | def importPreflibFile(self, fileName):
"""
Imports a preflib format file that contains all the information of a Profile. This function
will completely override all members of the current Profile object. Currently, we assume
that in an election where incomplete ordering are allowed, if a voter ranks only one
candidate, then the voter did not prefer any candidates over another. This may lead to some
discrepancies when importing and exporting a .toi preflib file or a .soi preflib file.
:ivar str fileName: The name of the input file to be imported.
"""
# Use the functionality found in io to read the file.
elecFileObj = open(fileName, 'r')
self.candMap, rankMaps, wmgMapsCounts, self.numVoters = prefpy_io.read_election_file(elecFileObj)
elecFileObj.close()
self.numCands = len(self.candMap.keys())
# Go through the rankMaps and generate a wmgMap for each vote. Use the wmgMap to create a
# Preference object.
self.preferences = []
for i in range(0, len(rankMaps)):
wmgMap = self.genWmgMapFromRankMap(rankMaps[i])
self.preferences.append(Preference(wmgMap, wmgMapsCounts[i])) | python | def importPreflibFile(self, fileName):
"""
Imports a preflib format file that contains all the information of a Profile. This function
will completely override all members of the current Profile object. Currently, we assume
that in an election where incomplete ordering are allowed, if a voter ranks only one
candidate, then the voter did not prefer any candidates over another. This may lead to some
discrepancies when importing and exporting a .toi preflib file or a .soi preflib file.
:ivar str fileName: The name of the input file to be imported.
"""
# Use the functionality found in io to read the file.
elecFileObj = open(fileName, 'r')
self.candMap, rankMaps, wmgMapsCounts, self.numVoters = prefpy_io.read_election_file(elecFileObj)
elecFileObj.close()
self.numCands = len(self.candMap.keys())
# Go through the rankMaps and generate a wmgMap for each vote. Use the wmgMap to create a
# Preference object.
self.preferences = []
for i in range(0, len(rankMaps)):
wmgMap = self.genWmgMapFromRankMap(rankMaps[i])
self.preferences.append(Preference(wmgMap, wmgMapsCounts[i])) | [
"def",
"importPreflibFile",
"(",
"self",
",",
"fileName",
")",
":",
"# Use the functionality found in io to read the file.",
"elecFileObj",
"=",
"open",
"(",
"fileName",
",",
"'r'",
")",
"self",
".",
"candMap",
",",
"rankMaps",
",",
"wmgMapsCounts",
",",
"self",
".",
"numVoters",
"=",
"prefpy_io",
".",
"read_election_file",
"(",
"elecFileObj",
")",
"elecFileObj",
".",
"close",
"(",
")",
"self",
".",
"numCands",
"=",
"len",
"(",
"self",
".",
"candMap",
".",
"keys",
"(",
")",
")",
"# Go through the rankMaps and generate a wmgMap for each vote. Use the wmgMap to create a",
"# Preference object.",
"self",
".",
"preferences",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"rankMaps",
")",
")",
":",
"wmgMap",
"=",
"self",
".",
"genWmgMapFromRankMap",
"(",
"rankMaps",
"[",
"i",
"]",
")",
"self",
".",
"preferences",
".",
"append",
"(",
"Preference",
"(",
"wmgMap",
",",
"wmgMapsCounts",
"[",
"i",
"]",
")",
")"
] | Imports a preflib format file that contains all the information of a Profile. This function
will completely override all members of the current Profile object. Currently, we assume
that in an election where incomplete ordering are allowed, if a voter ranks only one
candidate, then the voter did not prefer any candidates over another. This may lead to some
discrepancies when importing and exporting a .toi preflib file or a .soi preflib file.
:ivar str fileName: The name of the input file to be imported. | [
"Imports",
"a",
"preflib",
"format",
"file",
"that",
"contains",
"all",
"the",
"information",
"of",
"a",
"Profile",
".",
"This",
"function",
"will",
"completely",
"override",
"all",
"members",
"of",
"the",
"current",
"Profile",
"object",
".",
"Currently",
"we",
"assume",
"that",
"in",
"an",
"election",
"where",
"incomplete",
"ordering",
"are",
"allowed",
"if",
"a",
"voter",
"ranks",
"only",
"one",
"candidate",
"then",
"the",
"voter",
"did",
"not",
"prefer",
"any",
"candidates",
"over",
"another",
".",
"This",
"may",
"lead",
"to",
"some",
"discrepancies",
"when",
"importing",
"and",
"exporting",
"a",
".",
"toi",
"preflib",
"file",
"or",
"a",
".",
"soi",
"preflib",
"file",
"."
] | f395ba3782f05684fa5de0cece387a6da9391d02 | https://github.com/PrefPy/prefpy/blob/f395ba3782f05684fa5de0cece387a6da9391d02/prefpy/profile.py#L261-L284 | train |
PrefPy/prefpy | prefpy/profile.py | Profile.exportJsonFile | def exportJsonFile(self, fileName):
"""
Exports a json file that contains all the information of the current Profile.
:ivar str fileName: The name of the output file to be exported.
"""
# Because our Profile class is not directly JSON serializable, we exporrt the underlying
# dictionary.
data = dict()
for key in self.__dict__.keys():
if key != "preferences":
data[key] = self.__dict__[key]
# The Preference class is also not directly JSON serializable, so we export the underlying
# dictionary for each Preference object.
preferenceDicts = []
for preference in self.preferences:
preferenceDict = dict()
for key in preference.__dict__.keys():
preferenceDict[key] = preference.__dict__[key]
preferenceDicts.append(preferenceDict)
data["preferences"] = preferenceDicts
outfile = open(fileName, 'w')
json.dump(data, outfile)
outfile.close() | python | def exportJsonFile(self, fileName):
"""
Exports a json file that contains all the information of the current Profile.
:ivar str fileName: The name of the output file to be exported.
"""
# Because our Profile class is not directly JSON serializable, we exporrt the underlying
# dictionary.
data = dict()
for key in self.__dict__.keys():
if key != "preferences":
data[key] = self.__dict__[key]
# The Preference class is also not directly JSON serializable, so we export the underlying
# dictionary for each Preference object.
preferenceDicts = []
for preference in self.preferences:
preferenceDict = dict()
for key in preference.__dict__.keys():
preferenceDict[key] = preference.__dict__[key]
preferenceDicts.append(preferenceDict)
data["preferences"] = preferenceDicts
outfile = open(fileName, 'w')
json.dump(data, outfile)
outfile.close() | [
"def",
"exportJsonFile",
"(",
"self",
",",
"fileName",
")",
":",
"# Because our Profile class is not directly JSON serializable, we exporrt the underlying ",
"# dictionary. ",
"data",
"=",
"dict",
"(",
")",
"for",
"key",
"in",
"self",
".",
"__dict__",
".",
"keys",
"(",
")",
":",
"if",
"key",
"!=",
"\"preferences\"",
":",
"data",
"[",
"key",
"]",
"=",
"self",
".",
"__dict__",
"[",
"key",
"]",
"# The Preference class is also not directly JSON serializable, so we export the underlying",
"# dictionary for each Preference object.",
"preferenceDicts",
"=",
"[",
"]",
"for",
"preference",
"in",
"self",
".",
"preferences",
":",
"preferenceDict",
"=",
"dict",
"(",
")",
"for",
"key",
"in",
"preference",
".",
"__dict__",
".",
"keys",
"(",
")",
":",
"preferenceDict",
"[",
"key",
"]",
"=",
"preference",
".",
"__dict__",
"[",
"key",
"]",
"preferenceDicts",
".",
"append",
"(",
"preferenceDict",
")",
"data",
"[",
"\"preferences\"",
"]",
"=",
"preferenceDicts",
"outfile",
"=",
"open",
"(",
"fileName",
",",
"'w'",
")",
"json",
".",
"dump",
"(",
"data",
",",
"outfile",
")",
"outfile",
".",
"close",
"(",
")"
] | Exports a json file that contains all the information of the current Profile.
:ivar str fileName: The name of the output file to be exported. | [
"Exports",
"a",
"json",
"file",
"that",
"contains",
"all",
"the",
"information",
"of",
"the",
"current",
"Profile",
"."
] | f395ba3782f05684fa5de0cece387a6da9391d02 | https://github.com/PrefPy/prefpy/blob/f395ba3782f05684fa5de0cece387a6da9391d02/prefpy/profile.py#L286-L312 | train |
PrefPy/prefpy | prefpy/profile.py | Profile.importJsonFile | def importJsonFile(self, fileName):
"""
Imports a json file that contains all the information of a Profile. This function will
completely override all members of the current Profile object.
:ivar str fileName: The name of the input file to be imported.
"""
infile = open(fileName)
data = json.load(infile)
infile.close()
self.numCands = int(data["numCands"])
self.numVoters = int(data["numVoters"])
# Because the json.load function imports everything as unicode strings, we will go through
# the candMap dictionary and convert all the keys to integers and convert all the values to
# ascii strings.
candMap = dict()
for key in data["candMap"].keys():
candMap[int(key)] = data["candMap"][key].encode("ascii")
self.candMap = candMap
# The Preference class is also not directly JSON serializable, so we exported the
# underlying dictionary for each Preference object. When we import, we will create a
# Preference object from these dictionaries.
self.preferences = []
for preferenceMap in data["preferences"]:
count = int(preferenceMap["count"])
# Because json.load imports all the items in the wmgMap as unicode strings, we need to
# convert all the keys and values into integers.
preferenceWmgMap = preferenceMap["wmgMap"]
wmgMap = dict()
for key in preferenceWmgMap.keys():
wmgMap[int(key)] = dict()
for key2 in preferenceWmgMap[key].keys():
wmgMap[int(key)][int(key2)] = int(preferenceWmgMap[key][key2])
self.preferences.append(Preference(wmgMap, count)) | python | def importJsonFile(self, fileName):
"""
Imports a json file that contains all the information of a Profile. This function will
completely override all members of the current Profile object.
:ivar str fileName: The name of the input file to be imported.
"""
infile = open(fileName)
data = json.load(infile)
infile.close()
self.numCands = int(data["numCands"])
self.numVoters = int(data["numVoters"])
# Because the json.load function imports everything as unicode strings, we will go through
# the candMap dictionary and convert all the keys to integers and convert all the values to
# ascii strings.
candMap = dict()
for key in data["candMap"].keys():
candMap[int(key)] = data["candMap"][key].encode("ascii")
self.candMap = candMap
# The Preference class is also not directly JSON serializable, so we exported the
# underlying dictionary for each Preference object. When we import, we will create a
# Preference object from these dictionaries.
self.preferences = []
for preferenceMap in data["preferences"]:
count = int(preferenceMap["count"])
# Because json.load imports all the items in the wmgMap as unicode strings, we need to
# convert all the keys and values into integers.
preferenceWmgMap = preferenceMap["wmgMap"]
wmgMap = dict()
for key in preferenceWmgMap.keys():
wmgMap[int(key)] = dict()
for key2 in preferenceWmgMap[key].keys():
wmgMap[int(key)][int(key2)] = int(preferenceWmgMap[key][key2])
self.preferences.append(Preference(wmgMap, count)) | [
"def",
"importJsonFile",
"(",
"self",
",",
"fileName",
")",
":",
"infile",
"=",
"open",
"(",
"fileName",
")",
"data",
"=",
"json",
".",
"load",
"(",
"infile",
")",
"infile",
".",
"close",
"(",
")",
"self",
".",
"numCands",
"=",
"int",
"(",
"data",
"[",
"\"numCands\"",
"]",
")",
"self",
".",
"numVoters",
"=",
"int",
"(",
"data",
"[",
"\"numVoters\"",
"]",
")",
"# Because the json.load function imports everything as unicode strings, we will go through",
"# the candMap dictionary and convert all the keys to integers and convert all the values to",
"# ascii strings.",
"candMap",
"=",
"dict",
"(",
")",
"for",
"key",
"in",
"data",
"[",
"\"candMap\"",
"]",
".",
"keys",
"(",
")",
":",
"candMap",
"[",
"int",
"(",
"key",
")",
"]",
"=",
"data",
"[",
"\"candMap\"",
"]",
"[",
"key",
"]",
".",
"encode",
"(",
"\"ascii\"",
")",
"self",
".",
"candMap",
"=",
"candMap",
"# The Preference class is also not directly JSON serializable, so we exported the ",
"# underlying dictionary for each Preference object. When we import, we will create a ",
"# Preference object from these dictionaries.",
"self",
".",
"preferences",
"=",
"[",
"]",
"for",
"preferenceMap",
"in",
"data",
"[",
"\"preferences\"",
"]",
":",
"count",
"=",
"int",
"(",
"preferenceMap",
"[",
"\"count\"",
"]",
")",
"# Because json.load imports all the items in the wmgMap as unicode strings, we need to",
"# convert all the keys and values into integers.",
"preferenceWmgMap",
"=",
"preferenceMap",
"[",
"\"wmgMap\"",
"]",
"wmgMap",
"=",
"dict",
"(",
")",
"for",
"key",
"in",
"preferenceWmgMap",
".",
"keys",
"(",
")",
":",
"wmgMap",
"[",
"int",
"(",
"key",
")",
"]",
"=",
"dict",
"(",
")",
"for",
"key2",
"in",
"preferenceWmgMap",
"[",
"key",
"]",
".",
"keys",
"(",
")",
":",
"wmgMap",
"[",
"int",
"(",
"key",
")",
"]",
"[",
"int",
"(",
"key2",
")",
"]",
"=",
"int",
"(",
"preferenceWmgMap",
"[",
"key",
"]",
"[",
"key2",
"]",
")",
"self",
".",
"preferences",
".",
"append",
"(",
"Preference",
"(",
"wmgMap",
",",
"count",
")",
")"
] | Imports a json file that contains all the information of a Profile. This function will
completely override all members of the current Profile object.
:ivar str fileName: The name of the input file to be imported. | [
"Imports",
"a",
"json",
"file",
"that",
"contains",
"all",
"the",
"information",
"of",
"a",
"Profile",
".",
"This",
"function",
"will",
"completely",
"override",
"all",
"members",
"of",
"the",
"current",
"Profile",
"object",
"."
] | f395ba3782f05684fa5de0cece387a6da9391d02 | https://github.com/PrefPy/prefpy/blob/f395ba3782f05684fa5de0cece387a6da9391d02/prefpy/profile.py#L314-L353 | train |
PrefPy/prefpy | prefpy/mmgbtl.py | main | def main():
"""Driver function for the computation of the MM algorithm"""
# test example below taken from GMMRA by Azari, Chen, Parkes, & Xia
cand_set = [0, 1, 2]
votes = [[0, 1, 2], [1, 2, 0]]
mmagg = MMPLAggregator(cand_set)
gamma = mmagg.aggregate(votes, epsilon=1e-7, max_iters=20)
print(mmagg.alts_to_ranks, mmagg.ranks_to_alts)
assert([mmagg.get_ranking(i) for i in cand_set] == [1,0,2])
print(gamma) | python | def main():
"""Driver function for the computation of the MM algorithm"""
# test example below taken from GMMRA by Azari, Chen, Parkes, & Xia
cand_set = [0, 1, 2]
votes = [[0, 1, 2], [1, 2, 0]]
mmagg = MMPLAggregator(cand_set)
gamma = mmagg.aggregate(votes, epsilon=1e-7, max_iters=20)
print(mmagg.alts_to_ranks, mmagg.ranks_to_alts)
assert([mmagg.get_ranking(i) for i in cand_set] == [1,0,2])
print(gamma) | [
"def",
"main",
"(",
")",
":",
"# test example below taken from GMMRA by Azari, Chen, Parkes, & Xia\r",
"cand_set",
"=",
"[",
"0",
",",
"1",
",",
"2",
"]",
"votes",
"=",
"[",
"[",
"0",
",",
"1",
",",
"2",
"]",
",",
"[",
"1",
",",
"2",
",",
"0",
"]",
"]",
"mmagg",
"=",
"MMPLAggregator",
"(",
"cand_set",
")",
"gamma",
"=",
"mmagg",
".",
"aggregate",
"(",
"votes",
",",
"epsilon",
"=",
"1e-7",
",",
"max_iters",
"=",
"20",
")",
"print",
"(",
"mmagg",
".",
"alts_to_ranks",
",",
"mmagg",
".",
"ranks_to_alts",
")",
"assert",
"(",
"[",
"mmagg",
".",
"get_ranking",
"(",
"i",
")",
"for",
"i",
"in",
"cand_set",
"]",
"==",
"[",
"1",
",",
"0",
",",
"2",
"]",
")",
"print",
"(",
"gamma",
")"
] | Driver function for the computation of the MM algorithm | [
"Driver",
"function",
"for",
"the",
"computation",
"of",
"the",
"MM",
"algorithm"
] | f395ba3782f05684fa5de0cece387a6da9391d02 | https://github.com/PrefPy/prefpy/blob/f395ba3782f05684fa5de0cece387a6da9391d02/prefpy/mmgbtl.py#L73-L84 | train |
aouyar/healthgraph-api | healthgraph/authmgr.py | AuthManager.get_login_url | def get_login_url(self, state=None):
"""Generates and returns URL for redirecting to Login Page of RunKeeper,
which is the Authorization Endpoint of Health Graph API.
@param state: State string. Passed to client web application at the end
of the Login Process.
@return: URL for redirecting to RunKeeper Login Page.
"""
payload = {'response_type': 'code',
'client_id': self._client_id,
'redirect_uri': self._redirect_uri,}
if state is not None:
payload['state'] = state
return "%s?%s" % (settings.API_AUTHORIZATION_URL,
urllib.urlencode(payload)) | python | def get_login_url(self, state=None):
"""Generates and returns URL for redirecting to Login Page of RunKeeper,
which is the Authorization Endpoint of Health Graph API.
@param state: State string. Passed to client web application at the end
of the Login Process.
@return: URL for redirecting to RunKeeper Login Page.
"""
payload = {'response_type': 'code',
'client_id': self._client_id,
'redirect_uri': self._redirect_uri,}
if state is not None:
payload['state'] = state
return "%s?%s" % (settings.API_AUTHORIZATION_URL,
urllib.urlencode(payload)) | [
"def",
"get_login_url",
"(",
"self",
",",
"state",
"=",
"None",
")",
":",
"payload",
"=",
"{",
"'response_type'",
":",
"'code'",
",",
"'client_id'",
":",
"self",
".",
"_client_id",
",",
"'redirect_uri'",
":",
"self",
".",
"_redirect_uri",
",",
"}",
"if",
"state",
"is",
"not",
"None",
":",
"payload",
"[",
"'state'",
"]",
"=",
"state",
"return",
"\"%s?%s\"",
"%",
"(",
"settings",
".",
"API_AUTHORIZATION_URL",
",",
"urllib",
".",
"urlencode",
"(",
"payload",
")",
")"
] | Generates and returns URL for redirecting to Login Page of RunKeeper,
which is the Authorization Endpoint of Health Graph API.
@param state: State string. Passed to client web application at the end
of the Login Process.
@return: URL for redirecting to RunKeeper Login Page. | [
"Generates",
"and",
"returns",
"URL",
"for",
"redirecting",
"to",
"Login",
"Page",
"of",
"RunKeeper",
"which",
"is",
"the",
"Authorization",
"Endpoint",
"of",
"Health",
"Graph",
"API",
"."
] | fc5135ab353ca1f05e8a70ec784ff921e686c072 | https://github.com/aouyar/healthgraph-api/blob/fc5135ab353ca1f05e8a70ec784ff921e686c072/healthgraph/authmgr.py#L47-L62 | train |
aouyar/healthgraph-api | healthgraph/authmgr.py | AuthManager.get_login_button_url | def get_login_button_url(self, button_color=None, caption_color=None, button_size=None):
"""Return URL for image used for RunKeeper Login button.
@param button_color: Button color. Either 'blue', 'grey' or 'black'.
Default: 'blue'.
@param caption_color: Button text color. Either 'white' or 'black'.
Default: 'white'
@param button_size: Button width in pixels. Either 200, 300 or 600.
Default: 200
@return: URL for Login Button Image.
"""
if not button_color in settings.LOGIN_BUTTON_COLORS:
button_color = settings.LOGIN_BUTTON_COLORS[0]
if not caption_color in settings.LOGIN_BUTTON_CAPTION_COLORS:
caption_color = settings.LOGIN_BUTTON_CAPTION_COLORS[0]
if settings.LOGIN_BUTTON_SIZES.has_key(button_size):
button_size = settings.LOGIN_BUTTON_SIZES[button_size]
else:
button_size = settings.LOGIN_BUTTON_SIZES['None']
return settings.LOGIN_BUTTON_URL % (button_color,
caption_color,
button_size) | python | def get_login_button_url(self, button_color=None, caption_color=None, button_size=None):
"""Return URL for image used for RunKeeper Login button.
@param button_color: Button color. Either 'blue', 'grey' or 'black'.
Default: 'blue'.
@param caption_color: Button text color. Either 'white' or 'black'.
Default: 'white'
@param button_size: Button width in pixels. Either 200, 300 or 600.
Default: 200
@return: URL for Login Button Image.
"""
if not button_color in settings.LOGIN_BUTTON_COLORS:
button_color = settings.LOGIN_BUTTON_COLORS[0]
if not caption_color in settings.LOGIN_BUTTON_CAPTION_COLORS:
caption_color = settings.LOGIN_BUTTON_CAPTION_COLORS[0]
if settings.LOGIN_BUTTON_SIZES.has_key(button_size):
button_size = settings.LOGIN_BUTTON_SIZES[button_size]
else:
button_size = settings.LOGIN_BUTTON_SIZES['None']
return settings.LOGIN_BUTTON_URL % (button_color,
caption_color,
button_size) | [
"def",
"get_login_button_url",
"(",
"self",
",",
"button_color",
"=",
"None",
",",
"caption_color",
"=",
"None",
",",
"button_size",
"=",
"None",
")",
":",
"if",
"not",
"button_color",
"in",
"settings",
".",
"LOGIN_BUTTON_COLORS",
":",
"button_color",
"=",
"settings",
".",
"LOGIN_BUTTON_COLORS",
"[",
"0",
"]",
"if",
"not",
"caption_color",
"in",
"settings",
".",
"LOGIN_BUTTON_CAPTION_COLORS",
":",
"caption_color",
"=",
"settings",
".",
"LOGIN_BUTTON_CAPTION_COLORS",
"[",
"0",
"]",
"if",
"settings",
".",
"LOGIN_BUTTON_SIZES",
".",
"has_key",
"(",
"button_size",
")",
":",
"button_size",
"=",
"settings",
".",
"LOGIN_BUTTON_SIZES",
"[",
"button_size",
"]",
"else",
":",
"button_size",
"=",
"settings",
".",
"LOGIN_BUTTON_SIZES",
"[",
"'None'",
"]",
"return",
"settings",
".",
"LOGIN_BUTTON_URL",
"%",
"(",
"button_color",
",",
"caption_color",
",",
"button_size",
")"
] | Return URL for image used for RunKeeper Login button.
@param button_color: Button color. Either 'blue', 'grey' or 'black'.
Default: 'blue'.
@param caption_color: Button text color. Either 'white' or 'black'.
Default: 'white'
@param button_size: Button width in pixels. Either 200, 300 or 600.
Default: 200
@return: URL for Login Button Image. | [
"Return",
"URL",
"for",
"image",
"used",
"for",
"RunKeeper",
"Login",
"button",
"."
] | fc5135ab353ca1f05e8a70ec784ff921e686c072 | https://github.com/aouyar/healthgraph-api/blob/fc5135ab353ca1f05e8a70ec784ff921e686c072/healthgraph/authmgr.py#L64-L86 | train |
aouyar/healthgraph-api | healthgraph/authmgr.py | AuthManager.get_access_token | def get_access_token(self, code):
"""Returns Access Token retrieved from the Health Graph API Token
Endpoint following the login to RunKeeper.
to RunKeeper.
@param code: Code returned by Health Graph API at the Authorization or
RunKeeper Login phase.
@return: Access Token for querying the Health Graph API.
"""
payload = {'grant_type': 'authorization_code',
'code': code,
'client_id': self._client_id,
'client_secret': self._client_secret,
'redirect_uri': self._redirect_uri,}
req = requests.post(settings.API_ACCESS_TOKEN_URL, data=payload)
data = req.json()
return data.get('access_token') | python | def get_access_token(self, code):
"""Returns Access Token retrieved from the Health Graph API Token
Endpoint following the login to RunKeeper.
to RunKeeper.
@param code: Code returned by Health Graph API at the Authorization or
RunKeeper Login phase.
@return: Access Token for querying the Health Graph API.
"""
payload = {'grant_type': 'authorization_code',
'code': code,
'client_id': self._client_id,
'client_secret': self._client_secret,
'redirect_uri': self._redirect_uri,}
req = requests.post(settings.API_ACCESS_TOKEN_URL, data=payload)
data = req.json()
return data.get('access_token') | [
"def",
"get_access_token",
"(",
"self",
",",
"code",
")",
":",
"payload",
"=",
"{",
"'grant_type'",
":",
"'authorization_code'",
",",
"'code'",
":",
"code",
",",
"'client_id'",
":",
"self",
".",
"_client_id",
",",
"'client_secret'",
":",
"self",
".",
"_client_secret",
",",
"'redirect_uri'",
":",
"self",
".",
"_redirect_uri",
",",
"}",
"req",
"=",
"requests",
".",
"post",
"(",
"settings",
".",
"API_ACCESS_TOKEN_URL",
",",
"data",
"=",
"payload",
")",
"data",
"=",
"req",
".",
"json",
"(",
")",
"return",
"data",
".",
"get",
"(",
"'access_token'",
")"
] | Returns Access Token retrieved from the Health Graph API Token
Endpoint following the login to RunKeeper.
to RunKeeper.
@param code: Code returned by Health Graph API at the Authorization or
RunKeeper Login phase.
@return: Access Token for querying the Health Graph API. | [
"Returns",
"Access",
"Token",
"retrieved",
"from",
"the",
"Health",
"Graph",
"API",
"Token",
"Endpoint",
"following",
"the",
"login",
"to",
"RunKeeper",
".",
"to",
"RunKeeper",
"."
] | fc5135ab353ca1f05e8a70ec784ff921e686c072 | https://github.com/aouyar/healthgraph-api/blob/fc5135ab353ca1f05e8a70ec784ff921e686c072/healthgraph/authmgr.py#L88-L105 | train |
aouyar/healthgraph-api | healthgraph/authmgr.py | AuthManager.revoke_access_token | def revoke_access_token(self, access_token):
"""Revokes the Access Token by accessing the De-authorization Endpoint
of Health Graph API.
@param access_token: Access Token for querying Health Graph API.
"""
payload = {'access_token': access_token,}
req = requests.post(settings.API_DEAUTHORIZATION_URL, data=payload) | python | def revoke_access_token(self, access_token):
"""Revokes the Access Token by accessing the De-authorization Endpoint
of Health Graph API.
@param access_token: Access Token for querying Health Graph API.
"""
payload = {'access_token': access_token,}
req = requests.post(settings.API_DEAUTHORIZATION_URL, data=payload) | [
"def",
"revoke_access_token",
"(",
"self",
",",
"access_token",
")",
":",
"payload",
"=",
"{",
"'access_token'",
":",
"access_token",
",",
"}",
"req",
"=",
"requests",
".",
"post",
"(",
"settings",
".",
"API_DEAUTHORIZATION_URL",
",",
"data",
"=",
"payload",
")"
] | Revokes the Access Token by accessing the De-authorization Endpoint
of Health Graph API.
@param access_token: Access Token for querying Health Graph API. | [
"Revokes",
"the",
"Access",
"Token",
"by",
"accessing",
"the",
"De",
"-",
"authorization",
"Endpoint",
"of",
"Health",
"Graph",
"API",
"."
] | fc5135ab353ca1f05e8a70ec784ff921e686c072 | https://github.com/aouyar/healthgraph-api/blob/fc5135ab353ca1f05e8a70ec784ff921e686c072/healthgraph/authmgr.py#L107-L115 | train |
IRC-SPHERE/HyperStream | hyperstream/time_interval.py | TimeIntervals.split | def split(self, points):
'''Splits the list of time intervals in the specified points
The function assumes that the time intervals do not overlap and ignores
points that are not inside of any interval.
Parameters
==========
points: list of datetime
'''
for p in points:
for i in range(len(self.intervals)):
if (self.intervals[i].start < p) and (self.intervals[i].end > p):
self.intervals = (self.intervals[:i]
+ [TimeInterval(self.intervals[i].start, p),
TimeInterval(p, self.intervals[i].end)]
+ self.intervals[(i + 1):])
break | python | def split(self, points):
'''Splits the list of time intervals in the specified points
The function assumes that the time intervals do not overlap and ignores
points that are not inside of any interval.
Parameters
==========
points: list of datetime
'''
for p in points:
for i in range(len(self.intervals)):
if (self.intervals[i].start < p) and (self.intervals[i].end > p):
self.intervals = (self.intervals[:i]
+ [TimeInterval(self.intervals[i].start, p),
TimeInterval(p, self.intervals[i].end)]
+ self.intervals[(i + 1):])
break | [
"def",
"split",
"(",
"self",
",",
"points",
")",
":",
"for",
"p",
"in",
"points",
":",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"self",
".",
"intervals",
")",
")",
":",
"if",
"(",
"self",
".",
"intervals",
"[",
"i",
"]",
".",
"start",
"<",
"p",
")",
"and",
"(",
"self",
".",
"intervals",
"[",
"i",
"]",
".",
"end",
">",
"p",
")",
":",
"self",
".",
"intervals",
"=",
"(",
"self",
".",
"intervals",
"[",
":",
"i",
"]",
"+",
"[",
"TimeInterval",
"(",
"self",
".",
"intervals",
"[",
"i",
"]",
".",
"start",
",",
"p",
")",
",",
"TimeInterval",
"(",
"p",
",",
"self",
".",
"intervals",
"[",
"i",
"]",
".",
"end",
")",
"]",
"+",
"self",
".",
"intervals",
"[",
"(",
"i",
"+",
"1",
")",
":",
"]",
")",
"break"
] | Splits the list of time intervals in the specified points
The function assumes that the time intervals do not overlap and ignores
points that are not inside of any interval.
Parameters
==========
points: list of datetime | [
"Splits",
"the",
"list",
"of",
"time",
"intervals",
"in",
"the",
"specified",
"points"
] | 98478f4d31ed938f4aa7c958ed0d4c3ffcb2e780 | https://github.com/IRC-SPHERE/HyperStream/blob/98478f4d31ed938f4aa7c958ed0d4c3ffcb2e780/hyperstream/time_interval.py#L118-L135 | train |
inveniosoftware-contrib/invenio-workflows | invenio_workflows/api.py | WorkflowObject.create | def create(cls, data, **kwargs):
"""Create a new Workflow Object with given content."""
with db.session.begin_nested():
model = cls.dbmodel(**kwargs)
model.data = data
obj = cls(model)
db.session.add(obj.model)
return obj | python | def create(cls, data, **kwargs):
"""Create a new Workflow Object with given content."""
with db.session.begin_nested():
model = cls.dbmodel(**kwargs)
model.data = data
obj = cls(model)
db.session.add(obj.model)
return obj | [
"def",
"create",
"(",
"cls",
",",
"data",
",",
"*",
"*",
"kwargs",
")",
":",
"with",
"db",
".",
"session",
".",
"begin_nested",
"(",
")",
":",
"model",
"=",
"cls",
".",
"dbmodel",
"(",
"*",
"*",
"kwargs",
")",
"model",
".",
"data",
"=",
"data",
"obj",
"=",
"cls",
"(",
"model",
")",
"db",
".",
"session",
".",
"add",
"(",
"obj",
".",
"model",
")",
"return",
"obj"
] | Create a new Workflow Object with given content. | [
"Create",
"a",
"new",
"Workflow",
"Object",
"with",
"given",
"content",
"."
] | 9c09fd29509a3db975ac2aba337e6760d8cfd3c2 | https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/api.py#L139-L146 | train |
inveniosoftware-contrib/invenio-workflows | invenio_workflows/api.py | WorkflowObject.get | def get(cls, id_):
"""Return a workflow object from id."""
with db.session.no_autoflush:
query = cls.dbmodel.query.filter_by(id=id_)
try:
model = query.one()
except NoResultFound:
raise WorkflowsMissingObject("No object for for id {0}".format(
id_
))
return cls(model) | python | def get(cls, id_):
"""Return a workflow object from id."""
with db.session.no_autoflush:
query = cls.dbmodel.query.filter_by(id=id_)
try:
model = query.one()
except NoResultFound:
raise WorkflowsMissingObject("No object for for id {0}".format(
id_
))
return cls(model) | [
"def",
"get",
"(",
"cls",
",",
"id_",
")",
":",
"with",
"db",
".",
"session",
".",
"no_autoflush",
":",
"query",
"=",
"cls",
".",
"dbmodel",
".",
"query",
".",
"filter_by",
"(",
"id",
"=",
"id_",
")",
"try",
":",
"model",
"=",
"query",
".",
"one",
"(",
")",
"except",
"NoResultFound",
":",
"raise",
"WorkflowsMissingObject",
"(",
"\"No object for for id {0}\"",
".",
"format",
"(",
"id_",
")",
")",
"return",
"cls",
"(",
"model",
")"
] | Return a workflow object from id. | [
"Return",
"a",
"workflow",
"object",
"from",
"id",
"."
] | 9c09fd29509a3db975ac2aba337e6760d8cfd3c2 | https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/api.py#L149-L159 | train |
inveniosoftware-contrib/invenio-workflows | invenio_workflows/api.py | WorkflowObject.query | def query(cls, *criteria, **filters):
"""Wrap sqlalchemy query methods.
A wrapper for the filter and filter_by functions of sqlalchemy.
Define a dict with which columns should be filtered by which values.
.. codeblock:: python
WorkflowObject.query(id=123)
WorkflowObject.query(status=ObjectStatus.COMPLETED)
The function supports also "hybrid" arguments using WorkflowObjectModel
indirectly.
.. codeblock:: python
WorkflowObject.query(
WorkflowObject.dbmodel.status == ObjectStatus.COMPLETED,
user_id=user_id
)
See also SQLAlchemy BaseQuery's filter and filter_by documentation.
"""
query = cls.dbmodel.query.filter(
*criteria).filter_by(**filters)
return [cls(obj) for obj in query.all()] | python | def query(cls, *criteria, **filters):
"""Wrap sqlalchemy query methods.
A wrapper for the filter and filter_by functions of sqlalchemy.
Define a dict with which columns should be filtered by which values.
.. codeblock:: python
WorkflowObject.query(id=123)
WorkflowObject.query(status=ObjectStatus.COMPLETED)
The function supports also "hybrid" arguments using WorkflowObjectModel
indirectly.
.. codeblock:: python
WorkflowObject.query(
WorkflowObject.dbmodel.status == ObjectStatus.COMPLETED,
user_id=user_id
)
See also SQLAlchemy BaseQuery's filter and filter_by documentation.
"""
query = cls.dbmodel.query.filter(
*criteria).filter_by(**filters)
return [cls(obj) for obj in query.all()] | [
"def",
"query",
"(",
"cls",
",",
"*",
"criteria",
",",
"*",
"*",
"filters",
")",
":",
"query",
"=",
"cls",
".",
"dbmodel",
".",
"query",
".",
"filter",
"(",
"*",
"criteria",
")",
".",
"filter_by",
"(",
"*",
"*",
"filters",
")",
"return",
"[",
"cls",
"(",
"obj",
")",
"for",
"obj",
"in",
"query",
".",
"all",
"(",
")",
"]"
] | Wrap sqlalchemy query methods.
A wrapper for the filter and filter_by functions of sqlalchemy.
Define a dict with which columns should be filtered by which values.
.. codeblock:: python
WorkflowObject.query(id=123)
WorkflowObject.query(status=ObjectStatus.COMPLETED)
The function supports also "hybrid" arguments using WorkflowObjectModel
indirectly.
.. codeblock:: python
WorkflowObject.query(
WorkflowObject.dbmodel.status == ObjectStatus.COMPLETED,
user_id=user_id
)
See also SQLAlchemy BaseQuery's filter and filter_by documentation. | [
"Wrap",
"sqlalchemy",
"query",
"methods",
"."
] | 9c09fd29509a3db975ac2aba337e6760d8cfd3c2 | https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/api.py#L162-L187 | train |
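The query() wrapper above forwards positional arguments to SQLAlchemy's filter() and keyword arguments to filter_by(). The toy classes below imitate that split in plain Python so the pattern can be run without a database; FakeQuery and the dict rows are illustrative stand-ins, not the invenio-workflows or SQLAlchemy API.

# Positional criteria behave like filter() expressions (here: plain callables),
# keyword filters behave like filter_by() column equalities.
class FakeQuery:
    def __init__(self, rows):
        self.rows = rows
    def filter(self, *criteria):
        return FakeQuery([r for r in self.rows if all(c(r) for c in criteria)])
    def filter_by(self, **filters):
        return FakeQuery([r for r in self.rows
                          if all(r.get(k) == v for k, v in filters.items())])
    def all(self):
        return self.rows

rows = [{"id": 1, "status": "COMPLETED", "user_id": 7},
        {"id": 2, "status": "RUNNING", "user_id": 7}]
q = FakeQuery(rows).filter(lambda r: r["status"] == "COMPLETED").filter_by(user_id=7)
print(q.all())  # -> [{'id': 1, 'status': 'COMPLETED', 'user_id': 7}]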
inveniosoftware-contrib/invenio-workflows | invenio_workflows/api.py | WorkflowObject.delete | def delete(self, force=False):
"""Delete a workflow object.
If `force` is ``False``, the record is soft-deleted, i.e. the record
stays in the database. This ensures e.g. that the same record
identifier cannot be used twice, and that you can still retrieve the
history of an object. If `force` is True, the record is completely
removed from the database.
:param force: Completely remove record from database.
"""
if self.model is None:
raise WorkflowsMissingModel()
with db.session.begin_nested():
db.session.delete(self.model)
return self | python | def delete(self, force=False):
"""Delete a workflow object.
If `force` is ``False``, the record is soft-deleted, i.e. the record
stays in the database. This ensures e.g. that the same record
identifier cannot be used twice, and that you can still retrieve the
history of an object. If `force` is True, the record is completely
removed from the database.
:param force: Completely remove record from database.
"""
if self.model is None:
raise WorkflowsMissingModel()
with db.session.begin_nested():
db.session.delete(self.model)
return self | [
"def",
"delete",
"(",
"self",
",",
"force",
"=",
"False",
")",
":",
"if",
"self",
".",
"model",
"is",
"None",
":",
"raise",
"WorkflowsMissingModel",
"(",
")",
"with",
"db",
".",
"session",
".",
"begin_nested",
"(",
")",
":",
"db",
".",
"session",
".",
"delete",
"(",
"self",
".",
"model",
")",
"return",
"self"
] | Delete a workflow object.
If `force` is ``False``, the record is soft-deleted, i.e. the record
stays in the database. This ensures e.g. that the same record
identifier cannot be used twice, and that you can still retrieve the
history of an object. If `force` is True, the record is completely
removed from the database.
:param force: Completely remove record from database. | [
"Delete",
"a",
"workflow",
"object",
"."
] | 9c09fd29509a3db975ac2aba337e6760d8cfd3c2 | https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/api.py#L189-L206 | train |
inveniosoftware-contrib/invenio-workflows | invenio_workflows/api.py | WorkflowObject.set_action | def set_action(self, action, message):
"""Set the action to be taken for this object.
Assign a special "action" to this object to be taken
in consideration in Holding Pen. The widget is referred to
by a string with the filename minus extension.
A message is also needed to tell the user the action
required in a textual way.
:param action: name of the action to add (i.e. "approval")
:type action: string
:param message: message to show to the user
:type message: string
"""
self.extra_data["_action"] = action
self.extra_data["_message"] = message | python | def set_action(self, action, message):
"""Set the action to be taken for this object.
Assign a special "action" to this object to be taken
in consideration in Holding Pen. The widget is referred to
by a string with the filename minus extension.
A message is also needed to tell the user the action
required in a textual way.
:param action: name of the action to add (i.e. "approval")
:type action: string
:param message: message to show to the user
:type message: string
"""
self.extra_data["_action"] = action
self.extra_data["_message"] = message | [
"def",
"set_action",
"(",
"self",
",",
"action",
",",
"message",
")",
":",
"self",
".",
"extra_data",
"[",
"\"_action\"",
"]",
"=",
"action",
"self",
".",
"extra_data",
"[",
"\"_message\"",
"]",
"=",
"message"
] | Set the action to be taken for this object.
Assign a special "action" to this object to be taken
in consideration in Holding Pen. The widget is referred to
by a string with the filename minus extension.
A message is also needed to tell the user the action
required in a textual way.
:param action: name of the action to add (i.e. "approval")
:type action: string
:param message: message to show to the user
:type message: string | [
"Set",
"the",
"action",
"to",
"be",
"taken",
"for",
"this",
"object",
"."
] | 9c09fd29509a3db975ac2aba337e6760d8cfd3c2 | https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/api.py#L280-L297 | train |
inveniosoftware-contrib/invenio-workflows | invenio_workflows/api.py | WorkflowObject.start_workflow | def start_workflow(self, workflow_name, delayed=False, **kwargs):
"""Run the workflow specified on the object.
:param workflow_name: name of workflow to run
:type workflow_name: str
:param delayed: should the workflow run asynchronously?
:type delayed: bool
:return: UUID of WorkflowEngine (or AsyncResult).
"""
from .tasks import start
if delayed:
self.save()
db.session.commit()
return start.delay(workflow_name, object_id=self.id, **kwargs)
else:
return start(workflow_name, data=[self], **kwargs) | python | def start_workflow(self, workflow_name, delayed=False, **kwargs):
"""Run the workflow specified on the object.
:param workflow_name: name of workflow to run
:type workflow_name: str
:param delayed: should the workflow run asynchronously?
:type delayed: bool
:return: UUID of WorkflowEngine (or AsyncResult).
"""
from .tasks import start
if delayed:
self.save()
db.session.commit()
return start.delay(workflow_name, object_id=self.id, **kwargs)
else:
return start(workflow_name, data=[self], **kwargs) | [
"def",
"start_workflow",
"(",
"self",
",",
"workflow_name",
",",
"delayed",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
".",
"tasks",
"import",
"start",
"if",
"delayed",
":",
"self",
".",
"save",
"(",
")",
"db",
".",
"session",
".",
"commit",
"(",
")",
"return",
"start",
".",
"delay",
"(",
"workflow_name",
",",
"object_id",
"=",
"self",
".",
"id",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"return",
"start",
"(",
"workflow_name",
",",
"data",
"=",
"[",
"self",
"]",
",",
"*",
"*",
"kwargs",
")"
] | Run the workflow specified on the object.
:param workflow_name: name of workflow to run
:type workflow_name: str
:param delayed: should the workflow run asynchronously?
:type delayed: bool
:return: UUID of WorkflowEngine (or AsyncResult). | [
"Run",
"the",
"workflow",
"specified",
"on",
"the",
"object",
"."
] | 9c09fd29509a3db975ac2aba337e6760d8cfd3c2 | https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/api.py#L327-L345 | train |
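A runnable sketch of the delayed-versus-synchronous dispatch performed by start_workflow() above. FakeTask only mimics the Celery task interface (a __call__ for synchronous runs and .delay() for queueing); it is not the real invenio-workflows start task, and the workflow name and object id are invented for the demo.

class FakeTask:
    def __call__(self, name, data=None, **kw):
        return "ran {} synchronously with {}".format(name, data)
    def delay(self, name, object_id=None, **kw):
        return "queued {} for object {}".format(name, object_id)

start = FakeTask()

def start_workflow(workflow_name, obj_id, delayed=False, **kwargs):
    # delayed=True would enqueue the task; otherwise it runs in the current process.
    if delayed:
        return start.delay(workflow_name, object_id=obj_id, **kwargs)
    return start(workflow_name, data=[obj_id], **kwargs)

print(start_workflow("article_ingest", 42, delayed=True))
print(start_workflow("article_ingest", 42))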
inveniosoftware-contrib/invenio-workflows | invenio_workflows/api.py | WorkflowObject.continue_workflow | def continue_workflow(self, start_point="continue_next",
delayed=False, **kwargs):
"""Continue the workflow for this object.
The parameter `start_point` allows you to specify the point of where
the workflow shall continue:
* restart_prev: will restart from the previous task
* continue_next: will continue to the next task
* restart_task: will restart the current task
:param start_point: where should the workflow start from?
:type start_point: str
:param delayed: should the workflow run asynchronously?
:type delayed: bool
:return: UUID of WorkflowEngine (or AsyncResult).
"""
from .tasks import resume
self.save()
if not self.id_workflow:
raise WorkflowAPIError("No workflow associated with object: %r"
% (repr(self),))
if delayed:
db.session.commit()
return resume.delay(self.id, start_point, **kwargs)
else:
return resume(self.id, start_point, **kwargs) | python | def continue_workflow(self, start_point="continue_next",
delayed=False, **kwargs):
"""Continue the workflow for this object.
The parameter `start_point` allows you to specify the point of where
the workflow shall continue:
* restart_prev: will restart from the previous task
* continue_next: will continue to the next task
* restart_task: will restart the current task
:param start_point: where should the workflow start from?
:type start_point: str
:param delayed: should the workflow run asynchronously?
:type delayed: bool
:return: UUID of WorkflowEngine (or AsyncResult).
"""
from .tasks import resume
self.save()
if not self.id_workflow:
raise WorkflowAPIError("No workflow associated with object: %r"
% (repr(self),))
if delayed:
db.session.commit()
return resume.delay(self.id, start_point, **kwargs)
else:
return resume(self.id, start_point, **kwargs) | [
"def",
"continue_workflow",
"(",
"self",
",",
"start_point",
"=",
"\"continue_next\"",
",",
"delayed",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
".",
"tasks",
"import",
"resume",
"self",
".",
"save",
"(",
")",
"if",
"not",
"self",
".",
"id_workflow",
":",
"raise",
"WorkflowAPIError",
"(",
"\"No workflow associated with object: %r\"",
"%",
"(",
"repr",
"(",
"self",
")",
",",
")",
")",
"if",
"delayed",
":",
"db",
".",
"session",
".",
"commit",
"(",
")",
"return",
"resume",
".",
"delay",
"(",
"self",
".",
"id",
",",
"start_point",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"return",
"resume",
"(",
"self",
".",
"id",
",",
"start_point",
",",
"*",
"*",
"kwargs",
")"
] | Continue the workflow for this object.
The parameter `start_point` allows you to specify the point of where
the workflow shall continue:
* restart_prev: will restart from the previous task
* continue_next: will continue to the next task
* restart_task: will restart the current task
:param start_point: where should the workflow start from?
:type start_point: str
:param delayed: should the workflow run asynchronously?
:type delayed: bool
:return: UUID of WorkflowEngine (or AsyncResult). | [
"Continue",
"the",
"workflow",
"for",
"this",
"object",
"."
] | 9c09fd29509a3db975ac2aba337e6760d8cfd3c2 | https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/api.py#L347-L378 | train |
inveniosoftware-contrib/invenio-workflows | invenio_workflows/api.py | WorkflowObject.get_current_task_info | def get_current_task_info(self):
"""Return dictionary of current task function info for this object."""
name = self.model.workflow.name
if not name:
return
current_task = workflows[name].workflow
for step in self.callback_pos:
current_task = current_task[step]
if callable(current_task):
return get_func_info(current_task) | python | def get_current_task_info(self):
"""Return dictionary of current task function info for this object."""
name = self.model.workflow.name
if not name:
return
current_task = workflows[name].workflow
for step in self.callback_pos:
current_task = current_task[step]
if callable(current_task):
return get_func_info(current_task) | [
"def",
"get_current_task_info",
"(",
"self",
")",
":",
"name",
"=",
"self",
".",
"model",
".",
"workflow",
".",
"name",
"if",
"not",
"name",
":",
"return",
"current_task",
"=",
"workflows",
"[",
"name",
"]",
".",
"workflow",
"for",
"step",
"in",
"self",
".",
"callback_pos",
":",
"current_task",
"=",
"current_task",
"[",
"step",
"]",
"if",
"callable",
"(",
"current_task",
")",
":",
"return",
"get_func_info",
"(",
"current_task",
")"
] | Return dictionary of current task function info for this object. | [
"Return",
"dictionary",
"of",
"current",
"task",
"function",
"info",
"for",
"this",
"object",
"."
] | 9c09fd29509a3db975ac2aba337e6760d8cfd3c2 | https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/api.py#L380-L390 | train |
rsgalloway/grit | grit/server/git_http_backend.py | BaseWSGIClass.canned_handlers | def canned_handlers(self, environ, start_response, code = '200', headers = []):
'''
We convert an error code into
certain action over start_response and return a WSGI-compliant payload.
'''
headerbase = [('Content-Type', 'text/plain')]
if headers:
hObj = Headers(headerbase)
for header in headers:
hObj[header[0]] = '; '.join(header[1:])
start_response(self.canned_collection[code], headerbase)
return [''] | python | def canned_handlers(self, environ, start_response, code = '200', headers = []):
'''
We convert an error code into
certain action over start_response and return a WSGI-compliant payload.
'''
headerbase = [('Content-Type', 'text/plain')]
if headers:
hObj = Headers(headerbase)
for header in headers:
hObj[header[0]] = '; '.join(header[1:])
start_response(self.canned_collection[code], headerbase)
return [''] | [
"def",
"canned_handlers",
"(",
"self",
",",
"environ",
",",
"start_response",
",",
"code",
"=",
"'200'",
",",
"headers",
"=",
"[",
"]",
")",
":",
"headerbase",
"=",
"[",
"(",
"'Content-Type'",
",",
"'text/plain'",
")",
"]",
"if",
"headers",
":",
"hObj",
"=",
"Headers",
"(",
"headerbase",
")",
"for",
"header",
"in",
"headers",
":",
"hObj",
"[",
"header",
"[",
"0",
"]",
"]",
"=",
"'; '",
".",
"join",
"(",
"header",
"[",
"1",
":",
"]",
")",
"start_response",
"(",
"self",
".",
"canned_collection",
"[",
"code",
"]",
",",
"headerbase",
")",
"return",
"[",
"''",
"]"
] | We convert an error code into
certain action over start_response and return a WSGI-compliant payload. | [
"We",
"convert",
"an",
"error",
"code",
"into",
"certain",
"action",
"over",
"start_response",
"and",
"return",
"a",
"WSGI",
"-",
"compliant",
"payload",
"."
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/server/git_http_backend.py#L79-L90 | train |
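A Python 3 re-sketch of the canned-response idea above, using only the standard library's wsgiref.headers.Headers. The status table and the header handling follow the record, but this is a simplified illustration rather than the grit server's actual handler.

from wsgiref.headers import Headers

CANNED = {'200': '200 OK', '403': '403 Forbidden', '404': '404 Not Found'}

def canned_response(environ, start_response, code='200', extra_headers=()):
    headers = [('Content-Type', 'text/plain')]
    h = Headers(headers)                      # Headers mutates the underlying list in place
    for name, *values in extra_headers:
        h[name] = '; '.join(values)
    start_response(CANNED[code], headers)
    return [b'']                              # WSGI bodies must be bytes in Python 3

# Quick check with a fake start_response; wiring it into wsgiref.simple_server works the same way.
canned_response({}, lambda status, hdrs: print(status, hdrs), code='404',
                extra_headers=[('Cache-Control', 'no-cache', 'no-store')])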
cloudmesh-cmd3/cmd3 | cmd3/plugins/shell_scope.py | shell_scope.info_shell_scope | def info_shell_scope(self):
"""prints some information about the shell scope"""
Console.ok("{:>20} = {:}".format("ECHO", self.echo))
Console.ok("{:>20} = {:}".format("DEBUG", self.debug))
Console.ok("{:>20} = {:}".format("LOGLEVEL", self.loglevel))
Console.ok("{:>20} = {:}".format("SCOPE", self.active_scope))
Console.ok("{:>20} = {:}".format("SCOPES", self.scopes))
Console.ok("{:>20} = {:}".format("SCOPELESS", self.scopeless))
Console.ok("{:>20} = {:}".format("prompt", self.prompt))
Console.ok("{:>20} = {:}".format("scripts", self.scripts))
Console.ok("{:>20} = {:}".format("variables", self.variables)) | python | def info_shell_scope(self):
"""prints some information about the shell scope"""
Console.ok("{:>20} = {:}".format("ECHO", self.echo))
Console.ok("{:>20} = {:}".format("DEBUG", self.debug))
Console.ok("{:>20} = {:}".format("LOGLEVEL", self.loglevel))
Console.ok("{:>20} = {:}".format("SCOPE", self.active_scope))
Console.ok("{:>20} = {:}".format("SCOPES", self.scopes))
Console.ok("{:>20} = {:}".format("SCOPELESS", self.scopeless))
Console.ok("{:>20} = {:}".format("prompt", self.prompt))
Console.ok("{:>20} = {:}".format("scripts", self.scripts))
Console.ok("{:>20} = {:}".format("variables", self.variables)) | [
"def",
"info_shell_scope",
"(",
"self",
")",
":",
"Console",
".",
"ok",
"(",
"\"{:>20} = {:}\"",
".",
"format",
"(",
"\"ECHO\"",
",",
"self",
".",
"echo",
")",
")",
"Console",
".",
"ok",
"(",
"\"{:>20} = {:}\"",
".",
"format",
"(",
"\"DEBUG\"",
",",
"self",
".",
"debug",
")",
")",
"Console",
".",
"ok",
"(",
"\"{:>20} = {:}\"",
".",
"format",
"(",
"\"LOGLEVEL\"",
",",
"self",
".",
"loglevel",
")",
")",
"Console",
".",
"ok",
"(",
"\"{:>20} = {:}\"",
".",
"format",
"(",
"\"SCOPE\"",
",",
"self",
".",
"active_scope",
")",
")",
"Console",
".",
"ok",
"(",
"\"{:>20} = {:}\"",
".",
"format",
"(",
"\"SCOPES\"",
",",
"self",
".",
"scopes",
")",
")",
"Console",
".",
"ok",
"(",
"\"{:>20} = {:}\"",
".",
"format",
"(",
"\"SCOPELESS\"",
",",
"self",
".",
"scopeless",
")",
")",
"Console",
".",
"ok",
"(",
"\"{:>20} = {:}\"",
".",
"format",
"(",
"\"prompt\"",
",",
"self",
".",
"prompt",
")",
")",
"Console",
".",
"ok",
"(",
"\"{:>20} = {:}\"",
".",
"format",
"(",
"\"scripts\"",
",",
"self",
".",
"scripts",
")",
")",
"Console",
".",
"ok",
"(",
"\"{:>20} = {:}\"",
".",
"format",
"(",
"\"variables\"",
",",
"self",
".",
"variables",
")",
")"
] | prints some information about the shell scope | [
"prints",
"some",
"information",
"about",
"the",
"shell",
"scope"
] | 92e33c96032fd3921f159198a0e57917c4dc34ed | https://github.com/cloudmesh-cmd3/cmd3/blob/92e33c96032fd3921f159198a0e57917c4dc34ed/cmd3/plugins/shell_scope.py#L28-L38 | train |
cloudmesh-cmd3/cmd3 | cmd3/plugins/shell_scope.py | shell_scope.activate_shell_scope | def activate_shell_scope(self):
"""activates the shell scope"""
self.variables = {}
self.prompt = 'cm> '
self.active_scope = ""
self.scopes = []
self.scopeless = ['load', 'info', 'var', 'use', 'quit', 'q', 'help'] | python | def activate_shell_scope(self):
"""activates the shell scope"""
self.variables = {}
self.prompt = 'cm> '
self.active_scope = ""
self.scopes = []
self.scopeless = ['load', 'info', 'var', 'use', 'quit', 'q', 'help'] | [
"def",
"activate_shell_scope",
"(",
"self",
")",
":",
"self",
".",
"variables",
"=",
"{",
"}",
"self",
".",
"prompt",
"=",
"'cm> '",
"self",
".",
"active_scope",
"=",
"\"\"",
"self",
".",
"scopes",
"=",
"[",
"]",
"self",
".",
"scopeless",
"=",
"[",
"'load'",
",",
"'info'",
",",
"'var'",
",",
"'use'",
",",
"'quit'",
",",
"'q'",
",",
"'help'",
"]"
] | activates the shell scope | [
"activates",
"the",
"shell",
"scope"
] | 92e33c96032fd3921f159198a0e57917c4dc34ed | https://github.com/cloudmesh-cmd3/cmd3/blob/92e33c96032fd3921f159198a0e57917c4dc34ed/cmd3/plugins/shell_scope.py#L40-L46 | train |
BernardFW/bernard | src/bernard/middleware/_manager.py | Caller._build_stack | def _build_stack(self) -> List[Callable]:
"""
Generates the stack of functions to call. It looks at the ordered list
of all middlewares and only keeps those which have the method we're
trying to call.
"""
stack = []
for m in self.manager.middlewares:
try:
stack.append(getattr(m(self), self.name))
except AttributeError:
pass
return stack | python | def _build_stack(self) -> List[Callable]:
"""
Generates the stack of functions to call. It looks at the ordered list
of all middlewares and only keeps those which have the method we're
trying to call.
"""
stack = []
for m in self.manager.middlewares:
try:
stack.append(getattr(m(self), self.name))
except AttributeError:
pass
return stack | [
"def",
"_build_stack",
"(",
"self",
")",
"->",
"List",
"[",
"Callable",
"]",
":",
"stack",
"=",
"[",
"]",
"for",
"m",
"in",
"self",
".",
"manager",
".",
"middlewares",
":",
"try",
":",
"stack",
".",
"append",
"(",
"getattr",
"(",
"m",
"(",
"self",
")",
",",
"self",
".",
"name",
")",
")",
"except",
"AttributeError",
":",
"pass",
"return",
"stack"
] | Generates the stack of functions to call. It looks at the ordered list
of all middlewares and only keeps those which have the method we're
trying to call. | [
"Generates",
"the",
"stack",
"of",
"functions",
"to",
"call",
".",
"It",
"looks",
"at",
"the",
"ordered",
"list",
"of",
"all",
"middlewares",
"and",
"only",
"keeps",
"those",
"which",
"have",
"the",
"method",
"we",
"re",
"trying",
"to",
"call",
"."
] | 9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab | https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/middleware/_manager.py#L53-L68 | train |
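A self-contained illustration of the pattern used by _build_stack() above: instantiate each middleware and keep only the bound methods that actually exist, relying on getattr raising AttributeError for the rest. The two middleware classes and the on_message hook are made up for the demo.

class LogIncoming:
    def on_message(self, text):
        return "[log] " + text

class Unrelated:
    # Defines no on_message hook, so it is silently skipped.
    pass

def build_stack(middlewares, name):
    stack = []
    for cls in middlewares:
        try:
            stack.append(getattr(cls(), name))   # AttributeError -> middleware lacks the hook
        except AttributeError:
            pass
    return stack

for fn in build_stack([LogIncoming, Unrelated], "on_message"):
    print(fn("hello"))   # only LogIncoming contributes to the stack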
BernardFW/bernard | src/bernard/middleware/_manager.py | MiddlewareManager.instance | def instance(cls) -> 'MiddlewareManager':
"""
Creates, initializes and returns a unique MiddlewareManager instance.
"""
if cls._instance is None:
cls._instance = cls()
cls._instance.init()
return cls._instance | python | def instance(cls) -> 'MiddlewareManager':
"""
Creates, initializes and returns a unique MiddlewareManager instance.
"""
if cls._instance is None:
cls._instance = cls()
cls._instance.init()
return cls._instance | [
"def",
"instance",
"(",
"cls",
")",
"->",
"'MiddlewareManager'",
":",
"if",
"cls",
".",
"_instance",
"is",
"None",
":",
"cls",
".",
"_instance",
"=",
"cls",
"(",
")",
"cls",
".",
"_instance",
".",
"init",
"(",
")",
"return",
"cls",
".",
"_instance"
] | Creates, initializes and returns a unique MiddlewareManager instance. | [
"Creates",
"initializes",
"and",
"returns",
"a",
"unique",
"MiddlewareManager",
"instance",
"."
] | 9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab | https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/middleware/_manager.py#L124-L132 | train |
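The instance() classmethod above is a lazily-initialised singleton. Below is a stripped-down version of the same pattern, with a generic Manager class standing in for MiddlewareManager:

class Manager:
    _instance = None

    def init(self):
        # Expensive setup would go here; it runs exactly once.
        self.ready = True

    @classmethod
    def instance(cls):
        if cls._instance is None:
            cls._instance = cls()
            cls._instance.init()
        return cls._instance

assert Manager.instance() is Manager.instance()   # same object every time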
BernardFW/bernard | src/bernard/middleware/_manager.py | MiddlewareManager.health_check | def health_check(cls):
"""
Checks that the configuration makes sense.
"""
try:
assert isinstance(settings.MIDDLEWARES, list)
except AssertionError:
yield HealthCheckFail(
'00005',
'The "MIDDLEWARES" configuration key should be assigned '
'to a list',
)
return
for m in settings.MIDDLEWARES:
try:
c = import_class(m)
except (TypeError, ValueError, AttributeError, ImportError):
yield HealthCheckFail(
'00005',
f'Cannot import middleware "{m}"',
)
else:
if not issubclass(c, BaseMiddleware):
yield HealthCheckFail(
'00005',
f'Middleware "{m}" does not implement '
f'"BaseMiddleware"',
) | python | def health_check(cls):
"""
Checks that the configuration makes sense.
"""
try:
assert isinstance(settings.MIDDLEWARES, list)
except AssertionError:
yield HealthCheckFail(
'00005',
'The "MIDDLEWARES" configuration key should be assigned '
'to a list',
)
return
for m in settings.MIDDLEWARES:
try:
c = import_class(m)
except (TypeError, ValueError, AttributeError, ImportError):
yield HealthCheckFail(
'00005',
f'Cannot import middleware "{m}"',
)
else:
if not issubclass(c, BaseMiddleware):
yield HealthCheckFail(
'00005',
f'Middleware "{m}" does not implement '
f'"BaseMiddleware"',
) | [
"def",
"health_check",
"(",
"cls",
")",
":",
"try",
":",
"assert",
"isinstance",
"(",
"settings",
".",
"MIDDLEWARES",
",",
"list",
")",
"except",
"AssertionError",
":",
"yield",
"HealthCheckFail",
"(",
"'00005'",
",",
"'The \"MIDDLEWARES\" configuration key should be assigned '",
"'to a list'",
",",
")",
"return",
"for",
"m",
"in",
"settings",
".",
"MIDDLEWARES",
":",
"try",
":",
"c",
"=",
"import_class",
"(",
"m",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
",",
"AttributeError",
",",
"ImportError",
")",
":",
"yield",
"HealthCheckFail",
"(",
"'00005'",
",",
"f'Cannot import middleware \"{m}\"'",
",",
")",
"else",
":",
"if",
"not",
"issubclass",
"(",
"c",
",",
"BaseMiddleware",
")",
":",
"yield",
"HealthCheckFail",
"(",
"'00005'",
",",
"f'Middleware \"{m}\" does not implement '",
"f'\"BaseMiddleware\"'",
",",
")"
] | Checks that the configuration makes sense. | [
"Checks",
"that",
"the",
"configuration",
"makes",
"sense",
"."
] | 9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab | https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/middleware/_manager.py#L135-L164 | train |
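health_check() above is a generator that yields one HealthCheckFail per configuration problem and yields nothing when the settings look sane. The snippet below shows how such a check is typically consumed; the HealthCheckFail class and the validation rules here are simplified stand-ins, not the bernard implementation.

class HealthCheckFail:
    def __init__(self, code, reason):
        self.code, self.reason = code, reason

def health_check(middleware_paths):
    if not isinstance(middleware_paths, list):
        yield HealthCheckFail("00005", '"MIDDLEWARES" should be a list')
        return
    for m in middleware_paths:
        if not isinstance(m, str):
            yield HealthCheckFail("00005", "cannot import middleware {!r}".format(m))

problems = list(health_check(["myapp.middleware.Logger", 42]))
for p in problems:
    print(p.code, p.reason)   # an empty list means the configuration passed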
BernardFW/bernard | src/bernard/middleware/_manager.py | MiddlewareManager.get | def get(self, name: Text, final: C) -> C:
"""
Get the function to call which will run all middlewares.
:param name: Name of the function to be called
:param final: Function to call at the bottom of the stack (that's the
one provided by the implementer).
:return:
"""
# noinspection PyTypeChecker
return Caller(self, name, final) | python | def get(self, name: Text, final: C) -> C:
"""
Get the function to call which will run all middlewares.
:param name: Name of the function to be called
:param final: Function to call at the bottom of the stack (that's the
one provided by the implementer).
:return:
"""
# noinspection PyTypeChecker
return Caller(self, name, final) | [
"def",
"get",
"(",
"self",
",",
"name",
":",
"Text",
",",
"final",
":",
"C",
")",
"->",
"C",
":",
"# noinspection PyTypeChecker",
"return",
"Caller",
"(",
"self",
",",
"name",
",",
"final",
")"
] | Get the function to call which will run all middlewares.
:param name: Name of the function to be called
:param final: Function to call at the bottom of the stack (that's the
one provided by the implementer).
:return: | [
"Get",
"the",
"function",
"to",
"call",
"which",
"will",
"run",
"all",
"middlewares",
"."
] | 9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab | https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/middleware/_manager.py#L173-L184 | train |
openvax/varlens | varlens/loci_util.py | load_from_args | def load_from_args(args):
"""
Return a Loci object giving the loci specified on the command line.
If no loci-related arguments are specified, return None. This makes it
possible to distinguish an empty set of loci, for example due to filters
removing all loci, from the case where the user didn't specify any
arguments.
"""
if not args.locus:
return None
loci_iterator = (Locus.parse(locus) for locus in args.locus)
# if args.neighbor_offsets:
# loci_iterator = expand_with_neighbors(
# loci_iterator, args.neighbor_offsets)
return Loci(loci_iterator) | python | def load_from_args(args):
"""
Return a Loci object giving the loci specified on the command line.
If no loci-related arguments are specified, return None. This makes it
possible to distinguish an empty set of loci, for example due to filters
removing all loci, from the case where the user didn't specify any
arguments.
"""
if not args.locus:
return None
loci_iterator = (Locus.parse(locus) for locus in args.locus)
# if args.neighbor_offsets:
# loci_iterator = expand_with_neighbors(
# loci_iterator, args.neighbor_offsets)
return Loci(loci_iterator) | [
"def",
"load_from_args",
"(",
"args",
")",
":",
"if",
"not",
"args",
".",
"locus",
":",
"return",
"None",
"loci_iterator",
"=",
"(",
"Locus",
".",
"parse",
"(",
"locus",
")",
"for",
"locus",
"in",
"args",
".",
"locus",
")",
"# if args.neighbor_offsets:",
"# loci_iterator = expand_with_neighbors(",
"# loci_iterator, args.neighbor_offsets)",
"return",
"Loci",
"(",
"loci_iterator",
")"
] | Return a Loci object giving the loci specified on the command line.
If no loci-related arguments are specified, return None. This makes it
possible to distinguish an empty set of loci, for example due to filters
removing all loci, from the case where the user didn't specify any
arguments. | [
"Return",
"a",
"Loci",
"object",
"giving",
"the",
"loci",
"specified",
"on",
"the",
"command",
"line",
"."
] | 715d3ede5893757b2fcba4117515621bca7b1e5d | https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/loci_util.py#L30-L48 | train |
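The docstring above stresses the difference between returning None (no locus arguments at all) and returning an empty collection (arguments given but everything filtered away). A toy version of that contract, using plain strings instead of varlens Locus objects and an invented "#" filter rule:

def load_loci(locus_args):
    if not locus_args:
        return None                   # the user gave no --locus arguments at all
    return [l for l in locus_args if not l.startswith("#")]   # pretend-parse and filter

print(load_loci([]))                   # None: nothing was requested
print(load_loci(["#commented-out"]))   # []: requested, but all loci were filtered out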
polyaxon/hestia | hestia/date_formatter.py | DateTimeFormatter.format_date | def format_date(cls, timestamp):
"""
Creates a string representing the date information provided by the
given `timestamp` object.
"""
if not timestamp:
raise DateTimeFormatterException('timestamp must a valid string {}'.format(timestamp))
return timestamp.strftime(cls.DATE_FORMAT) | python | def format_date(cls, timestamp):
"""
Creates a string representing the date information provided by the
given `timestamp` object.
"""
if not timestamp:
raise DateTimeFormatterException('timestamp must a valid string {}'.format(timestamp))
return timestamp.strftime(cls.DATE_FORMAT) | [
"def",
"format_date",
"(",
"cls",
",",
"timestamp",
")",
":",
"if",
"not",
"timestamp",
":",
"raise",
"DateTimeFormatterException",
"(",
"'timestamp must a valid string {}'",
".",
"format",
"(",
"timestamp",
")",
")",
"return",
"timestamp",
".",
"strftime",
"(",
"cls",
".",
"DATE_FORMAT",
")"
] | Creates a string representing the date information provided by the
given `timestamp` object. | [
"Creates",
"a",
"string",
"representing",
"the",
"date",
"information",
"provided",
"by",
"the",
"given",
"timestamp",
"object",
"."
] | 382ed139cff8bf35c987cfc30a31b72c0d6b808e | https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/date_formatter.py#L30-L38 | train |
polyaxon/hestia | hestia/date_formatter.py | DateTimeFormatter.format_datetime | def format_datetime(cls, timestamp):
"""
Creates a string representing the date and time information provided by
the given `timestamp` object.
"""
if not timestamp:
raise DateTimeFormatterException('timestamp must a valid string {}'.format(timestamp))
return timestamp.strftime(cls.DATETIME_FORMAT) | python | def format_datetime(cls, timestamp):
"""
Creates a string representing the date and time information provided by
the given `timestamp` object.
"""
if not timestamp:
raise DateTimeFormatterException('timestamp must a valid string {}'.format(timestamp))
return timestamp.strftime(cls.DATETIME_FORMAT) | [
"def",
"format_datetime",
"(",
"cls",
",",
"timestamp",
")",
":",
"if",
"not",
"timestamp",
":",
"raise",
"DateTimeFormatterException",
"(",
"'timestamp must a valid string {}'",
".",
"format",
"(",
"timestamp",
")",
")",
"return",
"timestamp",
".",
"strftime",
"(",
"cls",
".",
"DATETIME_FORMAT",
")"
] | Creates a string representing the date and time information provided by
the given `timestamp` object. | [
"Creates",
"a",
"string",
"representing",
"the",
"date",
"and",
"time",
"information",
"provided",
"by",
"the",
"given",
"timestamp",
"object",
"."
] | 382ed139cff8bf35c987cfc30a31b72c0d6b808e | https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/date_formatter.py#L41-L49 | train |
polyaxon/hestia | hestia/date_formatter.py | DateTimeFormatter.extract_date | def extract_date(cls, date_str):
"""
Tries to extract a `datetime` object from the given string, expecting
date information only.
Raises `DateTimeFormatterException` if the extraction fails.
"""
if not date_str:
raise DateTimeFormatterException('date_str must a valid string {}.'.format(date_str))
try:
return cls._extract_timestamp(date_str, cls.DATE_FORMAT)
except (TypeError, ValueError):
raise DateTimeFormatterException('Invalid date string {}.'.format(date_str)) | python | def extract_date(cls, date_str):
"""
Tries to extract a `datetime` object from the given string, expecting
date information only.
Raises `DateTimeFormatterException` if the extraction fails.
"""
if not date_str:
raise DateTimeFormatterException('date_str must a valid string {}.'.format(date_str))
try:
return cls._extract_timestamp(date_str, cls.DATE_FORMAT)
except (TypeError, ValueError):
raise DateTimeFormatterException('Invalid date string {}.'.format(date_str)) | [
"def",
"extract_date",
"(",
"cls",
",",
"date_str",
")",
":",
"if",
"not",
"date_str",
":",
"raise",
"DateTimeFormatterException",
"(",
"'date_str must a valid string {}.'",
".",
"format",
"(",
"date_str",
")",
")",
"try",
":",
"return",
"cls",
".",
"_extract_timestamp",
"(",
"date_str",
",",
"cls",
".",
"DATE_FORMAT",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"raise",
"DateTimeFormatterException",
"(",
"'Invalid date string {}.'",
".",
"format",
"(",
"date_str",
")",
")"
] | Tries to extract a `datetime` object from the given string, expecting
date information only.
Raises `DateTimeFormatterException` if the extraction fails. | [
"Tries",
"to",
"extract",
"a",
"datetime",
"object",
"from",
"the",
"given",
"string",
"expecting",
"date",
"information",
"only",
"."
] | 382ed139cff8bf35c987cfc30a31b72c0d6b808e | https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/date_formatter.py#L52-L65 | train |
polyaxon/hestia | hestia/date_formatter.py | DateTimeFormatter.extract_datetime | def extract_datetime(cls, datetime_str):
"""
Tries to extract a `datetime` object from the given string, including
time information.
Raises `DateTimeFormatterException` if the extraction fails.
"""
if not datetime_str:
raise DateTimeFormatterException('datetime_str must a valid string')
try:
return cls._extract_timestamp(datetime_str, cls.DATETIME_FORMAT)
except (TypeError, ValueError):
raise DateTimeFormatterException('Invalid datetime string {}.'.format(datetime_str)) | python | def extract_datetime(cls, datetime_str):
"""
Tries to extract a `datetime` object from the given string, including
time information.
Raises `DateTimeFormatterException` if the extraction fails.
"""
if not datetime_str:
raise DateTimeFormatterException('datetime_str must a valid string')
try:
return cls._extract_timestamp(datetime_str, cls.DATETIME_FORMAT)
except (TypeError, ValueError):
raise DateTimeFormatterException('Invalid datetime string {}.'.format(datetime_str)) | [
"def",
"extract_datetime",
"(",
"cls",
",",
"datetime_str",
")",
":",
"if",
"not",
"datetime_str",
":",
"raise",
"DateTimeFormatterException",
"(",
"'datetime_str must a valid string'",
")",
"try",
":",
"return",
"cls",
".",
"_extract_timestamp",
"(",
"datetime_str",
",",
"cls",
".",
"DATETIME_FORMAT",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"raise",
"DateTimeFormatterException",
"(",
"'Invalid datetime string {}.'",
".",
"format",
"(",
"datetime_str",
")",
")"
] | Tries to extract a `datetime` object from the given string, including
time information.
Raises `DateTimeFormatterException` if the extraction fails. | [
"Tries",
"to",
"extract",
"a",
"datetime",
"object",
"from",
"the",
"given",
"string",
"including",
"time",
"information",
"."
] | 382ed139cff8bf35c987cfc30a31b72c0d6b808e | https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/date_formatter.py#L68-L81 | train |
polyaxon/hestia | hestia/date_formatter.py | DateTimeFormatter.extract_datetime_hour | def extract_datetime_hour(cls, datetime_str):
"""
Tries to extract a `datetime` object from the given string, including only hours.
Raises `DateTimeFormatterException` if the extraction fails.
"""
if not datetime_str:
raise DateTimeFormatterException('datetime_str must a valid string')
try:
return cls._extract_timestamp(datetime_str, cls.DATETIME_HOUR_FORMAT)
except (TypeError, ValueError):
raise DateTimeFormatterException('Invalid datetime string {}.'.format(datetime_str)) | python | def extract_datetime_hour(cls, datetime_str):
"""
Tries to extract a `datetime` object from the given string, including only hours.
Raises `DateTimeFormatterException` if the extraction fails.
"""
if not datetime_str:
raise DateTimeFormatterException('datetime_str must a valid string')
try:
return cls._extract_timestamp(datetime_str, cls.DATETIME_HOUR_FORMAT)
except (TypeError, ValueError):
raise DateTimeFormatterException('Invalid datetime string {}.'.format(datetime_str)) | [
"def",
"extract_datetime_hour",
"(",
"cls",
",",
"datetime_str",
")",
":",
"if",
"not",
"datetime_str",
":",
"raise",
"DateTimeFormatterException",
"(",
"'datetime_str must a valid string'",
")",
"try",
":",
"return",
"cls",
".",
"_extract_timestamp",
"(",
"datetime_str",
",",
"cls",
".",
"DATETIME_HOUR_FORMAT",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"raise",
"DateTimeFormatterException",
"(",
"'Invalid datetime string {}.'",
".",
"format",
"(",
"datetime_str",
")",
")"
] | Tries to extract a `datetime` object from the given string, including only hours.
Raises `DateTimeFormatterException` if the extraction fails. | [
"Tries",
"to",
"extract",
"a",
"datetime",
"object",
"from",
"the",
"given",
"string",
"including",
"only",
"hours",
"."
] | 382ed139cff8bf35c987cfc30a31b72c0d6b808e | https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/date_formatter.py#L84-L96 | train |
polyaxon/hestia | hestia/date_formatter.py | DateTimeFormatter.extract | def extract(cls, timestamp_str):
"""
Tries to extract a `datetime` object from the given string. First the
datetime format is tried, if it fails, the date format is used for
extraction.
Raises `DateTimeFormatterException` if the extraction fails.
"""
if not timestamp_str:
raise DateTimeFormatterException(
'timestamp_str must a valid string {}'.format(timestamp_str))
if isinstance(timestamp_str, (date, datetime)):
return timestamp_str
try:
return cls.extract_datetime(timestamp_str)
except DateTimeFormatterException:
pass
try:
return cls.extract_datetime_hour(timestamp_str)
except DateTimeFormatterException:
pass
try:
return cls.extract_date(timestamp_str)
except DateTimeFormatterException as e:
raise DateTimeFormatterException(e) | python | def extract(cls, timestamp_str):
"""
Tries to extract a `datetime` object from the given string. First the
datetime format is tried, if it fails, the date format is used for
extraction.
Raises `DateTimeFormatterException` if the extraction fails.
"""
if not timestamp_str:
raise DateTimeFormatterException(
'timestamp_str must a valid string {}'.format(timestamp_str))
if isinstance(timestamp_str, (date, datetime)):
return timestamp_str
try:
return cls.extract_datetime(timestamp_str)
except DateTimeFormatterException:
pass
try:
return cls.extract_datetime_hour(timestamp_str)
except DateTimeFormatterException:
pass
try:
return cls.extract_date(timestamp_str)
except DateTimeFormatterException as e:
raise DateTimeFormatterException(e) | [
"def",
"extract",
"(",
"cls",
",",
"timestamp_str",
")",
":",
"if",
"not",
"timestamp_str",
":",
"raise",
"DateTimeFormatterException",
"(",
"'timestamp_str must a valid string {}'",
".",
"format",
"(",
"timestamp_str",
")",
")",
"if",
"isinstance",
"(",
"timestamp_str",
",",
"(",
"date",
",",
"datetime",
")",
")",
":",
"return",
"timestamp_str",
"try",
":",
"return",
"cls",
".",
"extract_datetime",
"(",
"timestamp_str",
")",
"except",
"DateTimeFormatterException",
":",
"pass",
"try",
":",
"return",
"cls",
".",
"extract_datetime_hour",
"(",
"timestamp_str",
")",
"except",
"DateTimeFormatterException",
":",
"pass",
"try",
":",
"return",
"cls",
".",
"extract_date",
"(",
"timestamp_str",
")",
"except",
"DateTimeFormatterException",
"as",
"e",
":",
"raise",
"DateTimeFormatterException",
"(",
"e",
")"
] | Tries to extract a `datetime` object from the given string. First the
datetime format is tried, if it fails, the date format is used for
extraction.
Raises `DateTimeFormatterException` if the extraction fails. | [
"Tries",
"to",
"extract",
"a",
"datetime",
"object",
"from",
"the",
"given",
"string",
".",
"First",
"the",
"datetime",
"format",
"is",
"tried",
"if",
"it",
"fails",
"the",
"date",
"format",
"is",
"used",
"for",
"extraction",
"."
] | 382ed139cff8bf35c987cfc30a31b72c0d6b808e | https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/date_formatter.py#L99-L127 | train |
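The extract() method above simply tries the datetime, hour and date parsers in order and keeps the first one that succeeds. Below is a self-contained sketch of that cascade using datetime.strptime; the format strings are illustrative guesses, not hestia's actual DATE_FORMAT/DATETIME_FORMAT/DATETIME_HOUR_FORMAT constants.

from datetime import datetime

FORMATS = ("%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M", "%Y-%m-%d")   # datetime, hour, date

def extract(timestamp_str):
    if not timestamp_str:
        raise ValueError("timestamp_str must be a valid string")
    for fmt in FORMATS:
        try:
            return datetime.strptime(timestamp_str, fmt)
        except ValueError:
            continue                  # fall through to the next, looser format
    raise ValueError("Invalid timestamp string {}.".format(timestamp_str))

print(extract("2024-05-01"))            # matched by the date-only format
print(extract("2024-05-01 13:45:00"))   # matched by the full datetime format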
inveniosoftware-contrib/invenio-workflows | invenio_workflows/tasks.py | restart | def restart(uuid, **kwargs):
"""Restart the workflow from a given workflow engine UUID."""
from .worker_engine import restart_worker
return text_type(restart_worker(uuid, **kwargs).uuid) | python | def restart(uuid, **kwargs):
"""Restart the workflow from a given workflow engine UUID."""
from .worker_engine import restart_worker
return text_type(restart_worker(uuid, **kwargs).uuid) | [
"def",
"restart",
"(",
"uuid",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
".",
"worker_engine",
"import",
"restart_worker",
"return",
"text_type",
"(",
"restart_worker",
"(",
"uuid",
",",
"*",
"*",
"kwargs",
")",
".",
"uuid",
")"
] | Restart the workflow from a given workflow engine UUID. | [
"Restart",
"the",
"workflow",
"from",
"a",
"given",
"workflow",
"engine",
"UUID",
"."
] | 9c09fd29509a3db975ac2aba337e6760d8cfd3c2 | https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/tasks.py#L107-L110 | train |
polyaxon/hestia | hestia/imports.py | import_submodules | def import_submodules(context, root_module, path):
"""
Import all submodules and register them in the ``context`` namespace.
>>> import_submodules(locals(), __name__, __path__)
"""
for _, module_name, _ in pkgutil.walk_packages(path, root_module + '.'):
# this causes a Runtime error with model conflicts
# module = loader.find_module(module_name).load_module(module_name)
module = __import__(module_name, globals(), locals(), ['__name__'])
for k, v in vars(module).items():
if not k.startswith('_'):
context[k] = v
context[module_name] = module | python | def import_submodules(context, root_module, path):
"""
Import all submodules and register them in the ``context`` namespace.
>>> import_submodules(locals(), __name__, __path__)
"""
for _, module_name, _ in pkgutil.walk_packages(path, root_module + '.'):
# this causes a Runtime error with model conflicts
# module = loader.find_module(module_name).load_module(module_name)
module = __import__(module_name, globals(), locals(), ['__name__'])
for k, v in vars(module).items():
if not k.startswith('_'):
context[k] = v
context[module_name] = module | [
"def",
"import_submodules",
"(",
"context",
",",
"root_module",
",",
"path",
")",
":",
"for",
"_",
",",
"module_name",
",",
"_",
"in",
"pkgutil",
".",
"walk_packages",
"(",
"path",
",",
"root_module",
"+",
"'.'",
")",
":",
"# this causes a Runtime error with model conflicts",
"# module = loader.find_module(module_name).load_module(module_name)",
"module",
"=",
"__import__",
"(",
"module_name",
",",
"globals",
"(",
")",
",",
"locals",
"(",
")",
",",
"[",
"'__name__'",
"]",
")",
"for",
"k",
",",
"v",
"in",
"vars",
"(",
"module",
")",
".",
"items",
"(",
")",
":",
"if",
"not",
"k",
".",
"startswith",
"(",
"'_'",
")",
":",
"context",
"[",
"k",
"]",
"=",
"v",
"context",
"[",
"module_name",
"]",
"=",
"module"
] | Import all submodules and register them in the ``context`` namespace.
>>> import_submodules(locals(), __name__, __path__) | [
"Import",
"all",
"submodules",
"and",
"register",
"them",
"in",
"the",
"context",
"namespace",
"."
] | 382ed139cff8bf35c987cfc30a31b72c0d6b808e | https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/imports.py#L36-L49 | train |
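Because import_submodules() mutates the caller's namespace, its intended call site is a package's __init__.py, as the docstring shows. The standalone demo below runs the same pkgutil.walk_packages loop against the standard-library json package and collects everything into an ordinary dict instead of locals().

import json
import pkgutil

context = {}
for _, module_name, _ in pkgutil.walk_packages(json.__path__, json.__name__ + '.'):
    module = __import__(module_name, globals(), locals(), ['__name__'])
    for k, v in vars(module).items():
        if not k.startswith('_'):
            context[k] = v            # public names land directly in the namespace
    context[module_name] = module     # the submodule itself is registered too

print(sorted(name for name in context if name.startswith('json.')))
# e.g. ['json.decoder', 'json.encoder', 'json.scanner', 'json.tool']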
cloudmesh-cmd3/cmd3 | cmd3/shell.py | command | def command(func):
'''
A decorator to create a function with docopt arguments. It also generates a help function
@command
def do_myfunc(self, args):
""" docopts text """
pass
will create
def do_myfunc(self, args, arguments):
""" docopts text """
...
def help_myfunc(self, args, arguments):
... prints the docopt text ...
:param func: the function for the decorator
'''
classname = inspect.getouterframes(inspect.currentframe())[1][3]
name = func.__name__
help_name = name.replace("do_", "help_")
doc = textwrap.dedent(func.__doc__)
def new(instance, args):
# instance.new.__doc__ = doc
try:
argv = shlex.split(args)
arguments = docopt(doc, help=True, argv=argv)
func(instance, args, arguments)
except SystemExit:
if args not in ('-h', '--help'):
Console.error("Could not execute the command.")
print(doc)
new.__doc__ = doc
return new | python | def command(func):
'''
A decorator to create a function with docopt arguments. It also generates a help function
@command
def do_myfunc(self, args):
""" docopts text """
pass
will create
def do_myfunc(self, args, arguments):
""" docopts text """
...
def help_myfunc(self, args, arguments):
... prints the docopt text ...
:param func: the function for the decorator
'''
classname = inspect.getouterframes(inspect.currentframe())[1][3]
name = func.__name__
help_name = name.replace("do_", "help_")
doc = textwrap.dedent(func.__doc__)
def new(instance, args):
# instance.new.__doc__ = doc
try:
argv = shlex.split(args)
arguments = docopt(doc, help=True, argv=argv)
func(instance, args, arguments)
except SystemExit:
if args not in ('-h', '--help'):
Console.error("Could not execute the command.")
print(doc)
new.__doc__ = doc
return new | [
"def",
"command",
"(",
"func",
")",
":",
"classname",
"=",
"inspect",
".",
"getouterframes",
"(",
"inspect",
".",
"currentframe",
"(",
")",
")",
"[",
"1",
"]",
"[",
"3",
"]",
"name",
"=",
"func",
".",
"__name__",
"help_name",
"=",
"name",
".",
"replace",
"(",
"\"do_\"",
",",
"\"help_\"",
")",
"doc",
"=",
"textwrap",
".",
"dedent",
"(",
"func",
".",
"__doc__",
")",
"def",
"new",
"(",
"instance",
",",
"args",
")",
":",
"# instance.new.__doc__ = doc",
"try",
":",
"argv",
"=",
"shlex",
".",
"split",
"(",
"args",
")",
"arguments",
"=",
"docopt",
"(",
"doc",
",",
"help",
"=",
"True",
",",
"argv",
"=",
"argv",
")",
"func",
"(",
"instance",
",",
"args",
",",
"arguments",
")",
"except",
"SystemExit",
":",
"if",
"args",
"not",
"in",
"(",
"'-h'",
",",
"'--help'",
")",
":",
"Console",
".",
"error",
"(",
"\"Could not execute the command.\"",
")",
"print",
"(",
"doc",
")",
"new",
".",
"__doc__",
"=",
"doc",
"return",
"new"
] | A decorator to create a function with docopt arguments. It also generates a help function
@command
def do_myfunc(self, args):
""" docopts text """
pass
will create
def do_myfunc(self, args, arguments):
""" docopts text """
...
def help_myfunc(self, args, arguments):
... prints the docopt text ...
:param func: the function for the decorator | [
"A",
"decorator",
"to",
"create",
"a",
"function",
"with",
"docopt",
"arguments",
".",
"It",
"also",
"generates",
"a",
"help",
"function"
] | 92e33c96032fd3921f159198a0e57917c4dc34ed | https://github.com/cloudmesh-cmd3/cmd3/blob/92e33c96032fd3921f159198a0e57917c4dc34ed/cmd3/shell.py#L224-L260 | train |
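What a cmd.Cmd shell built on the decorator above typically looks like. This sketch assumes the cmd3 and docopt packages are installed and that the decorator is importable as cmd3.shell.command; the greet command and its Usage block are invented for the example.

import cmd

from cmd3.shell import command   # assumed import path for the decorator in this record

class MyShell(cmd.Cmd):

    @command
    def do_greet(self, args, arguments):
        """
        Usage:
          greet NAME [--shout]

        Options:
          --shout   print the greeting in upper case
        """
        # 'arguments' is the dict docopt builds from the Usage block above.
        text = "hello {}".format(arguments["NAME"])
        print(text.upper() if arguments["--shout"] else text)

if __name__ == "__main__":
    MyShell().onecmd("greet world --shout")   # -> HELLO WORLD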
rsgalloway/grit | grit/repo/version.py | ItemsMixin.addFile | def addFile(self, path, msg=""):
"""Adds a file to the version"""
item = Item.from_path(repo=self.repo, path=path)
self.addItem(item) | python | def addFile(self, path, msg=""):
"""Adds a file to the version"""
item = Item.from_path(repo=self.repo, path=path)
self.addItem(item) | [
"def",
"addFile",
"(",
"self",
",",
"path",
",",
"msg",
"=",
"\"\"",
")",
":",
"item",
"=",
"Item",
".",
"from_path",
"(",
"repo",
"=",
"self",
".",
"repo",
",",
"path",
"=",
"path",
")",
"self",
".",
"addItem",
"(",
"item",
")"
] | Adds a file to the version | [
"Adds",
"a",
"file",
"to",
"the",
"version"
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L32-L35 | train |
rsgalloway/grit | grit/repo/version.py | ItemsMixin.addItem | def addItem(self, item):
"""Adds an item if the tree is mutable"""
try:
self.tree.addItem(item)
except AttributeError, e:
raise VersionError('Saved versions are immutable') | python | def addItem(self, item):
"""Adds an item if the tree is mutable"""
try:
self.tree.addItem(item)
except AttributeError, e:
raise VersionError('Saved versions are immutable') | [
"def",
"addItem",
"(",
"self",
",",
"item",
")",
":",
"try",
":",
"self",
".",
"tree",
".",
"addItem",
"(",
"item",
")",
"except",
"AttributeError",
",",
"e",
":",
"raise",
"VersionError",
"(",
"'Saved versions are immutable'",
")"
] | Adds an item if the tree is mutable | [
"Adds",
"an",
"item",
"if",
"the",
"tree",
"is",
"mutable"
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L37-L42 | train |
rsgalloway/grit | grit/repo/version.py | ItemsMixin.removeItem | def removeItem(self, item):
"""Removes an item if the tree is mutable"""
try:
self.tree.removeItem(item)
except AttributeError, e:
raise VersionError('Saved versions are immutable') | python | def removeItem(self, item):
"""Removes an item if the tree is mutable"""
try:
self.tree.removeItem(item)
except AttributeError, e:
raise VersionError('Saved versions are immutable') | [
"def",
"removeItem",
"(",
"self",
",",
"item",
")",
":",
"try",
":",
"self",
".",
"tree",
".",
"removeItem",
"(",
"item",
")",
"except",
"AttributeError",
",",
"e",
":",
"raise",
"VersionError",
"(",
"'Saved versions are immutable'",
")"
] | Removes an item if the tree is mutable | [
"Removes",
"an",
"item",
"if",
"the",
"tree",
"is",
"mutable"
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L44-L49 | train |
rsgalloway/grit | grit/repo/version.py | ItemsMixin.iteritems | def iteritems(self):
"""Generator that yields Items"""
if self.type in ['blob']:
raise StopIteration
for path, mode, sha in self.tree.iteritems():
item = Item(self, sha, path, mode)
yield item
for i in item.iteritems():
yield i | python | def iteritems(self):
"""Generator that yields Items"""
if self.type in ['blob']:
raise StopIteration
for path, mode, sha in self.tree.iteritems():
item = Item(self, sha, path, mode)
yield item
for i in item.iteritems():
yield i | [
"def",
"iteritems",
"(",
"self",
")",
":",
"if",
"self",
".",
"type",
"in",
"[",
"'blob'",
"]",
":",
"raise",
"StopIteration",
"for",
"path",
",",
"mode",
",",
"sha",
"in",
"self",
".",
"tree",
".",
"iteritems",
"(",
")",
":",
"item",
"=",
"Item",
"(",
"self",
",",
"sha",
",",
"path",
",",
"mode",
")",
"yield",
"item",
"for",
"i",
"in",
"item",
".",
"iteritems",
"(",
")",
":",
"yield",
"i"
] | Generator that yields Items | [
"Generator",
"that",
"yields",
"Items"
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L51-L60 | train |
rsgalloway/grit | grit/repo/version.py | ItemsMixin.items | def items(self, path=None):
"""
Returns set of items.
:param path: Regex filter on item path.
:return: List of Item class objects.
"""
items = list(self.iteritems())
if path is not None:
path += '$'
regex = re.compile(path)
items = [i for i in items if regex.match(i.path)]
return items | python | def items(self, path=None):
"""
Returns set of items.
:param path: Regex filter on item path.
:return: List of Item class objects.
"""
items = list(self.iteritems())
if path is not None:
path += '$'
regex = re.compile(path)
items = [i for i in items if regex.match(i.path)]
return items | [
"def",
"items",
"(",
"self",
",",
"path",
"=",
"None",
")",
":",
"items",
"=",
"list",
"(",
"self",
".",
"iteritems",
"(",
")",
")",
"if",
"path",
"is",
"not",
"None",
":",
"path",
"+=",
"'$'",
"regex",
"=",
"re",
".",
"compile",
"(",
"path",
")",
"items",
"=",
"[",
"i",
"for",
"i",
"in",
"items",
"if",
"regex",
".",
"match",
"(",
"i",
".",
"path",
")",
"]",
"return",
"items"
] | Returns set of items.
:param path: Regex filter on item path.
:return: List of Item class objects. | [
"Returns",
"set",
"of",
"items",
"."
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L63-L76 | train |
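items() above turns the optional path argument into a start-anchored regular expression (re.match) with an appended '$', so the pattern has to cover the whole item path. A standalone illustration of that filtering on plain strings; the paths are invented:

import re

paths = ["shots/sh010/plate.exr", "shots/sh020/plate.exr", "shots/sh010/notes.txt"]

def filter_paths(paths, pattern=None):
    if pattern is None:
        return list(paths)
    regex = re.compile(pattern + "$")           # anchor the end, re.match anchors the start
    return [p for p in paths if regex.match(p)]

print(filter_paths(paths, r"shots/sh010/.*"))   # both sh010 items
print(filter_paths(paths, r".*\.exr"))          # only the plates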
rsgalloway/grit | grit/repo/version.py | Item._get_blob | def _get_blob(self):
"""read blob on access only because get_object is slow"""
if not self.__blob:
self.__blob = self.repo.get_object(self.id)
return self.__blob | python | def _get_blob(self):
"""read blob on access only because get_object is slow"""
if not self.__blob:
self.__blob = self.repo.get_object(self.id)
return self.__blob | [
"def",
"_get_blob",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__blob",
":",
"self",
".",
"__blob",
"=",
"self",
".",
"repo",
".",
"get_object",
"(",
"self",
".",
"id",
")",
"return",
"self",
".",
"__blob"
] | read blob on access only because get_object is slow | [
"read",
"blob",
"on",
"access",
"only",
"because",
"get_object",
"is",
"slow"
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L124-L128 | train |
rsgalloway/grit | grit/repo/version.py | Item.from_path | def from_path(self, repo, path, name=None):
"""
Create a new Item from a file path.
:param repo: Repo object.
:param path: File path.
:param name: Name of item (to override original file name).
:return: New Item class instance.
"""
if name is None:
name = os.path.basename(path)
#FIXME: hack, there has to be a better way
return Item.from_string(repo=repo, name=name, string=open(path).read()) | python | def from_path(self, repo, path, name=None):
"""
Create a new Item from a file path.
:param repo: Repo object.
:param path: File path.
:param name: Name of item (to override original file name).
:return: New Item class instance.
"""
if name is None:
name = os.path.basename(path)
#FIXME: hack, there has to be a better way
return Item.from_string(repo=repo, name=name, string=open(path).read()) | [
"def",
"from_path",
"(",
"self",
",",
"repo",
",",
"path",
",",
"name",
"=",
"None",
")",
":",
"if",
"name",
"is",
"None",
":",
"name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"path",
")",
"#FIXME: hack, there has to be a better way",
"return",
"Item",
".",
"from_string",
"(",
"repo",
"=",
"repo",
",",
"name",
"=",
"name",
",",
"string",
"=",
"open",
"(",
"path",
")",
".",
"read",
"(",
")",
")"
] | Create a new Item from a file path.
:param repo: Repo object.
:param path: File path.
:param name: Name of item (to override original file name).
:return: New Item class instance. | [
"Create",
"a",
"new",
"Item",
"from",
"a",
"file",
"path",
"."
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L146-L159 | train |
rsgalloway/grit | grit/repo/version.py | Item.from_string | def from_string(self, repo, name, string):
"""
Create a new Item from a data stream.
:param repo: Repo object.
:param name: Name of item.
:param string: Data stream.
:return: New Item class instance.
"""
try:
log.debug('Creating new item: %s' % name)
blob = Blob.from_string(string)
item = Item(parent=repo, sha=blob.sha, path=name)
item.blob = blob
return item
except AssertionError, e:
raise ItemError(e) | python | def from_string(self, repo, name, string):
"""
Create a new Item from a data stream.
:param repo: Repo object.
:param name: Name of item.
:param string: Data stream.
:return: New Item class instance.
"""
try:
log.debug('Creating new item: %s' % name)
blob = Blob.from_string(string)
item = Item(parent=repo, sha=blob.sha, path=name)
item.blob = blob
return item
except AssertionError, e:
raise ItemError(e) | [
"def",
"from_string",
"(",
"self",
",",
"repo",
",",
"name",
",",
"string",
")",
":",
"try",
":",
"log",
".",
"debug",
"(",
"'Creating new item: %s'",
"%",
"name",
")",
"blob",
"=",
"Blob",
".",
"from_string",
"(",
"string",
")",
"item",
"=",
"Item",
"(",
"parent",
"=",
"repo",
",",
"sha",
"=",
"blob",
".",
"sha",
",",
"path",
"=",
"name",
")",
"item",
".",
"blob",
"=",
"blob",
"return",
"item",
"except",
"AssertionError",
",",
"e",
":",
"raise",
"ItemError",
"(",
"e",
")"
] | Create a new Item from a data stream.
:param repo: Repo object.
:param name: Name of item.
:param string: Data stream.
:return: New Item class instance. | [
"Create",
"a",
"new",
"Item",
"from",
"a",
"data",
"stream",
"."
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L162-L179 | train |
rsgalloway/grit | grit/repo/version.py | Item.save | def save(self, msg=None):
"""
Modify item data and commit to repo.
Git objects are immutable, to save means adding a new item
:param msg: Commit message.
"""
if msg is None:
msg = 'Saving %s' % self.name
log.debug(msg)
self.repo.addItem(self, msg) | python | def save(self, msg=None):
"""
Modify item data and commit to repo.
Git objects are immutable, to save means adding a new item
:param msg: Commit message.
"""
if msg is None:
msg = 'Saving %s' % self.name
log.debug(msg)
self.repo.addItem(self, msg) | [
"def",
"save",
"(",
"self",
",",
"msg",
"=",
"None",
")",
":",
"if",
"msg",
"is",
"None",
":",
"msg",
"=",
"'Saving %s'",
"%",
"self",
".",
"name",
"log",
".",
"debug",
"(",
"msg",
")",
"self",
".",
"repo",
".",
"addItem",
"(",
"self",
",",
"msg",
")"
] | Modify item data and commit to repo.
Git objects are immutable, to save means adding a new item
:param msg: Commit message. | [
"Modify",
"item",
"data",
"and",
"commit",
"to",
"repo",
".",
"Git",
"objects",
"are",
"immutable",
"to",
"save",
"means",
"adding",
"a",
"new",
"item"
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L193-L203 | train |
rsgalloway/grit | grit/repo/version.py | Item.checkout | def checkout(self, path):
"""
Check out file data to path.
:param path: Filesystem path to check out item to.
:return: True if successful.
"""
if os.path.isdir(path):
path = os.path.join(path, self.name)
try:
log.debug('Checking out %s to %s' %(self.path, path))
f = open(path, 'w')
f.write(self.data())
f.close()
return True
except Exception, e:
raise ItemError(e) | python | def checkout(self, path):
"""
Check out file data to path.
:param path: Filesystem path to check out item to.
:return: True if successful.
"""
if os.path.isdir(path):
path = os.path.join(path, self.name)
try:
log.debug('Checking out %s to %s' %(self.path, path))
f = open(path, 'w')
f.write(self.data())
f.close()
return True
except Exception, e:
raise ItemError(e) | [
"def",
"checkout",
"(",
"self",
",",
"path",
")",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"self",
".",
"name",
")",
"try",
":",
"log",
".",
"debug",
"(",
"'Checking out %s to %s'",
"%",
"(",
"self",
".",
"path",
",",
"path",
")",
")",
"f",
"=",
"open",
"(",
"path",
",",
"'w'",
")",
"f",
".",
"write",
"(",
"self",
".",
"data",
"(",
")",
")",
"f",
".",
"close",
"(",
")",
"return",
"True",
"except",
"Exception",
",",
"e",
":",
"raise",
"ItemError",
"(",
"e",
")"
] | Check out file data to path.
:param path: Filesystem path to check out item to.
:return: True if successful. | [
"Check",
"out",
"file",
"data",
"to",
"path",
"."
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L205-L222 | train |
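A short sketch of writing an item back to disk with Item.checkout; the repo URL, source file, and scratch directory are assumptions:

# hypothetical: export an Item's blob data to a scratch directory
import os
from grit import Repo, Item

repo = Repo('/tmp/project.git')                     # assumed local repo URL
item = Item.from_path(repo=repo, path='/tmp/notes.txt')
if not os.path.isdir('/tmp/export'):
    os.makedirs('/tmp/export')
item.checkout('/tmp/export')                        # target is a directory, so the item's own name is appended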
rsgalloway/grit | grit/repo/version.py | Version.save | def save(self, message):
"""
Add version to repo object store, set repo head to version sha.
:param message: Message string.
"""
self.commit.message = message
self.commit.tree = self.tree
#TODO: store new blobs only
for item in self.tree.items():
self.repo.object_store.add_object(item.blob)
self.repo.object_store.add_object(self.tree)
# set HEAD to new commit
self.repo.object_store.add_object(self.commit)
self.repo.refs['refs/heads/master'] = self.commit.id | python | def save(self, message):
"""
Add version to repo object store, set repo head to version sha.
:param message: Message string.
"""
self.commit.message = message
self.commit.tree = self.tree
#TODO: store new blobs only
for item in self.tree.items():
self.repo.object_store.add_object(item.blob)
self.repo.object_store.add_object(self.tree)
# set HEAD to new commit
self.repo.object_store.add_object(self.commit)
self.repo.refs['refs/heads/master'] = self.commit.id | [
"def",
"save",
"(",
"self",
",",
"message",
")",
":",
"self",
".",
"commit",
".",
"message",
"=",
"message",
"self",
".",
"commit",
".",
"tree",
"=",
"self",
".",
"tree",
"#TODO: store new blobs only",
"for",
"item",
"in",
"self",
".",
"tree",
".",
"items",
"(",
")",
":",
"self",
".",
"repo",
".",
"object_store",
".",
"add_object",
"(",
"item",
".",
"blob",
")",
"self",
".",
"repo",
".",
"object_store",
".",
"add_object",
"(",
"self",
".",
"tree",
")",
"# set HEAD to new commit",
"self",
".",
"repo",
".",
"object_store",
".",
"add_object",
"(",
"self",
".",
"commit",
")",
"self",
".",
"repo",
".",
"refs",
"[",
"'refs/heads/master'",
"]",
"=",
"self",
".",
"commit",
".",
"id"
] | Add version to repo object store, set repo head to version sha.
:param message: Message string. | [
"Add",
"version",
"to",
"repo",
"object",
"store",
"set",
"repo",
"head",
"to",
"version",
"sha",
"."
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L336-L351 | train |
rsgalloway/grit | grit/repo/version.py | Version.new | def new(self, repo):
"""
Create a new version of a repo.Local object.
:param repo: Instance of repo.Local.
:return: New Version instance.
"""
#TODO: subclass Commit, pass parent as init param
try:
# create new commit instance and set metadata
commit = Commit()
author = os.environ.get('USER')
commit.author = commit.committer = author
commit.commit_time = commit.author_time = int(time())
tz = parse_timezone('-0200')[0]
commit.commit_timezone = commit.author_timezone = tz
commit.encoding = "UTF-8"
commit.message = ''
# set previous version as parent to this one
parent = repo.versions(-1)
if parent:
commit.parents = [parent.id]
# create new tree, add entries from previous version
tree = Tree()
curr = repo.versions(-1)
if curr:
for item in curr.items():
tree.addItem(item)
commit.tree = tree.id
# create new version, and add tree
version = Version(repo=repo, commit=commit, tree=tree)
return version
except Exception, e:
traceback.print_exc()
return VersionError(e) | python | def new(self, repo):
"""
Create a new version of a repo.Local object.
:param repo: Instance of repo.Local.
:return: New Version instance.
"""
#TODO: subclass Commit, pass parent as init param
try:
# create new commit instance and set metadata
commit = Commit()
author = os.environ.get('USER')
commit.author = commit.committer = author
commit.commit_time = commit.author_time = int(time())
tz = parse_timezone('-0200')[0]
commit.commit_timezone = commit.author_timezone = tz
commit.encoding = "UTF-8"
commit.message = ''
# set previous version as parent to this one
parent = repo.versions(-1)
if parent:
commit.parents = [parent.id]
# create new tree, add entries from previous version
tree = Tree()
curr = repo.versions(-1)
if curr:
for item in curr.items():
tree.addItem(item)
commit.tree = tree.id
# create new version, and add tree
version = Version(repo=repo, commit=commit, tree=tree)
return version
except Exception, e:
traceback.print_exc()
return VersionError(e) | [
"def",
"new",
"(",
"self",
",",
"repo",
")",
":",
"#TODO: subclass Commit, pass parent as init param",
"try",
":",
"# create new commit instance and set metadata",
"commit",
"=",
"Commit",
"(",
")",
"author",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'USER'",
")",
"commit",
".",
"author",
"=",
"commit",
".",
"committer",
"=",
"author",
"commit",
".",
"commit_time",
"=",
"commit",
".",
"author_time",
"=",
"int",
"(",
"time",
"(",
")",
")",
"tz",
"=",
"parse_timezone",
"(",
"'-0200'",
")",
"[",
"0",
"]",
"commit",
".",
"commit_timezone",
"=",
"commit",
".",
"author_timezone",
"=",
"tz",
"commit",
".",
"encoding",
"=",
"\"UTF-8\"",
"commit",
".",
"message",
"=",
"''",
"# set previous version as parent to this one",
"parent",
"=",
"repo",
".",
"versions",
"(",
"-",
"1",
")",
"if",
"parent",
":",
"commit",
".",
"parents",
"=",
"[",
"parent",
".",
"id",
"]",
"# create new tree, add entries from previous version",
"tree",
"=",
"Tree",
"(",
")",
"curr",
"=",
"repo",
".",
"versions",
"(",
"-",
"1",
")",
"if",
"curr",
":",
"for",
"item",
"in",
"curr",
".",
"items",
"(",
")",
":",
"tree",
".",
"addItem",
"(",
"item",
")",
"commit",
".",
"tree",
"=",
"tree",
".",
"id",
"# create new version, and add tree",
"version",
"=",
"Version",
"(",
"repo",
"=",
"repo",
",",
"commit",
"=",
"commit",
",",
"tree",
"=",
"tree",
")",
"return",
"version",
"except",
"Exception",
",",
"e",
":",
"traceback",
".",
"print_exc",
"(",
")",
"return",
"VersionError",
"(",
"e",
")"
] | Create a new version of a repo.Local object.
:param repo: Instance of repo.Local.
:return: New Version instance. | [
"Create",
"a",
"new",
"version",
"of",
"a",
"repo",
".",
"Local",
"object",
"."
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L354-L393 | train |
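Version.new and Version.save come together in grit's checkin flow; a condensed sketch of that flow, with an invented path and commit message:

# hypothetical publish flow (mirrors the usage in grit's cli.checkin)
from grit import Repo, Item

repo = Repo('/tmp/project.git')                     # assumed local repo URL
version = repo.addVersion()                         # builds the Commit/Tree pair via Version.new
version.addItem(item=Item.from_path(repo=repo, path='/tmp/notes.txt'))
version.save(message='Publishing notes.txt')        # stores blobs, tree, commit; advances refs/heads/master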
rsgalloway/grit | grit/cmd/cli.py | confirm | def confirm(prompt=None, resp=False):
"""
Prompts user for confirmation.
:param prompt: String to display to user.
:param resp: Default response value.
:return: Boolean response from user, or default value.
"""
if prompt is None:
prompt = 'Confirm'
if resp:
prompt = '%s [%s]|%s: ' % (prompt, 'y', 'n')
else:
prompt = '%s [%s]|%s: ' % (prompt, 'n', 'y')
while True:
ans = raw_input(prompt)
if not ans:
return resp
if ans not in ['y', 'Y', 'n', 'N']:
print 'please enter y or n.'
continue
if ans == 'y' or ans == 'Y':
return True
if ans == 'n' or ans == 'N':
return False | python | def confirm(prompt=None, resp=False):
"""
Prompts user for confirmation.
:param prompt: String to display to user.
:param resp: Default response value.
:return: Boolean response from user, or default value.
"""
if prompt is None:
prompt = 'Confirm'
if resp:
prompt = '%s [%s]|%s: ' % (prompt, 'y', 'n')
else:
prompt = '%s [%s]|%s: ' % (prompt, 'n', 'y')
while True:
ans = raw_input(prompt)
if not ans:
return resp
if ans not in ['y', 'Y', 'n', 'N']:
print 'please enter y or n.'
continue
if ans == 'y' or ans == 'Y':
return True
if ans == 'n' or ans == 'N':
return False | [
"def",
"confirm",
"(",
"prompt",
"=",
"None",
",",
"resp",
"=",
"False",
")",
":",
"if",
"prompt",
"is",
"None",
":",
"prompt",
"=",
"'Confirm'",
"if",
"resp",
":",
"prompt",
"=",
"'%s [%s]|%s: '",
"%",
"(",
"prompt",
",",
"'y'",
",",
"'n'",
")",
"else",
":",
"prompt",
"=",
"'%s [%s]|%s: '",
"%",
"(",
"prompt",
",",
"'n'",
",",
"'y'",
")",
"while",
"True",
":",
"ans",
"=",
"raw_input",
"(",
"prompt",
")",
"if",
"not",
"ans",
":",
"return",
"resp",
"if",
"ans",
"not",
"in",
"[",
"'y'",
",",
"'Y'",
",",
"'n'",
",",
"'N'",
"]",
":",
"print",
"'please enter y or n.'",
"continue",
"if",
"ans",
"==",
"'y'",
"or",
"ans",
"==",
"'Y'",
":",
"return",
"True",
"if",
"ans",
"==",
"'n'",
"or",
"ans",
"==",
"'N'",
":",
"return",
"False"
] | Prompts user for confirmation.
:param prompt: String to display to user.
:param resp: Default response value.
:return: Boolean response from user, or default value. | [
"Prompts",
"user",
"for",
"confirmation",
"."
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/cmd/cli.py#L17-L44 | train |
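A minimal sketch of guarding a destructive step with confirm(); the operation being guarded is a stand-in:

# hypothetical guard built around confirm(); defaults to "no" unless the user answers y/Y
from grit.cmd.cli import confirm

def purge_old_versions():
    pass                                            # stand-in for the real destructive operation

if confirm(prompt='Purge all old versions?', resp=False):
    purge_old_versions()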
rsgalloway/grit | grit/cmd/cli.py | prompt | def prompt(name, default):
"""
Prompts user for raw input.
:return: Raw input value from user.
"""
value = raw_input('%s [%s]: ' %(name, default))
if not value:
value = default
return value | python | def prompt(name, default):
"""
Prompts user for raw input.
:return: Raw input value from user.
"""
value = raw_input('%s [%s]: ' %(name, default))
if not value:
value = default
return value | [
"def",
"prompt",
"(",
"name",
",",
"default",
")",
":",
"value",
"=",
"raw_input",
"(",
"'%s [%s]: '",
"%",
"(",
"name",
",",
"default",
")",
")",
"if",
"not",
"value",
":",
"value",
"=",
"default",
"return",
"value"
] | Prompts user for raw input.
:return: Raw input value from user. | [
"Prompts",
"user",
"for",
"raw",
"input",
"."
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/cmd/cli.py#L46-L55 | train |
rsgalloway/grit | grit/cmd/cli.py | new | def new(url):
"""
Creates a new Repo class instance at url.
:param url: URL of new repo
:return: new Repo class instance.
"""
from grit import Repo
return Repo.new(url=url, bare=True) | python | def new(url):
"""
Creates a new Repo class instance at url.
:param url: URL of new repo
:return: new Repo class instance.
"""
from grit import Repo
return Repo.new(url=url, bare=True) | [
"def",
"new",
"(",
"url",
")",
":",
"from",
"grit",
"import",
"Repo",
"return",
"Repo",
".",
"new",
"(",
"url",
"=",
"url",
",",
"bare",
"=",
"True",
")"
] | Creates a new Repo class instance at url.
:param url: URL of new repo
:return: new Repo class instance. | [
"Creates",
"a",
"new",
"Repo",
"class",
"instance",
"at",
"url",
"."
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/cmd/cli.py#L57-L66 | train |
rsgalloway/grit | grit/cmd/cli.py | checkout | def checkout(url, version=None):
"""
Checks out latest version of item or repository.
:param url: URL of repo or item to check out.
:param version: Version number to check out.
"""
from grit import Repo
r = Repo(url)
def _write(item):
log.debug('writing: %s' % item.name)
if item.type != 'blob':
return
if r.type in ['repo', 'proxy', 'local']:
path = os.path.join(r.name, item.path)
pdir = os.path.dirname(path)
if not os.path.isdir(pdir):
os.makedirs(pdir)
else:
path = item.name
f = open(path, 'w')
f.write(item.data())
f.close()
if r.type == 'blob':
_write(r)
else:
items = r.items()
count = 1
total = len(items)
while count <= total:
print '[%s/%s] %0.2f%%' %(count, total, (float(count) / total) * 100), '*'*count, '\r',
_write(items[count-1])
count += 1
sys.stdout.flush()
print | python | def checkout(url, version=None):
"""
Checks out latest version of item or repository.
:param url: URL of repo or item to check out.
:param version: Version number to check out.
"""
from grit import Repo
r = Repo(url)
def _write(item):
log.debug('writing: %s' % item.name)
if item.type != 'blob':
return
if r.type in ['repo', 'proxy', 'local']:
path = os.path.join(r.name, item.path)
pdir = os.path.dirname(path)
if not os.path.isdir(pdir):
os.makedirs(pdir)
else:
path = item.name
f = open(path, 'w')
f.write(item.data())
f.close()
if r.type == 'blob':
_write(r)
else:
items = r.items()
count = 1
total = len(items)
while count <= total:
print '[%s/%s] %0.2f%%' %(count, total, (float(count) / total) * 100), '*'*count, '\r',
_write(items[count-1])
count += 1
sys.stdout.flush()
print | [
"def",
"checkout",
"(",
"url",
",",
"version",
"=",
"None",
")",
":",
"from",
"grit",
"import",
"Repo",
"r",
"=",
"Repo",
"(",
"url",
")",
"def",
"_write",
"(",
"item",
")",
":",
"log",
".",
"debug",
"(",
"'writing: %s'",
"%",
"item",
".",
"name",
")",
"if",
"item",
".",
"type",
"!=",
"'blob'",
":",
"return",
"if",
"r",
".",
"type",
"in",
"[",
"'repo'",
",",
"'proxy'",
",",
"'local'",
"]",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"r",
".",
"name",
",",
"item",
".",
"path",
")",
"pdir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"path",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"pdir",
")",
":",
"os",
".",
"makedirs",
"(",
"pdir",
")",
"else",
":",
"path",
"=",
"item",
".",
"name",
"f",
"=",
"open",
"(",
"path",
",",
"'w'",
")",
"f",
".",
"write",
"(",
"item",
".",
"data",
"(",
")",
")",
"f",
".",
"close",
"(",
")",
"if",
"r",
".",
"type",
"==",
"'blob'",
":",
"_write",
"(",
"r",
")",
"else",
":",
"items",
"=",
"r",
".",
"items",
"(",
")",
"count",
"=",
"1",
"total",
"=",
"len",
"(",
"items",
")",
"while",
"count",
"<=",
"total",
":",
"print",
"'[%s/%s] %0.2f%%'",
"%",
"(",
"count",
",",
"total",
",",
"(",
"float",
"(",
"count",
")",
"/",
"total",
")",
"*",
"100",
")",
",",
"'*'",
"*",
"count",
",",
"'\\r'",
",",
"_write",
"(",
"items",
"[",
"count",
"-",
"1",
"]",
")",
"count",
"+=",
"1",
"sys",
".",
"stdout",
".",
"flush",
"(",
")",
"print"
] | Checks out latest version of item or repository.
:param url: URL of repo or item to check out.
:param version: Version number to check out. | [
"Checks",
"out",
"latest",
"version",
"of",
"item",
"or",
"repository",
"."
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/cmd/cli.py#L68-L105 | train |
rsgalloway/grit | grit/cmd/cli.py | checkin | def checkin(url, files, message=None):
"""
Check in files to a repository.
:param url: URL of repo to check files into.
:param message: Optional commit message.
"""
from grit import Repo, Item
r = Repo(url)
if not files:
raise GritError('No files')
def _write(path):
item = Item.from_path(repo=r, path=path)
if r.isLocal():
v.addItem(item=item)
else:
r.upload(filename=os.path.basename(path), filedata=open(path, 'r').read())
if r.isLocal():
v = r.addVersion()
count = 1
total = len(files)
while count <= total:
print '[%s/%s] %0.2f%%' %(count, total, (float(count) / total) * 100), '*'*count, '\r',
_write(os.path.abspath(files[count-1]))
count += 1
sys.stdout.flush()
if message is None:
message = 'Publishing %s' % ', '.join(files)
if r.isLocal():
v.save(message=message)
print | python | def checkin(url, files, message=None):
"""
Check in files to a repository.
:param url: URL of repo to check files into.
:param message: Optional commit message.
"""
from grit import Repo, Item
r = Repo(url)
if not files:
raise GritError('No files')
def _write(path):
item = Item.from_path(repo=r, path=path)
if r.isLocal():
v.addItem(item=item)
else:
r.upload(filename=os.path.basename(path), filedata=open(path, 'r').read())
if r.isLocal():
v = r.addVersion()
count = 1
total = len(files)
while count <= total:
print '[%s/%s] %0.2f%%' %(count, total, (float(count) / total) * 100), '*'*count, '\r',
_write(os.path.abspath(files[count-1]))
count += 1
sys.stdout.flush()
if message is None:
message = 'Publishing %s' % ', '.join(files)
if r.isLocal():
v.save(message=message)
print | [
"def",
"checkin",
"(",
"url",
",",
"files",
",",
"message",
"=",
"None",
")",
":",
"from",
"grit",
"import",
"Repo",
",",
"Item",
"r",
"=",
"Repo",
"(",
"url",
")",
"if",
"not",
"files",
":",
"raise",
"GritError",
"(",
"'No files'",
")",
"def",
"_write",
"(",
"path",
")",
":",
"item",
"=",
"Item",
".",
"from_path",
"(",
"repo",
"=",
"r",
",",
"path",
"=",
"path",
")",
"if",
"r",
".",
"isLocal",
"(",
")",
":",
"v",
".",
"addItem",
"(",
"item",
"=",
"item",
")",
"else",
":",
"r",
".",
"upload",
"(",
"filename",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"path",
")",
",",
"filedata",
"=",
"open",
"(",
"path",
",",
"'r'",
")",
".",
"read",
"(",
")",
")",
"if",
"r",
".",
"isLocal",
"(",
")",
":",
"v",
"=",
"r",
".",
"addVersion",
"(",
")",
"count",
"=",
"1",
"total",
"=",
"len",
"(",
"files",
")",
"while",
"count",
"<=",
"total",
":",
"print",
"'[%s/%s] %0.2f%%'",
"%",
"(",
"count",
",",
"total",
",",
"(",
"float",
"(",
"count",
")",
"/",
"total",
")",
"*",
"100",
")",
",",
"'*'",
"*",
"count",
",",
"'\\r'",
",",
"_write",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"files",
"[",
"count",
"-",
"1",
"]",
")",
")",
"count",
"+=",
"1",
"sys",
".",
"stdout",
".",
"flush",
"(",
")",
"if",
"message",
"is",
"None",
":",
"message",
"=",
"'Publishing %s'",
"%",
"', '",
".",
"join",
"(",
"files",
")",
"if",
"r",
".",
"isLocal",
"(",
")",
":",
"v",
".",
"save",
"(",
"message",
"=",
"message",
")",
"print"
] | Check in files to a repository.
:param url: URL of repo to check files into.
:param message: Optional commit message. | [
"Check",
"in",
"files",
"to",
"a",
"repository",
"."
] | e6434ad8a1f4ac5d0903ebad630c81f8a5164d78 | https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/cmd/cli.py#L107-L140 | train |
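Taken together, the new/checkin/checkout helpers suggest a simple round trip; the local path-style URL, file names, and message here are invented (the module itself is Python 2):

# hypothetical round trip with the cli helpers documented above
from grit.cmd.cli import new, checkin, checkout

new('/tmp/project.git')                             # creates a bare repo at the URL
checkin('/tmp/project.git', ['a.txt', 'b.txt'], message='first drop')
checkout('/tmp/project.git')                        # re-exports items under a directory named after the repo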
transifex/transifex-python-library | txlib/api/base.py | BaseModel.get | def get(cls, **kwargs):
"""Retrieve an object by making a GET request to Transifex.
Each value in `kwargs` that corresponds to a field
defined in `self.url_fields` will be used in the URL path
of the request, so that a particular entry of this model
is identified and retrieved.
Raises:
AttributeError: if not all values for parameters in `url_fields`
are passed as kwargs
txlib.http.exceptions.NotFoundError: if the object with these
attributes is not found on the remote server
txlib.http.exceptions.ServerError subclass: depending on
the particular server response
Example:
# Note: also catch exceptions
>>> obj = MyModel.get(attr1=value1, attr2=value2)
"""
fields = {}
for field in cls.url_fields:
value = kwargs.pop(field, None)
if value is None:
cls._handle_wrong_field(field, ATTR_TYPE_URL)
fields[field] = value
# Create an instance of the model class and make the GET request
model = cls(**fields)
model._populate(**kwargs)
return model | python | def get(cls, **kwargs):
"""Retrieve an object by making a GET request to Transifex.
Each value in `kwargs` that corresponds to a field
defined in `self.url_fields` will be used in the URL path
of the request, so that a particular entry of this model
is identified and retrieved.
Raises:
AttributeError: if not all values for parameters in `url_fields`
are passed as kwargs
txlib.http.exceptions.NotFoundError: if the object with these
attributes is not found on the remote server
txlib.http.exceptions.ServerError subclass: depending on
the particular server response
Example:
# Note: also catch exceptions
>>> obj = MyModel.get(attr1=value1, attr2=value2)
"""
fields = {}
for field in cls.url_fields:
value = kwargs.pop(field, None)
if value is None:
cls._handle_wrong_field(field, ATTR_TYPE_URL)
fields[field] = value
# Create an instance of the model class and make the GET request
model = cls(**fields)
model._populate(**kwargs)
return model | [
"def",
"get",
"(",
"cls",
",",
"*",
"*",
"kwargs",
")",
":",
"fields",
"=",
"{",
"}",
"for",
"field",
"in",
"cls",
".",
"url_fields",
":",
"value",
"=",
"kwargs",
".",
"pop",
"(",
"field",
",",
"None",
")",
"if",
"value",
"is",
"None",
":",
"cls",
".",
"_handle_wrong_field",
"(",
"field",
",",
"ATTR_TYPE_URL",
")",
"fields",
"[",
"field",
"]",
"=",
"value",
"# Create an instance of the model class and make the GET request",
"model",
"=",
"cls",
"(",
"*",
"*",
"fields",
")",
"model",
".",
"_populate",
"(",
"*",
"*",
"kwargs",
")",
"return",
"model"
] | Retrieve an object by making a GET request to Transifex.
Each value in `kwargs` that corresponds to a field
defined in `self.url_fields` will be used in the URL path
of the request, so that a particular entry of this model
is identified and retrieved.
Raises:
AttributeError: if not all values for parameters in `url_fields`
are passed as kwargs
txlib.http.exceptions.NotFoundError: if the object with these
attributes is not found on the remote server
txlib.http.exceptions.ServerError subclass: depending on
the particular server response
Example:
# Note: also catch exceptions
>>> obj = MyModel.get(attr1=value1, attr2=value2) | [
"Retrieve",
"an",
"object",
"by",
"making",
"a",
"GET",
"request",
"to",
"Transifex",
"."
] | 9fea86b718973de35ccca6d54bd1f445c9632406 | https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/base.py#L109-L139 | train |
transifex/transifex-python-library | txlib/api/base.py | BaseModel.save | def save(self, **fields):
"""Save the instance to the remote Transifex server.
If it was pre-populated, it updates the instance on the server,
otherwise it creates a new object.
Any values given in `fields` will be attempted to be saved
on the object. The same goes for any other values already set
to the object by `model_instance.attr = value`.
Raises:
AttributeError: if a given field is not included in
`self.writable_fields`,
"""
for field in fields:
if field in self.writable_fields:
setattr(self, field, fields[field])
else:
self._handle_wrong_field(field, ATTR_TYPE_WRITE)
if self._populated_fields:
self._update(**self._modified_fields)
else:
self._create(**self._modified_fields) | python | def save(self, **fields):
"""Save the instance to the remote Transifex server.
If it was pre-populated, it updates the instance on the server,
otherwise it creates a new object.
Any values given in `fields` will be attempted to be saved
on the object. The same goes for any other values already set
to the object by `model_instance.attr = value`.
Raises:
AttributeError: if a given field is not included in
`self.writable_fields`,
"""
for field in fields:
if field in self.writable_fields:
setattr(self, field, fields[field])
else:
self._handle_wrong_field(field, ATTR_TYPE_WRITE)
if self._populated_fields:
self._update(**self._modified_fields)
else:
self._create(**self._modified_fields) | [
"def",
"save",
"(",
"self",
",",
"*",
"*",
"fields",
")",
":",
"for",
"field",
"in",
"fields",
":",
"if",
"field",
"in",
"self",
".",
"writable_fields",
":",
"setattr",
"(",
"self",
",",
"field",
",",
"fields",
"[",
"field",
"]",
")",
"else",
":",
"self",
".",
"_handle_wrong_field",
"(",
"field",
",",
"ATTR_TYPE_WRITE",
")",
"if",
"self",
".",
"_populated_fields",
":",
"self",
".",
"_update",
"(",
"*",
"*",
"self",
".",
"_modified_fields",
")",
"else",
":",
"self",
".",
"_create",
"(",
"*",
"*",
"self",
".",
"_modified_fields",
")"
] | Save the instance to the remote Transifex server.
If it was pre-populated, it updates the instance on the server,
otherwise it creates a new object.
Any values given in `fields` will be attempted to be saved
on the object. The same goes for any other values already set
to the object by `model_instance.attr = value`.
Raises:
AttributeError: if a given field is not included in
`self.writable_fields`, | [
"Save",
"the",
"instance",
"to",
"the",
"remote",
"Transifex",
"server",
"."
] | 9fea86b718973de35ccca6d54bd1f445c9632406 | https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/base.py#L213-L236 | train |
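The docstring's MyModel example can be filled out slightly; this subclass, its field lists, and the attribute values are assumptions for illustration (the real models also declare the URL templates that _construct_path_to_item relies on):

# hypothetical txlib model subclass to illustrate get() and save()
from txlib.api.base import BaseModel

class MyModel(BaseModel):
    url_fields = ('slug',)                          # values needed to build the item URL
    writable_fields = ('name', 'description')       # attributes save() may send

obj = MyModel.get(slug='demo')                      # builds the path from url_fields, GETs, populates fields
obj.save(name='Demo', description='An example')     # PUT when pre-populated from the server, POST otherwise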
transifex/transifex-python-library | txlib/api/base.py | BaseModel._get | def _get(self, **kwargs):
"""Get the resource from a remote Transifex server."""
path = self._construct_path_to_item()
return self._http.get(path) | python | def _get(self, **kwargs):
"""Get the resource from a remote Transifex server."""
path = self._construct_path_to_item()
return self._http.get(path) | [
"def",
"_get",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"path",
"=",
"self",
".",
"_construct_path_to_item",
"(",
")",
"return",
"self",
".",
"_http",
".",
"get",
"(",
"path",
")"
] | Get the resource from a remote Transifex server. | [
"Get",
"the",
"resource",
"from",
"a",
"remote",
"Transifex",
"server",
"."
] | 9fea86b718973de35ccca6d54bd1f445c9632406 | https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/base.py#L246-L249 | train |
transifex/transifex-python-library | txlib/api/base.py | BaseModel._create | def _create(self, **kwargs):
"""Create a resource in the remote Transifex server."""
path = self._construct_path_to_collection()
# Use the fields for which we have values
for field in self.writable_fields:
try:
value = getattr(self, field)
kwargs[field] = value
except AttributeError:
pass
return self._http.post(path, json.dumps(kwargs)) | python | def _create(self, **kwargs):
"""Create a resource in the remote Transifex server."""
path = self._construct_path_to_collection()
# Use the fields for which we have values
for field in self.writable_fields:
try:
value = getattr(self, field)
kwargs[field] = value
except AttributeError:
pass
return self._http.post(path, json.dumps(kwargs)) | [
"def",
"_create",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"path",
"=",
"self",
".",
"_construct_path_to_collection",
"(",
")",
"# Use the fields for which we have values",
"for",
"field",
"in",
"self",
".",
"writable_fields",
":",
"try",
":",
"value",
"=",
"getattr",
"(",
"self",
",",
"field",
")",
"kwargs",
"[",
"field",
"]",
"=",
"value",
"except",
"AttributeError",
":",
"pass",
"return",
"self",
".",
"_http",
".",
"post",
"(",
"path",
",",
"json",
".",
"dumps",
"(",
"kwargs",
")",
")"
] | Create a resource in the remote Transifex server. | [
"Create",
"a",
"resource",
"in",
"the",
"remote",
"Transifex",
"server",
"."
] | 9fea86b718973de35ccca6d54bd1f445c9632406 | https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/base.py#L251-L262 | train |
transifex/transifex-python-library | txlib/api/base.py | BaseModel._update | def _update(self, **kwargs):
"""Update a resource in a remote Transifex server."""
path = self._construct_path_to_item()
if not kwargs:
return
return self._http.put(path, json.dumps(kwargs)) | python | def _update(self, **kwargs):
"""Update a resource in a remote Transifex server."""
path = self._construct_path_to_item()
if not kwargs:
return
return self._http.put(path, json.dumps(kwargs)) | [
"def",
"_update",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"path",
"=",
"self",
".",
"_construct_path_to_item",
"(",
")",
"if",
"not",
"kwargs",
":",
"return",
"return",
"self",
".",
"_http",
".",
"put",
"(",
"path",
",",
"json",
".",
"dumps",
"(",
"kwargs",
")",
")"
] | Update a resource in a remote Transifex server. | [
"Update",
"a",
"resource",
"in",
"a",
"remote",
"Transifex",
"server",
"."
] | 9fea86b718973de35ccca6d54bd1f445c9632406 | https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/base.py#L264-L269 | train |
transifex/transifex-python-library | txlib/api/base.py | BaseModel._delete | def _delete(self, **kwargs):
"""Delete a resource from a remote Transifex server."""
path = self._construct_path_to_item()
return self._http.delete(path) | python | def _delete(self, **kwargs):
"""Delete a resource from a remote Transifex server."""
path = self._construct_path_to_item()
return self._http.delete(path) | [
"def",
"_delete",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"path",
"=",
"self",
".",
"_construct_path_to_item",
"(",
")",
"return",
"self",
".",
"_http",
".",
"delete",
"(",
"path",
")"
] | Delete a resource from a remote Transifex server. | [
"Delete",
"a",
"resource",
"from",
"a",
"remote",
"Transifex",
"server",
"."
] | 9fea86b718973de35ccca6d54bd1f445c9632406 | https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/base.py#L271-L274 | train |
transifex/transifex-python-library | txlib/api/base.py | BaseModel.get_url_parameters | def get_url_parameters(self):
"""Create a dictionary of parameters used in URLs for this model."""
url_fields = {}
for field in self.url_fields:
url_fields[field] = getattr(self, field)
return url_fields | python | def get_url_parameters(self):
"""Create a dictionary of parameters used in URLs for this model."""
url_fields = {}
for field in self.url_fields:
url_fields[field] = getattr(self, field)
return url_fields | [
"def",
"get_url_parameters",
"(",
"self",
")",
":",
"url_fields",
"=",
"{",
"}",
"for",
"field",
"in",
"self",
".",
"url_fields",
":",
"url_fields",
"[",
"field",
"]",
"=",
"getattr",
"(",
"self",
",",
"field",
")",
"return",
"url_fields"
] | Create a dictionary of parameters used in URLs for this model. | [
"Create",
"a",
"dictionary",
"of",
"parameters",
"used",
"in",
"URLs",
"for",
"this",
"model",
"."
] | 9fea86b718973de35ccca6d54bd1f445c9632406 | https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/base.py#L285-L290 | train |
transifex/transifex-python-library | txlib/api/base.py | BaseModel._handle_wrong_field | def _handle_wrong_field(cls, field_name, field_type):
"""Raise an exception whenever an invalid attribute with
the given name was attempted to be set to or retrieved from
this model class.
Assumes that the given field is invalid, without making any checks.
Also adds an entry to the logs.
"""
if field_type == ATTR_TYPE_READ:
field_type = 'readable'
elif field_type == ATTR_TYPE_WRITE:
field_type = 'writable'
elif field_type == ATTR_TYPE_URL:
field_type = 'URL'
else:
raise AttributeError('Invalid attribute type: {}'.format(
field_type
))
msg = '{} has no {} attribute "{}"'.format(
cls.__name__,
field_type,
field_name
)
_logger.error(msg)
raise AttributeError(msg) | python | def _handle_wrong_field(cls, field_name, field_type):
"""Raise an exception whenever an invalid attribute with
the given name was attempted to be set to or retrieved from
this model class.
Assumes that the given field is invalid, without making any checks.
Also adds an entry to the logs.
"""
if field_type == ATTR_TYPE_READ:
field_type = 'readable'
elif field_type == ATTR_TYPE_WRITE:
field_type = 'writable'
elif field_type == ATTR_TYPE_URL:
field_type = 'URL'
else:
raise AttributeError('Invalid attribute type: {}'.format(
field_type
))
msg = '{} has no {} attribute "{}"'.format(
cls.__name__,
field_type,
field_name
)
_logger.error(msg)
raise AttributeError(msg) | [
"def",
"_handle_wrong_field",
"(",
"cls",
",",
"field_name",
",",
"field_type",
")",
":",
"if",
"field_type",
"==",
"ATTR_TYPE_READ",
":",
"field_type",
"=",
"'readable'",
"elif",
"field_type",
"==",
"ATTR_TYPE_WRITE",
":",
"field_type",
"=",
"'writable'",
"elif",
"field_type",
"==",
"ATTR_TYPE_URL",
":",
"field_type",
"=",
"'URL'",
"else",
":",
"raise",
"AttributeError",
"(",
"'Invalid attribute type: {}'",
".",
"format",
"(",
"field_type",
")",
")",
"msg",
"=",
"'{} has no {} attribute \"{}\"'",
".",
"format",
"(",
"cls",
".",
"__name__",
",",
"field_type",
",",
"field_name",
")",
"_logger",
".",
"error",
"(",
"msg",
")",
"raise",
"AttributeError",
"(",
"msg",
")"
] | Raise an exception whenever an invalid attribute with
the given name was attempted to be set to or retrieved from
this model class.
Assumes that the given field is invalid, without making any checks.
Also adds an entry to the logs. | [
"Raise",
"an",
"exception",
"whenever",
"an",
"invalid",
"attribute",
"with",
"the",
"given",
"name",
"was",
"attempted",
"to",
"be",
"set",
"to",
"or",
"retrieved",
"from",
"this",
"model",
"class",
"."
] | 9fea86b718973de35ccca6d54bd1f445c9632406 | https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/base.py#L307-L333 | train |
peopledoc/mock-services | mock_services/rules.py | update_http_rules | def update_http_rules(rules, content_type='text/plain'):
"""Adds rules to global http mock.
It permits to set mock in a more global way than decorators, cf.:
https://github.com/openstack/requests-mock
Here we assume urls in the passed dict are regex we recompile before adding
a rule.
Rules example:
>>> def fake_duckduckgo_cb(request):
... return 200, {}, 'Coincoin!'
>>> rules = [
{
'method': 'GET',
'status_code': 200,
'text': 'I am watching you',
'url': r'^https://www.google.com/#q='
},
{
'method': 'GET',
'text': fake_duckduckgo_cb,
'url': r'^https://duckduckgo.com/?q='
},
]
"""
for kw in deepcopy(rules):
kw['url'] = re.compile(kw['url'])
# ensure headers dict for at least have a default content type
if 'Content-Type' not in kw.get('headers', {}):
kw['headers'] = dict(kw.get('headers', {}), **{
'Content-Type': content_type,
})
method = kw.pop('method')
url = kw.pop('url')
http_mock.register_uri(method, url, **kw) | python | def update_http_rules(rules, content_type='text/plain'):
"""Adds rules to global http mock.
It permits to set mock in a more global way than decorators, cf.:
https://github.com/openstack/requests-mock
Here we assume urls in the passed dict are regex we recompile before adding
a rule.
Rules example:
>>> def fake_duckduckgo_cb(request):
... return 200, {}, 'Coincoin!'
>>> rules = [
{
'method': 'GET',
'status_code': 200,
'text': 'I am watching you',
'url': r'^https://www.google.com/#q='
},
{
'method': 'GET',
'text': fake_duckduckgo_cb,
'url': r'^https://duckduckgo.com/?q='
},
]
"""
for kw in deepcopy(rules):
kw['url'] = re.compile(kw['url'])
# ensure headers dict for at least have a default content type
if 'Content-Type' not in kw.get('headers', {}):
kw['headers'] = dict(kw.get('headers', {}), **{
'Content-Type': content_type,
})
method = kw.pop('method')
url = kw.pop('url')
http_mock.register_uri(method, url, **kw) | [
"def",
"update_http_rules",
"(",
"rules",
",",
"content_type",
"=",
"'text/plain'",
")",
":",
"for",
"kw",
"in",
"deepcopy",
"(",
"rules",
")",
":",
"kw",
"[",
"'url'",
"]",
"=",
"re",
".",
"compile",
"(",
"kw",
"[",
"'url'",
"]",
")",
"# ensure headers dict for at least have a default content type",
"if",
"'Content-Type'",
"not",
"in",
"kw",
".",
"get",
"(",
"'headers'",
",",
"{",
"}",
")",
":",
"kw",
"[",
"'headers'",
"]",
"=",
"dict",
"(",
"kw",
".",
"get",
"(",
"'headers'",
",",
"{",
"}",
")",
",",
"*",
"*",
"{",
"'Content-Type'",
":",
"content_type",
",",
"}",
")",
"method",
"=",
"kw",
".",
"pop",
"(",
"'method'",
")",
"url",
"=",
"kw",
".",
"pop",
"(",
"'url'",
")",
"http_mock",
".",
"register_uri",
"(",
"method",
",",
"url",
",",
"*",
"*",
"kw",
")"
] | Adds rules to global http mock.
It permits to set mock in a more global way than decorators, cf.:
https://github.com/openstack/requests-mock
Here we assume urls in the passed dict are regex we recompile before adding
a rule.
Rules example:
>>> def fake_duckduckgo_cb(request):
... return 200, {}, 'Coincoin!'
>>> rules = [
{
'method': 'GET',
'status_code': 200,
'text': 'I am watching you',
'url': r'^https://www.google.com/#q='
},
{
'method': 'GET',
'text': fake_duckduckgo_cb,
'url': r'^https://duckduckgo.com/?q='
},
] | [
"Adds",
"rules",
"to",
"global",
"http",
"mock",
"."
] | fd3838280df8869725b538768357435eedf299c1 | https://github.com/peopledoc/mock-services/blob/fd3838280df8869725b538768357435eedf299c1/mock_services/rules.py#L33-L75 | train |
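Reusing the docstring's own example, a compact registration sketch; the import path follows the module location shown in this record, and starting or stopping the underlying mock is outside what is shown here:

# hypothetical rule registration with update_http_rules
from mock_services.rules import update_http_rules

def fake_duckduckgo_cb(request):
    return 200, {}, 'Coincoin!'                     # returns (status_code, headers, body), as in the docstring example

rules = [
    {'method': 'GET', 'status_code': 200, 'text': 'I am watching you',
     'url': r'^https://www.google.com/#q='},
    {'method': 'GET', 'text': fake_duckduckgo_cb,
     'url': r'^https://duckduckgo.com/?q='},
]
update_http_rules(rules)                            # urls are compiled to regexes; text/plain is the default content type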
inveniosoftware-contrib/invenio-workflows | invenio_workflows/utils.py | get_task_history | def get_task_history(last_task):
"""Append last task to task history."""
if hasattr(last_task, 'branch') and last_task.branch:
return
elif hasattr(last_task, 'hide') and last_task.hide:
return
else:
return get_func_info(last_task) | python | def get_task_history(last_task):
"""Append last task to task history."""
if hasattr(last_task, 'branch') and last_task.branch:
return
elif hasattr(last_task, 'hide') and last_task.hide:
return
else:
return get_func_info(last_task) | [
"def",
"get_task_history",
"(",
"last_task",
")",
":",
"if",
"hasattr",
"(",
"last_task",
",",
"'branch'",
")",
"and",
"last_task",
".",
"branch",
":",
"return",
"elif",
"hasattr",
"(",
"last_task",
",",
"'hide'",
")",
"and",
"last_task",
".",
"hide",
":",
"return",
"else",
":",
"return",
"get_func_info",
"(",
"last_task",
")"
] | Append last task to task history. | [
"Append",
"last",
"task",
"to",
"task",
"history",
"."
] | 9c09fd29509a3db975ac2aba337e6760d8cfd3c2 | https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/utils.py#L30-L37 | train |
inveniosoftware-contrib/invenio-workflows | invenio_workflows/utils.py | get_func_info | def get_func_info(func):
"""Retrieve a function's information."""
name = func.__name__
doc = func.__doc__ or ""
try:
nicename = func.description
except AttributeError:
if doc:
nicename = doc.split('\n')[0]
if len(nicename) > 80:
nicename = name
else:
nicename = name
parameters = []
try:
closure = func.func_closure
except AttributeError:
closure = func.__closure__
try:
varnames = func.func_code.co_freevars
except AttributeError:
varnames = func.__code__.co_freevars
if closure:
for index, arg in enumerate(closure):
if not callable(arg.cell_contents):
parameters.append((varnames[index],
text_type(arg.cell_contents)))
return ({
"nicename": nicename,
"doc": doc,
"parameters": parameters,
"name": name,
"time": str(datetime.datetime.now()),
"hostname": socket.gethostname(),
}) | python | def get_func_info(func):
"""Retrieve a function's information."""
name = func.__name__
doc = func.__doc__ or ""
try:
nicename = func.description
except AttributeError:
if doc:
nicename = doc.split('\n')[0]
if len(nicename) > 80:
nicename = name
else:
nicename = name
parameters = []
try:
closure = func.func_closure
except AttributeError:
closure = func.__closure__
try:
varnames = func.func_code.co_freevars
except AttributeError:
varnames = func.__code__.co_freevars
if closure:
for index, arg in enumerate(closure):
if not callable(arg.cell_contents):
parameters.append((varnames[index],
text_type(arg.cell_contents)))
return ({
"nicename": nicename,
"doc": doc,
"parameters": parameters,
"name": name,
"time": str(datetime.datetime.now()),
"hostname": socket.gethostname(),
}) | [
"def",
"get_func_info",
"(",
"func",
")",
":",
"name",
"=",
"func",
".",
"__name__",
"doc",
"=",
"func",
".",
"__doc__",
"or",
"\"\"",
"try",
":",
"nicename",
"=",
"func",
".",
"description",
"except",
"AttributeError",
":",
"if",
"doc",
":",
"nicename",
"=",
"doc",
".",
"split",
"(",
"'\\n'",
")",
"[",
"0",
"]",
"if",
"len",
"(",
"nicename",
")",
">",
"80",
":",
"nicename",
"=",
"name",
"else",
":",
"nicename",
"=",
"name",
"parameters",
"=",
"[",
"]",
"try",
":",
"closure",
"=",
"func",
".",
"func_closure",
"except",
"AttributeError",
":",
"closure",
"=",
"func",
".",
"__closure__",
"try",
":",
"varnames",
"=",
"func",
".",
"func_code",
".",
"co_freevars",
"except",
"AttributeError",
":",
"varnames",
"=",
"func",
".",
"__code__",
".",
"co_freevars",
"if",
"closure",
":",
"for",
"index",
",",
"arg",
"in",
"enumerate",
"(",
"closure",
")",
":",
"if",
"not",
"callable",
"(",
"arg",
".",
"cell_contents",
")",
":",
"parameters",
".",
"append",
"(",
"(",
"varnames",
"[",
"index",
"]",
",",
"text_type",
"(",
"arg",
".",
"cell_contents",
")",
")",
")",
"return",
"(",
"{",
"\"nicename\"",
":",
"nicename",
",",
"\"doc\"",
":",
"doc",
",",
"\"parameters\"",
":",
"parameters",
",",
"\"name\"",
":",
"name",
",",
"\"time\"",
":",
"str",
"(",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
")",
",",
"\"hostname\"",
":",
"socket",
".",
"gethostname",
"(",
")",
",",
"}",
")"
] | Retrieve a function's information. | [
"Retrieve",
"a",
"function",
"s",
"information",
"."
] | 9c09fd29509a3db975ac2aba337e6760d8cfd3c2 | https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/utils.py#L40-L75 | train |
inveniosoftware-contrib/invenio-workflows | invenio_workflows/utils.py | get_workflow_info | def get_workflow_info(func_list):
"""Return function info, go through lists recursively."""
funcs = []
for item in func_list:
if item is None:
continue
if isinstance(item, list):
funcs.append(get_workflow_info(item))
else:
funcs.append(get_func_info(item))
return funcs | python | def get_workflow_info(func_list):
"""Return function info, go through lists recursively."""
funcs = []
for item in func_list:
if item is None:
continue
if isinstance(item, list):
funcs.append(get_workflow_info(item))
else:
funcs.append(get_func_info(item))
return funcs | [
"def",
"get_workflow_info",
"(",
"func_list",
")",
":",
"funcs",
"=",
"[",
"]",
"for",
"item",
"in",
"func_list",
":",
"if",
"item",
"is",
"None",
":",
"continue",
"if",
"isinstance",
"(",
"item",
",",
"list",
")",
":",
"funcs",
".",
"append",
"(",
"get_workflow_info",
"(",
"item",
")",
")",
"else",
":",
"funcs",
".",
"append",
"(",
"get_func_info",
"(",
"item",
")",
")",
"return",
"funcs"
] | Return function info, go through lists recursively. | [
"Return",
"function",
"info",
"go",
"through",
"lists",
"recursively",
"."
] | 9c09fd29509a3db975ac2aba337e6760d8cfd3c2 | https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/utils.py#L78-L88 | train |
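A small sketch of what the introspection helpers return; the toy tasks and their (obj, eng) signature are assumptions, and the time/hostname values obviously vary:

# hypothetical workflow introspection with get_workflow_info
from invenio_workflows.utils import get_workflow_info

def add_metadata(obj, eng):
    """Attach metadata to the record."""

def halt_if_empty(obj, eng):
    """Halt the workflow when the record is empty."""

info = get_workflow_info([add_metadata, [halt_if_empty], None])
# -> [ {...}, [ {...} ] ]   (None entries are skipped, nested lists stay nested)
# each dict holds: nicename, doc, parameters, name, time, hostname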
COALAIP/pycoalaip | coalaip/data_formats.py | _copy_context_into_mutable | def _copy_context_into_mutable(context):
"""Copy a properly formatted context into a mutable data structure.
"""
def make_mutable(val):
if isinstance(val, Mapping):
return dict(val)
else:
return val
if not isinstance(context, (str, Mapping)):
try:
return [make_mutable(val) for val in context]
except TypeError:
pass
return make_mutable(context) | python | def _copy_context_into_mutable(context):
"""Copy a properly formatted context into a mutable data structure.
"""
def make_mutable(val):
if isinstance(val, Mapping):
return dict(val)
else:
return val
if not isinstance(context, (str, Mapping)):
try:
return [make_mutable(val) for val in context]
except TypeError:
pass
return make_mutable(context) | [
"def",
"_copy_context_into_mutable",
"(",
"context",
")",
":",
"def",
"make_mutable",
"(",
"val",
")",
":",
"if",
"isinstance",
"(",
"val",
",",
"Mapping",
")",
":",
"return",
"dict",
"(",
"val",
")",
"else",
":",
"return",
"val",
"if",
"not",
"isinstance",
"(",
"context",
",",
"(",
"str",
",",
"Mapping",
")",
")",
":",
"try",
":",
"return",
"[",
"make_mutable",
"(",
"val",
")",
"for",
"val",
"in",
"context",
"]",
"except",
"TypeError",
":",
"pass",
"return",
"make_mutable",
"(",
"context",
")"
] | Copy a properly formatted context into a mutable data structure. | [
"Copy",
"a",
"properly",
"formatted",
"context",
"into",
"a",
"mutable",
"data",
"structure",
"."
] | cecc8f6ff4733f0525fafcee63647753e832f0be | https://github.com/COALAIP/pycoalaip/blob/cecc8f6ff4733f0525fafcee63647753e832f0be/coalaip/data_formats.py#L17-L31 | train |
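To make the behaviour concrete, a tuple-of-mappings context (the kind a JSON-LD document might carry) comes back as a plain list of dicts; the context values are invented:

# hypothetical call to the private helper above
from coalaip.data_formats import _copy_context_into_mutable

context = ({'@vocab': 'http://example.org/vocab#'}, 'http://schema.org/')
mutable = _copy_context_into_mutable(context)
# -> [{'@vocab': 'http://example.org/vocab#'}, 'http://schema.org/']
#    the tuple becomes a list and each Mapping becomes a plain dict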
seung-lab/EMAnnotationSchemas | emannotationschemas/models.py | make_dataset_models | def make_dataset_models(dataset, schemas_and_tables, metadata_dict = None, version: int = 1, include_contacts=False):
"""make all the models for a dataset
Parameters
----------
dataset: str
name of dataset
table_and_types: list[(schema_name, table_name)]
list of tuples with types and model names to make
metadata_dict:
a dictionary with keys of table_names and values of metadata dicts needed
version: str
version number to use for making models
include_contacts:
option to include the model for cell contacts
Returns
-------
dict
dictionary where keys are table_names and values are sqlalchemy Models
Raises
------
UnknownAnnotationTypeException
If a type is not a valid annotation type
"""
if metadata_dict is None:
metadata_dict={}
validate_types(schemas_and_tables)
dataset_dict = {}
cell_segment_model = make_cell_segment_model(dataset, version=version)
dataset_dict[root_model_name.lower()] = cell_segment_model
for schema_name, table_name in schemas_and_tables:
model_key = table_name
metadata = metadata_dict.get(table_name, None)
dataset_dict[model_key] = make_annotation_model(dataset,
schema_name,
table_name,
table_metadata=metadata,
version=version)
if include_contacts:
contact_model = make_annotation_model_from_schema(dataset,
'contact',
Contact,
version=version)
dataset_dict['contact'] = contact_model
return dataset_dict | python | def make_dataset_models(dataset, schemas_and_tables, metadata_dict = None, version: int = 1, include_contacts=False):
"""make all the models for a dataset
Parameters
----------
dataset: str
name of dataset
table_and_types: list[(schema_name, table_name)]
list of tuples with types and model names to make
metadata_dict:
a dictionary with keys of table_names and values of metadata dicts needed
version: str
version number to use for making models
include_contacts:
option to include the model for cell contacts
Returns
-------
dict
dictionary where keys are table_names and values are sqlalchemy Models
Raises
------
UnknownAnnotationTypeException
If a type is not a valid annotation type
"""
if metadata_dict is None:
metadata_dict={}
validate_types(schemas_and_tables)
dataset_dict = {}
cell_segment_model = make_cell_segment_model(dataset, version=version)
dataset_dict[root_model_name.lower()] = cell_segment_model
for schema_name, table_name in schemas_and_tables:
model_key = table_name
metadata = metadata_dict.get(table_name, None)
dataset_dict[model_key] = make_annotation_model(dataset,
schema_name,
table_name,
table_metadata=metadata,
version=version)
if include_contacts:
contact_model = make_annotation_model_from_schema(dataset,
'contact',
Contact,
version=version)
dataset_dict['contact'] = contact_model
return dataset_dict | [
"def",
"make_dataset_models",
"(",
"dataset",
",",
"schemas_and_tables",
",",
"metadata_dict",
"=",
"None",
",",
"version",
":",
"int",
"=",
"1",
",",
"include_contacts",
"=",
"False",
")",
":",
"if",
"metadata_dict",
"is",
"None",
":",
"metadata_dict",
"=",
"{",
"}",
"validate_types",
"(",
"schemas_and_tables",
")",
"dataset_dict",
"=",
"{",
"}",
"cell_segment_model",
"=",
"make_cell_segment_model",
"(",
"dataset",
",",
"version",
"=",
"version",
")",
"dataset_dict",
"[",
"root_model_name",
".",
"lower",
"(",
")",
"]",
"=",
"cell_segment_model",
"for",
"schema_name",
",",
"table_name",
"in",
"schemas_and_tables",
":",
"model_key",
"=",
"table_name",
"metadata",
"=",
"metadata_dict",
".",
"get",
"(",
"table_name",
",",
"None",
")",
"dataset_dict",
"[",
"model_key",
"]",
"=",
"make_annotation_model",
"(",
"dataset",
",",
"schema_name",
",",
"table_name",
",",
"table_metadata",
"=",
"metadata",
",",
"version",
"=",
"version",
")",
"if",
"include_contacts",
":",
"contact_model",
"=",
"make_annotation_model_from_schema",
"(",
"dataset",
",",
"'contact'",
",",
"Contact",
",",
"version",
"=",
"version",
")",
"dataset_dict",
"[",
"'contact'",
"]",
"=",
"contact_model",
"return",
"dataset_dict"
] | make all the models for a dataset
Parameters
----------
dataset: str
name of dataset
table_and_types: list[(schema_name, table_name)]
list of tuples with types and model names to make
metadata_dict:
a dictionary with keys of table_names and values of metadata dicts needed
version: str
version number to use for making models
include_contacts:
option to include the model for cell contacts
Returns
-------
dict
dictionary where keys are table_names and values are sqlalchemy Models
Raises
------
UnknownAnnotationTypeException
If a type is not a valid annotation type | [
"make",
"all",
"the",
"models",
"for",
"a",
"dataset"
] | ca81eff0f449bd7eb0392e0982db8f3636446a9e | https://github.com/seung-lab/EMAnnotationSchemas/blob/ca81eff0f449bd7eb0392e0982db8f3636446a9e/emannotationschemas/models.py#L112-L158 | train |
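A sketch of driving the model factory; the dataset, schema, and table names are placeholders, and the schema name must be one that validate_types accepts:

# hypothetical model construction for a single annotation table
from emannotationschemas.models import make_dataset_models

models = make_dataset_models(
    dataset='demo_dataset',
    schemas_and_tables=[('synapse', 'demo_synapses')],   # (schema_name, table_name) pairs
    metadata_dict={},                                    # per-table metadata keyed by table_name
    version=1,
    include_contacts=False,
)
synapse_model = models['demo_synapses']                  # models are keyed by table name
# the dict also carries the root cell-segment model under root_model_name.lower()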
dropseed/configyaml | configyaml/config/base.py | AbstractNode._key_name | def _key_name(self): # type: () -> str
"""Return the key referring to this object
The default value is the lower case version of the class name
:rtype: str
"""
if self._key is not None:
return self._key
return self.__class__.__name__.lower() | python | def _key_name(self): # type: () -> str
"""Return the key referring to this object
The default value is the lower case version of the class name
:rtype: str
"""
if self._key is not None:
return self._key
return self.__class__.__name__.lower() | [
"def",
"_key_name",
"(",
"self",
")",
":",
"# type: () -> str",
"if",
"self",
".",
"_key",
"is",
"not",
"None",
":",
"return",
"self",
".",
"_key",
"return",
"self",
".",
"__class__",
".",
"__name__",
".",
"lower",
"(",
")"
] | Return the key referring to this object
The default value is the lower case version of the class name
:rtype: str | [
"Return",
"the",
"key",
"referring",
"to",
"this",
"object"
] | d008f251530d054c2d1fb3e8ac1a9030436134c8 | https://github.com/dropseed/configyaml/blob/d008f251530d054c2d1fb3e8ac1a9030436134c8/configyaml/config/base.py#L77-L87 | train |
dropseed/configyaml | configyaml/config/base.py | AbstractNode._path | def _path(self): # type: () -> str
"""Return the dotted path representation of this object
:rtype: str
"""
if self._parent:
return '{}.{}'.format(self._parent._path(), self._key_name())
return self._key_name() | python | def _path(self): # type: () -> str
"""Return the dotted path representation of this object
:rtype: str
"""
if self._parent:
return '{}.{}'.format(self._parent._path(), self._key_name())
return self._key_name() | [
"def",
"_path",
"(",
"self",
")",
":",
"# type: () -> str",
"if",
"self",
".",
"_parent",
":",
"return",
"'{}.{}'",
".",
"format",
"(",
"self",
".",
"_parent",
".",
"_path",
"(",
")",
",",
"self",
".",
"_key_name",
"(",
")",
")",
"return",
"self",
".",
"_key_name",
"(",
")"
] | Return the dotted path representation of this object
:rtype: str | [
"Return",
"the",
"dotted",
"path",
"representation",
"of",
"this",
"object"
] | d008f251530d054c2d1fb3e8ac1a9030436134c8 | https://github.com/dropseed/configyaml/blob/d008f251530d054c2d1fb3e8ac1a9030436134c8/configyaml/config/base.py#L103-L111 | train |
dropseed/configyaml | configyaml/config/base.py | AbstractNode._add_error | def _add_error(self, *args, **kwargs): # type: () -> None
"""Convenience function to add an error to this object, with line numbers
An error title or description should not accidentally leak self._value, for privacy/redaction purposes.
:rtype: None
"""
if kwargs.get('node', None):
# if node specified and not none
error = ConfigError.create_from_yaml_node(
*args,
**kwargs
)
elif self._value_node:
# default to using the node if we have one
error = ConfigError.create_from_yaml_node(
node=self._value_node,
*args,
**kwargs
)
else:
# no nodes or error_obj to attach
error = ConfigError(*args, **kwargs)
self._errors.append(error) | python | def _add_error(self, *args, **kwargs): # type: () -> None
"""Convenience function to add an error to this object, with line numbers
An error title or description should not accidentally leak self._value, for privacy/redaction purposes.
:rtype: None
"""
if kwargs.get('node', None):
# if node specified and not none
error = ConfigError.create_from_yaml_node(
*args,
**kwargs
)
elif self._value_node:
# default to using the node if we have one
error = ConfigError.create_from_yaml_node(
node=self._value_node,
*args,
**kwargs
)
else:
# no nodes or error_obj to attach
error = ConfigError(*args, **kwargs)
self._errors.append(error) | [
"def",
"_add_error",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# type: () -> None",
"if",
"kwargs",
".",
"get",
"(",
"'node'",
",",
"None",
")",
":",
"# if node specified and not none",
"error",
"=",
"ConfigError",
".",
"create_from_yaml_node",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"elif",
"self",
".",
"_value_node",
":",
"# default to using the node if we have one",
"error",
"=",
"ConfigError",
".",
"create_from_yaml_node",
"(",
"node",
"=",
"self",
".",
"_value_node",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"# no nodes or error_obj to attach",
"error",
"=",
"ConfigError",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"_errors",
".",
"append",
"(",
"error",
")"
] | Convenience function to add an error to this object, with line numbers
An error title or description should not accidentally leak self._value, for privacy/redaction purposes.
:rtype: None | [
"Convenience",
"function",
"to",
"add",
"an",
"error",
"to",
"this",
"object",
"with",
"line",
"numbers"
] | d008f251530d054c2d1fb3e8ac1a9030436134c8 | https://github.com/dropseed/configyaml/blob/d008f251530d054c2d1fb3e8ac1a9030436134c8/configyaml/config/base.py#L113-L137 | train |
dropseed/configyaml | configyaml/config/base.py | AbstractNode._get_descendants_errors | def _get_descendants_errors(self): # type: () -> List(ConfigError)
"""Recursively get errors from descendants
:rtype: list of :ConfigErrors:"""
descendants_errors = []
if hasattr(self, '_children'):
if isinstance(self._children, (list, tuple)):
for c in self._children:
descendants_errors += c._get_all_errors()
elif isinstance(self._children, dict):
for c in self._children.values():
descendants_errors += c._get_all_errors()
return descendants_errors | python | def _get_descendants_errors(self): # type: () -> List(ConfigError)
"""Recursively get errors from descendants
:rtype: list of :ConfigErrors:"""
descendants_errors = []
if hasattr(self, '_children'):
if isinstance(self._children, (list, tuple)):
for c in self._children:
descendants_errors += c._get_all_errors()
elif isinstance(self._children, dict):
for c in self._children.values():
descendants_errors += c._get_all_errors()
return descendants_errors | [
"def",
"_get_descendants_errors",
"(",
"self",
")",
":",
"# type: () -> List(ConfigError)",
"descendants_errors",
"=",
"[",
"]",
"if",
"hasattr",
"(",
"self",
",",
"'_children'",
")",
":",
"if",
"isinstance",
"(",
"self",
".",
"_children",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"for",
"c",
"in",
"self",
".",
"_children",
":",
"descendants_errors",
"+=",
"c",
".",
"_get_all_errors",
"(",
")",
"elif",
"isinstance",
"(",
"self",
".",
"_children",
",",
"dict",
")",
":",
"for",
"c",
"in",
"self",
".",
"_children",
".",
"values",
"(",
")",
":",
"descendants_errors",
"+=",
"c",
".",
"_get_all_errors",
"(",
")",
"return",
"descendants_errors"
] | Recursively get errors from descendants
:rtype: list of :ConfigErrors: | [
"Recursively",
"get",
"errors",
"from",
"descendants"
] | d008f251530d054c2d1fb3e8ac1a9030436134c8 | https://github.com/dropseed/configyaml/blob/d008f251530d054c2d1fb3e8ac1a9030436134c8/configyaml/config/base.py#L139-L152 | train |
dropseed/configyaml | configyaml/config/base.py | AbstractNode._validate | def _validate(self): # type: () -> None
"""Run validation, save errors to object in self._errors"""
# class can specify it's empty obj -- list would have empty of []
self._errors = []
self._validate_type()
if self.is_valid():
self._validate_value() | python | def _validate(self): # type: () -> None
"""Run validation, save errors to object in self._errors"""
# class can specify it's empty obj -- list would have empty of []
self._errors = []
self._validate_type()
if self.is_valid():
self._validate_value() | [
"def",
"_validate",
"(",
"self",
")",
":",
"# type: () -> None",
"# class can specify it's empty obj -- list would have empty of []",
"self",
".",
"_errors",
"=",
"[",
"]",
"self",
".",
"_validate_type",
"(",
")",
"if",
"self",
".",
"is_valid",
"(",
")",
":",
"self",
".",
"_validate_value",
"(",
")"
] | Run validation, save errors to object in self._errors | [
"Run",
"validation",
"save",
"errors",
"to",
"object",
"in",
"self",
".",
"_errors"
] | d008f251530d054c2d1fb3e8ac1a9030436134c8 | https://github.com/dropseed/configyaml/blob/d008f251530d054c2d1fb3e8ac1a9030436134c8/configyaml/config/base.py#L163-L171 | train |
dropseed/configyaml | configyaml/config/base.py | AbstractNode._validate_type | def _validate_type(self): # type: () -> None
"""Validation to ensure value is the correct type"""
if not isinstance(self._value, self._type):
title = '{} has an invalid type'.format(self._key_name())
description = '{} must be a {}'.format(self._key_name(), self._type.__name__)
self._add_error(title=title, description=description) | python | def _validate_type(self): # type: () -> None
"""Validation to ensure value is the correct type"""
if not isinstance(self._value, self._type):
title = '{} has an invalid type'.format(self._key_name())
description = '{} must be a {}'.format(self._key_name(), self._type.__name__)
self._add_error(title=title, description=description) | [
"def",
"_validate_type",
"(",
"self",
")",
":",
"# type: () -> None",
"if",
"not",
"isinstance",
"(",
"self",
".",
"_value",
",",
"self",
".",
"_type",
")",
":",
"title",
"=",
"'{} has an invalid type'",
".",
"format",
"(",
"self",
".",
"_key_name",
"(",
")",
")",
"description",
"=",
"'{} must be a {}'",
".",
"format",
"(",
"self",
".",
"_key_name",
"(",
")",
",",
"self",
".",
"_type",
".",
"__name__",
")",
"self",
".",
"_add_error",
"(",
"title",
"=",
"title",
",",
"description",
"=",
"description",
")"
] | Validation to ensure value is the correct type | [
"Validation",
"to",
"ensure",
"value",
"is",
"the",
"correct",
"type"
] | d008f251530d054c2d1fb3e8ac1a9030436134c8 | https://github.com/dropseed/configyaml/blob/d008f251530d054c2d1fb3e8ac1a9030436134c8/configyaml/config/base.py#L173-L179 | train |
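To make the message format in _validate_type concrete: for a node whose expected _type is dict but whose parsed value is a list, the two format strings above produce the output shown below. The key name and expected type are hypothetical stand-ins for self._key_name() and self._type.

key_name = "pipelines"       # hypothetical return value of self._key_name()
expected_type = dict         # hypothetical self._type
title = '{} has an invalid type'.format(key_name)
description = '{} must be a {}'.format(key_name, expected_type.__name__)
print(title)          # pipelines has an invalid type
print(description)    # pipelines must be a dict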
obilaniu/Nauka | src/nauka/exp/experiment.py | Experiment.haveSnapshots | def haveSnapshots(self):
"""Check if we have at least one snapshot."""
return os.path.islink(self.latestLink) and os.path.isdir(self.latestLink) | python | def haveSnapshots(self):
"""Check if we have at least one snapshot."""
return os.path.islink(self.latestLink) and os.path.isdir(self.latestLink) | [
"def",
"haveSnapshots",
"(",
"self",
")",
":",
"return",
"os",
".",
"path",
".",
"islink",
"(",
"self",
".",
"latestLink",
")",
"and",
"os",
".",
"path",
".",
"isdir",
"(",
"self",
".",
"latestLink",
")"
] | Check if we have at least one snapshot. | [
"Check",
"if",
"we",
"have",
"at",
"least",
"one",
"snapshot",
"."
] | 1492a4f9d204a868c1a8a1d327bd108490b856b4 | https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L71-L73 | train |
obilaniu/Nauka | src/nauka/exp/experiment.py | Experiment.fromScratch | def fromScratch(self):
"""Start a fresh experiment, from scratch.
Returns `self`."""
assert(not os.path.lexists(self.latestLink) or
os.path.islink (self.latestLink))
self.rmR(self.latestLink)
return self | python | def fromScratch(self):
"""Start a fresh experiment, from scratch.
Returns `self`."""
assert(not os.path.lexists(self.latestLink) or
os.path.islink (self.latestLink))
self.rmR(self.latestLink)
return self | [
"def",
"fromScratch",
"(",
"self",
")",
":",
"assert",
"(",
"not",
"os",
".",
"path",
".",
"lexists",
"(",
"self",
".",
"latestLink",
")",
"or",
"os",
".",
"path",
".",
"islink",
"(",
"self",
".",
"latestLink",
")",
")",
"self",
".",
"rmR",
"(",
"self",
".",
"latestLink",
")",
"return",
"self"
] | Start a fresh experiment, from scratch.
Returns `self`. | [
"Start",
"a",
"fresh",
"experiment",
"from",
"scratch",
".",
"Returns",
"self",
"."
] | 1492a4f9d204a868c1a8a1d327bd108490b856b4 | https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L112-L120 | train |
obilaniu/Nauka | src/nauka/exp/experiment.py | Experiment.snapshot | def snapshot (self):
"""Take a snapshot of the experiment.
Returns `self`."""
nextSnapshotNum = self.nextSnapshotNum
nextSnapshotPath = self.getFullPathToSnapshot(nextSnapshotNum)
if os.path.lexists(nextSnapshotPath):
self.rmR(nextSnapshotPath)
self.mkdirp(os.path.join(nextSnapshotPath, ".experiment"))
return self.dump(nextSnapshotPath).__markLatest(nextSnapshotNum) | python | def snapshot (self):
"""Take a snapshot of the experiment.
Returns `self`."""
nextSnapshotNum = self.nextSnapshotNum
nextSnapshotPath = self.getFullPathToSnapshot(nextSnapshotNum)
if os.path.lexists(nextSnapshotPath):
self.rmR(nextSnapshotPath)
self.mkdirp(os.path.join(nextSnapshotPath, ".experiment"))
return self.dump(nextSnapshotPath).__markLatest(nextSnapshotNum) | [
"def",
"snapshot",
"(",
"self",
")",
":",
"nextSnapshotNum",
"=",
"self",
".",
"nextSnapshotNum",
"nextSnapshotPath",
"=",
"self",
".",
"getFullPathToSnapshot",
"(",
"nextSnapshotNum",
")",
"if",
"os",
".",
"path",
".",
"lexists",
"(",
"nextSnapshotPath",
")",
":",
"self",
".",
"rmR",
"(",
"nextSnapshotPath",
")",
"self",
".",
"mkdirp",
"(",
"os",
".",
"path",
".",
"join",
"(",
"nextSnapshotPath",
",",
"\".experiment\"",
")",
")",
"return",
"self",
".",
"dump",
"(",
"nextSnapshotPath",
")",
".",
"__markLatest",
"(",
"nextSnapshotNum",
")"
] | Take a snapshot of the experiment.
Returns `self`. | [
"Take",
"a",
"snapshot",
"of",
"the",
"experiment",
".",
"Returns",
"self",
"."
] | 1492a4f9d204a868c1a8a1d327bd108490b856b4 | https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L136-L146 | train |
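Read together, snapshot(), the latestLink handling and getFullPathToSnapshot() (later in this dump) imply an on-disk layout roughly like the comment sketch below. The concrete directory names are assumptions, since these records do not show how snapDir and latestLink are initialized.

# <work dir>/                        (hypothetical root)
#     <snapDir>/
#         0/  1/  2/ ...             one integer-named directory per snapshot
#             .experiment/           created by snapshot() before dump() fills it
#     <latestLink> -> <snapDir>/2    symlink repointed by __markLatest() after each snapshot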
obilaniu/Nauka | src/nauka/exp/experiment.py | Experiment.rollback | def rollback (self, n=None):
"""Roll back the experiment to the given snapshot number.
Returns `self`."""
if n is None:
if self.haveSnapshots: return self.fromSnapshot(self.latestLink)
else: return self.fromScratch()
elif isinstance(n, int):
loadSnapshotPath = self.getFullPathToSnapshot(n)
assert(os.path.isdir(loadSnapshotPath))
return self.__markLatest(n).fromSnapshot(loadSnapshotPath)
else:
raise ValueError("n must be int, or None!") | python | def rollback (self, n=None):
"""Roll back the experiment to the given snapshot number.
Returns `self`."""
if n is None:
if self.haveSnapshots: return self.fromSnapshot(self.latestLink)
else: return self.fromScratch()
elif isinstance(n, int):
loadSnapshotPath = self.getFullPathToSnapshot(n)
assert(os.path.isdir(loadSnapshotPath))
return self.__markLatest(n).fromSnapshot(loadSnapshotPath)
else:
raise ValueError("n must be int, or None!") | [
"def",
"rollback",
"(",
"self",
",",
"n",
"=",
"None",
")",
":",
"if",
"n",
"is",
"None",
":",
"if",
"self",
".",
"haveSnapshots",
":",
"return",
"self",
".",
"fromSnapshot",
"(",
"self",
".",
"latestLink",
")",
"else",
":",
"return",
"self",
".",
"fromScratch",
"(",
")",
"elif",
"isinstance",
"(",
"n",
",",
"int",
")",
":",
"loadSnapshotPath",
"=",
"self",
".",
"getFullPathToSnapshot",
"(",
"n",
")",
"assert",
"(",
"os",
".",
"path",
".",
"isdir",
"(",
"loadSnapshotPath",
")",
")",
"return",
"self",
".",
"__markLatest",
"(",
"n",
")",
".",
"fromSnapshot",
"(",
"loadSnapshotPath",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"n must be int, or None!\"",
")"
] | Roll back the experiment to the given snapshot number.
Returns `self`. | [
"Roll",
"back",
"the",
"experiment",
"to",
"the",
"given",
"snapshot",
"number",
".",
"Returns",
"self",
"."
] | 1492a4f9d204a868c1a8a1d327bd108490b856b4 | https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L148-L161 | train |
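A hedged usage sketch of the three rollback() branches. MyExperiment, its constructor, and the dump/load/fromSnapshot implementations are hypothetical; Experiment reads like an abstract base that expects a subclass to supply them.

exp = MyExperiment(work_dir="/tmp/exp")   # hypothetical concrete subclass
exp.rollback()       # n=None: resume from the latest snapshot if one exists,
                     #         otherwise start fromScratch()
exp.rollback(7)      # n=7: load snapshot directory "7" and mark it as latest
exp.rollback("7")    # raises ValueError("n must be int, or None!")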
obilaniu/Nauka | src/nauka/exp/experiment.py | Experiment.getFullPathToSnapshot | def getFullPathToSnapshot(self, n):
"""Get the full path to snapshot n."""
return os.path.join(self.snapDir, str(n)) | python | def getFullPathToSnapshot(self, n):
"""Get the full path to snapshot n."""
return os.path.join(self.snapDir, str(n)) | [
"def",
"getFullPathToSnapshot",
"(",
"self",
",",
"n",
")",
":",
"return",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"snapDir",
",",
"str",
"(",
"n",
")",
")"
] | Get the full path to snapshot n. | [
"Get",
"the",
"full",
"path",
"to",
"snapshot",
"n",
"."
] | 1492a4f9d204a868c1a8a1d327bd108490b856b4 | https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L201-L203 | train |
obilaniu/Nauka | src/nauka/exp/experiment.py | Experiment.strategyLastK | def strategyLastK(kls, n, k=10):
"""Return the directory names to preserve under the LastK purge strategy."""
return set(map(str, filter(lambda x:x>=0, range(n, n-k, -1)))) | python | def strategyLastK(kls, n, k=10):
"""Return the directory names to preserve under the LastK purge strategy."""
return set(map(str, filter(lambda x:x>=0, range(n, n-k, -1)))) | [
"def",
"strategyLastK",
"(",
"kls",
",",
"n",
",",
"k",
"=",
"10",
")",
":",
"return",
"set",
"(",
"map",
"(",
"str",
",",
"filter",
"(",
"lambda",
"x",
":",
"x",
">=",
"0",
",",
"range",
"(",
"n",
",",
"n",
"-",
"k",
",",
"-",
"1",
")",
")",
")",
")"
] | Return the directory names to preserve under the LastK purge strategy. | [
"Return",
"the",
"directory",
"names",
"to",
"preserve",
"under",
"the",
"LastK",
"purge",
"strategy",
"."
] | 1492a4f9d204a868c1a8a1d327bd108490b856b4 | https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L215-L217 | train |
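Tracing strategyLastK by hand shows it simply keeps the last k snapshot numbers, clamped at zero. The standalone copy below drops the kls receiver so it can be run directly.

def strategy_last_k(n, k=10):    # same body as the record above, minus `kls`
    return set(map(str, filter(lambda x: x >= 0, range(n, n - k, -1))))

print(strategy_last_k(12))   # the set {'12','11','10','9','8','7','6','5','4','3'}
print(strategy_last_k(3))    # the set {'3','2','1','0'}; negative candidates are filtered out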
obilaniu/Nauka | src/nauka/exp/experiment.py | Experiment.strategyKLogN | def strategyKLogN(kls, n, k=4):
"""Return the directory names to preserve under the KLogN purge strategy."""
assert(k>1)
s = set([n])
i = 0
while k**i <= n:
s.update(range(n, n-k*k**i, -k**i))
i += 1
n -= n % k**i
return set(map(str, filter(lambda x:x>=0, s))) | python | def strategyKLogN(kls, n, k=4):
"""Return the directory names to preserve under the KLogN purge strategy."""
assert(k>1)
s = set([n])
i = 0
while k**i <= n:
s.update(range(n, n-k*k**i, -k**i))
i += 1
n -= n % k**i
return set(map(str, filter(lambda x:x>=0, s))) | [
"def",
"strategyKLogN",
"(",
"kls",
",",
"n",
",",
"k",
"=",
"4",
")",
":",
"assert",
"(",
"k",
">",
"1",
")",
"s",
"=",
"set",
"(",
"[",
"n",
"]",
")",
"i",
"=",
"0",
"while",
"k",
"**",
"i",
"<=",
"n",
":",
"s",
".",
"update",
"(",
"range",
"(",
"n",
",",
"n",
"-",
"k",
"*",
"k",
"**",
"i",
",",
"-",
"k",
"**",
"i",
")",
")",
"i",
"+=",
"1",
"n",
"-=",
"n",
"%",
"k",
"**",
"i",
"return",
"set",
"(",
"map",
"(",
"str",
",",
"filter",
"(",
"lambda",
"x",
":",
"x",
">=",
"0",
",",
"s",
")",
")",
")"
] | Return the directory names to preserve under the KLogN purge strategy. | [
"Return",
"the",
"directory",
"names",
"to",
"preserve",
"under",
"the",
"KLogN",
"purge",
"strategy",
"."
] | 1492a4f9d204a868c1a8a1d327bd108490b856b4 | https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L220-L231 | train |
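The same exercise for strategyKLogN: recent snapshots are kept densely and older ones at exponentially coarser spacing. Below is a standalone copy of the body plus a hand trace for n=20, k=4; treat it as a worked example rather than documentation of guaranteed behaviour.

def strategy_k_log_n(n, k=4):    # same body as the record above, minus `kls`
    assert k > 1
    s = set([n])
    i = 0
    while k**i <= n:
        s.update(range(n, n - k * k**i, -k**i))
        i += 1
        n -= n % k**i
    return set(map(str, filter(lambda x: x >= 0, s)))

# n=20, k=4:
#   i=0: 20, 19, 18, 17      (spacing 1)
#   i=1: 20, 16, 12, 8       (spacing 4)
#   i=2: 16, 0               (spacing 16; negative candidates dropped)
# result: the set {'20','19','18','17','16','12','8','0'}
print(strategy_k_log_n(20))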
obilaniu/Nauka | src/nauka/exp/experiment.py | Experiment.listSnapshotDir | def listSnapshotDir(kls, path):
"""Return the set of snapshot directories and non-snapshot directories
under the given path."""
snapshotSet = set()
nonsnapshotSet = set()
try:
entryList = os.listdir(path)
for e in entryList:
if kls.isFilenameInteger(e): snapshotSet .add(e)
else: nonsnapshotSet.add(e)
except FileNotFoundError: pass
finally:
return snapshotSet, nonsnapshotSet | python | def listSnapshotDir(kls, path):
"""Return the set of snapshot directories and non-snapshot directories
under the given path."""
snapshotSet = set()
nonsnapshotSet = set()
try:
entryList = os.listdir(path)
for e in entryList:
if kls.isFilenameInteger(e): snapshotSet .add(e)
else: nonsnapshotSet.add(e)
except FileNotFoundError: pass
finally:
return snapshotSet, nonsnapshotSet | [
"def",
"listSnapshotDir",
"(",
"kls",
",",
"path",
")",
":",
"snapshotSet",
"=",
"set",
"(",
")",
"nonsnapshotSet",
"=",
"set",
"(",
")",
"try",
":",
"entryList",
"=",
"os",
".",
"listdir",
"(",
"path",
")",
"for",
"e",
"in",
"entryList",
":",
"if",
"kls",
".",
"isFilenameInteger",
"(",
"e",
")",
":",
"snapshotSet",
".",
"add",
"(",
"e",
")",
"else",
":",
"nonsnapshotSet",
".",
"add",
"(",
"e",
")",
"except",
"FileNotFoundError",
":",
"pass",
"finally",
":",
"return",
"snapshotSet",
",",
"nonsnapshotSet"
] | Return the set of snapshot directories and non-snapshot directories
under the given path. | [
"Return",
"the",
"set",
"of",
"snapshot",
"directories",
"and",
"non",
"-",
"snapshot",
"directories",
"under",
"the",
"given",
"path",
"."
] | 1492a4f9d204a868c1a8a1d327bd108490b856b4 | https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L249-L261 | train |
obilaniu/Nauka | src/nauka/exp/experiment.py | Experiment.rmR | def rmR(kls, path):
"""`rm -R path`. Deletes, but does not recurse into, symlinks.
If the path does not exist, silently return."""
if os.path.islink(path) or os.path.isfile(path):
os.unlink(path)
elif os.path.isdir(path):
walker = os.walk(path, topdown=False, followlinks=False)
for dirpath, dirnames, filenames in walker:
for f in filenames:
os.unlink(os.path.join(dirpath, f))
for d in dirnames:
os.rmdir (os.path.join(dirpath, d))
os.rmdir(path) | python | def rmR(kls, path):
"""`rm -R path`. Deletes, but does not recurse into, symlinks.
If the path does not exist, silently return."""
if os.path.islink(path) or os.path.isfile(path):
os.unlink(path)
elif os.path.isdir(path):
walker = os.walk(path, topdown=False, followlinks=False)
for dirpath, dirnames, filenames in walker:
for f in filenames:
os.unlink(os.path.join(dirpath, f))
for d in dirnames:
os.rmdir (os.path.join(dirpath, d))
os.rmdir(path) | [
"def",
"rmR",
"(",
"kls",
",",
"path",
")",
":",
"if",
"os",
".",
"path",
".",
"islink",
"(",
"path",
")",
"or",
"os",
".",
"path",
".",
"isfile",
"(",
"path",
")",
":",
"os",
".",
"unlink",
"(",
"path",
")",
"elif",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"walker",
"=",
"os",
".",
"walk",
"(",
"path",
",",
"topdown",
"=",
"False",
",",
"followlinks",
"=",
"False",
")",
"for",
"dirpath",
",",
"dirnames",
",",
"filenames",
"in",
"walker",
":",
"for",
"f",
"in",
"filenames",
":",
"os",
".",
"unlink",
"(",
"os",
".",
"path",
".",
"join",
"(",
"dirpath",
",",
"f",
")",
")",
"for",
"d",
"in",
"dirnames",
":",
"os",
".",
"rmdir",
"(",
"os",
".",
"path",
".",
"join",
"(",
"dirpath",
",",
"d",
")",
")",
"os",
".",
"rmdir",
"(",
"path",
")"
] | `rm -R path`. Deletes, but does not recurse into, symlinks.
If the path does not exist, silently return. | [
"rm",
"-",
"R",
"path",
".",
"Deletes",
"but",
"does",
"not",
"recurse",
"into",
"symlinks",
".",
"If",
"the",
"path",
"does",
"not",
"exist",
"silently",
"return",
"."
] | 1492a4f9d204a868c1a8a1d327bd108490b856b4 | https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L264-L276 | train |
obilaniu/Nauka | src/nauka/exp/experiment.py | Experiment.atomicSymlink | def atomicSymlink(kls, target, name):
"""Same syntax as os.symlink, except that the new link called `name`
will first be created with the `name` and `target`
`name.ATOMIC` -> `target`
, then be atomically renamed to
`name` -> `target`
, thus overwriting any previous symlink there. If a filesystem entity
called `name.ATOMIC` already exists, it will be forcibly removed.
"""
linkAtomicName = name+".ATOMIC"
linkFinalName = name
linkTarget = target
if os.path.lexists(linkAtomicName):
kls.rmR(linkAtomicName)
os.symlink(linkTarget, linkAtomicName)
################################################
######## FILESYSTEM LINEARIZATION POINT ########
######## vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv ########
os.rename (linkAtomicName, linkFinalName) | python | def atomicSymlink(kls, target, name):
"""Same syntax as os.symlink, except that the new link called `name`
will first be created with the `name` and `target`
`name.ATOMIC` -> `target`
, then be atomically renamed to
`name` -> `target`
, thus overwriting any previous symlink there. If a filesystem entity
called `name.ATOMIC` already exists, it will be forcibly removed.
"""
linkAtomicName = name+".ATOMIC"
linkFinalName = name
linkTarget = target
if os.path.lexists(linkAtomicName):
kls.rmR(linkAtomicName)
os.symlink(linkTarget, linkAtomicName)
################################################
######## FILESYSTEM LINEARIZATION POINT ########
######## vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv ########
os.rename (linkAtomicName, linkFinalName) | [
"def",
"atomicSymlink",
"(",
"kls",
",",
"target",
",",
"name",
")",
":",
"linkAtomicName",
"=",
"name",
"+",
"\".ATOMIC\"",
"linkFinalName",
"=",
"name",
"linkTarget",
"=",
"target",
"if",
"os",
".",
"path",
".",
"lexists",
"(",
"linkAtomicName",
")",
":",
"kls",
".",
"rmR",
"(",
"linkAtomicName",
")",
"os",
".",
"symlink",
"(",
"linkTarget",
",",
"linkAtomicName",
")",
"################################################",
"######## FILESYSTEM LINEARIZATION POINT ########",
"######## vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv ########",
"os",
".",
"rename",
"(",
"linkAtomicName",
",",
"linkFinalName",
")"
] | Same syntax as os.symlink, except that the new link called `name`
will first be created with the `name` and `target`
`name.ATOMIC` -> `target`
, then be atomically renamed to
`name` -> `target`
, thus overwriting any previous symlink there. If a filesystem entity
called `name.ATOMIC` already exists, it will be forcibly removed. | [
"Same",
"syntax",
"as",
"os",
".",
"symlink",
"except",
"that",
"the",
"new",
"link",
"called",
"name",
"will",
"first",
"be",
"created",
"with",
"the",
"name",
"and",
"target",
"name",
".",
"ATOMIC",
"-",
">",
"target",
"then",
"be",
"atomically",
"renamed",
"to",
"name",
"-",
">",
"target",
"thus",
"overwriting",
"any",
"previous",
"symlink",
"there",
".",
"If",
"a",
"filesystem",
"entity",
"called",
"name",
".",
"ATOMIC",
"already",
"exists",
"it",
"will",
"be",
"forcibly",
"removed",
"."
] | 1492a4f9d204a868c1a8a1d327bd108490b856b4 | https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L279-L304 | train |
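The create-then-rename idiom in atomicSymlink is worth restating on its own. This is a minimal standalone sketch of the same pattern; it uses os.unlink instead of the class's rmR helper, so it only cleans up a stale temporary link rather than arbitrary leftovers.

import os

def atomic_symlink(target, name):
    tmp = name + ".ATOMIC"
    if os.path.lexists(tmp):
        os.unlink(tmp)          # simplified cleanup of a leftover temporary link
    os.symlink(target, tmp)     # name.ATOMIC -> target
    os.rename(tmp, name)        # atomic on POSIX: readers see the old or the new link, never neither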
azogue/i2csense | i2csense/bme280.py | BME280._compensate_temperature | def _compensate_temperature(self, adc_t):
"""Compensate temperature.
Formula from datasheet Bosch BME280 Environmental sensor.
8.1 Compensation formulas in double precision floating point
Edition BST-BME280-DS001-10 | Revision 1.1 | May 2015
"""
var_1 = ((adc_t / 16384.0 - self._calibration_t[0] / 1024.0)
* self._calibration_t[1])
var_2 = ((adc_t / 131072.0 - self._calibration_t[0] / 8192.0)
* (adc_t / 131072.0 - self._calibration_t[0] / 8192.0)
* self._calibration_t[2])
self._temp_fine = var_1 + var_2
if self._delta_temp != 0.: # temperature correction for self heating
temp = self._temp_fine / 5120.0 + self._delta_temp
self._temp_fine = temp * 5120.0
else:
temp = self._temp_fine / 5120.0
return temp | python | def _compensate_temperature(self, adc_t):
"""Compensate temperature.
Formula from datasheet Bosch BME280 Environmental sensor.
8.1 Compensation formulas in double precision floating point
Edition BST-BME280-DS001-10 | Revision 1.1 | May 2015
"""
var_1 = ((adc_t / 16384.0 - self._calibration_t[0] / 1024.0)
* self._calibration_t[1])
var_2 = ((adc_t / 131072.0 - self._calibration_t[0] / 8192.0)
* (adc_t / 131072.0 - self._calibration_t[0] / 8192.0)
* self._calibration_t[2])
self._temp_fine = var_1 + var_2
if self._delta_temp != 0.: # temperature correction for self heating
temp = self._temp_fine / 5120.0 + self._delta_temp
self._temp_fine = temp * 5120.0
else:
temp = self._temp_fine / 5120.0
return temp | [
"def",
"_compensate_temperature",
"(",
"self",
",",
"adc_t",
")",
":",
"var_1",
"=",
"(",
"(",
"adc_t",
"/",
"16384.0",
"-",
"self",
".",
"_calibration_t",
"[",
"0",
"]",
"/",
"1024.0",
")",
"*",
"self",
".",
"_calibration_t",
"[",
"1",
"]",
")",
"var_2",
"=",
"(",
"(",
"adc_t",
"/",
"131072.0",
"-",
"self",
".",
"_calibration_t",
"[",
"0",
"]",
"/",
"8192.0",
")",
"*",
"(",
"adc_t",
"/",
"131072.0",
"-",
"self",
".",
"_calibration_t",
"[",
"0",
"]",
"/",
"8192.0",
")",
"*",
"self",
".",
"_calibration_t",
"[",
"2",
"]",
")",
"self",
".",
"_temp_fine",
"=",
"var_1",
"+",
"var_2",
"if",
"self",
".",
"_delta_temp",
"!=",
"0.",
":",
"# temperature correction for self heating",
"temp",
"=",
"self",
".",
"_temp_fine",
"/",
"5120.0",
"+",
"self",
".",
"_delta_temp",
"self",
".",
"_temp_fine",
"=",
"temp",
"*",
"5120.0",
"else",
":",
"temp",
"=",
"self",
".",
"_temp_fine",
"/",
"5120.0",
"return",
"temp"
] | Compensate temperature.
Formula from datasheet Bosch BME280 Environmental sensor.
8.1 Compensation formulas in double precision floating point
Edition BST-BME280-DS001-10 | Revision 1.1 | May 2015 | [
"Compensate",
"temperature",
"."
] | ecc6806dcee9de827a5414a9e836d271fedca9b9 | https://github.com/azogue/i2csense/blob/ecc6806dcee9de827a5414a9e836d271fedca9b9/i2csense/bme280.py#L61-L79 | train |
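One thing to note before the pressure and humidity records that follow: _compensate_temperature() is the only method that writes self._temp_fine, and the other two compensation routines read it, so the call order used in update() matters. A hedged sketch (sensor and the raw ADC values are hypothetical):

temperature = sensor._compensate_temperature(temp_raw)   # stores sensor._temp_fine
pressure    = sensor._compensate_pressure(pres_raw)      # reads sensor._temp_fine
humidity    = sensor._compensate_humidity(hum_raw)       # reads sensor._temp_fine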
azogue/i2csense | i2csense/bme280.py | BME280._compensate_pressure | def _compensate_pressure(self, adc_p):
"""Compensate pressure.
Formula from datasheet Bosch BME280 Environmental sensor.
8.1 Compensation formulas in double precision floating point
Edition BST-BME280-DS001-10 | Revision 1.1 | May 2015.
"""
var_1 = (self._temp_fine / 2.0) - 64000.0
var_2 = ((var_1 / 4.0) * (var_1 / 4.0)) / 2048
var_2 *= self._calibration_p[5]
var_2 += ((var_1 * self._calibration_p[4]) * 2.0)
var_2 = (var_2 / 4.0) + (self._calibration_p[3] * 65536.0)
var_1 = (((self._calibration_p[2]
* (((var_1 / 4.0) * (var_1 / 4.0)) / 8192)) / 8)
+ ((self._calibration_p[1] * var_1) / 2.0))
var_1 /= 262144
var_1 = ((32768 + var_1) * self._calibration_p[0]) / 32768
if var_1 == 0:
return 0
pressure = ((1048576 - adc_p) - (var_2 / 4096)) * 3125
if pressure < 0x80000000:
pressure = (pressure * 2.0) / var_1
else:
pressure = (pressure / var_1) * 2
var_1 = (self._calibration_p[8]
* (((pressure / 8.0) * (pressure / 8.0)) / 8192.0)) / 4096
var_2 = ((pressure / 4.0) * self._calibration_p[7]) / 8192.0
pressure += ((var_1 + var_2 + self._calibration_p[6]) / 16.0)
return pressure / 100 | python | def _compensate_pressure(self, adc_p):
"""Compensate pressure.
Formula from datasheet Bosch BME280 Environmental sensor.
8.1 Compensation formulas in double precision floating point
Edition BST-BME280-DS001-10 | Revision 1.1 | May 2015.
"""
var_1 = (self._temp_fine / 2.0) - 64000.0
var_2 = ((var_1 / 4.0) * (var_1 / 4.0)) / 2048
var_2 *= self._calibration_p[5]
var_2 += ((var_1 * self._calibration_p[4]) * 2.0)
var_2 = (var_2 / 4.0) + (self._calibration_p[3] * 65536.0)
var_1 = (((self._calibration_p[2]
* (((var_1 / 4.0) * (var_1 / 4.0)) / 8192)) / 8)
+ ((self._calibration_p[1] * var_1) / 2.0))
var_1 /= 262144
var_1 = ((32768 + var_1) * self._calibration_p[0]) / 32768
if var_1 == 0:
return 0
pressure = ((1048576 - adc_p) - (var_2 / 4096)) * 3125
if pressure < 0x80000000:
pressure = (pressure * 2.0) / var_1
else:
pressure = (pressure / var_1) * 2
var_1 = (self._calibration_p[8]
* (((pressure / 8.0) * (pressure / 8.0)) / 8192.0)) / 4096
var_2 = ((pressure / 4.0) * self._calibration_p[7]) / 8192.0
pressure += ((var_1 + var_2 + self._calibration_p[6]) / 16.0)
return pressure / 100 | [
"def",
"_compensate_pressure",
"(",
"self",
",",
"adc_p",
")",
":",
"var_1",
"=",
"(",
"self",
".",
"_temp_fine",
"/",
"2.0",
")",
"-",
"64000.0",
"var_2",
"=",
"(",
"(",
"var_1",
"/",
"4.0",
")",
"*",
"(",
"var_1",
"/",
"4.0",
")",
")",
"/",
"2048",
"var_2",
"*=",
"self",
".",
"_calibration_p",
"[",
"5",
"]",
"var_2",
"+=",
"(",
"(",
"var_1",
"*",
"self",
".",
"_calibration_p",
"[",
"4",
"]",
")",
"*",
"2.0",
")",
"var_2",
"=",
"(",
"var_2",
"/",
"4.0",
")",
"+",
"(",
"self",
".",
"_calibration_p",
"[",
"3",
"]",
"*",
"65536.0",
")",
"var_1",
"=",
"(",
"(",
"(",
"self",
".",
"_calibration_p",
"[",
"2",
"]",
"*",
"(",
"(",
"(",
"var_1",
"/",
"4.0",
")",
"*",
"(",
"var_1",
"/",
"4.0",
")",
")",
"/",
"8192",
")",
")",
"/",
"8",
")",
"+",
"(",
"(",
"self",
".",
"_calibration_p",
"[",
"1",
"]",
"*",
"var_1",
")",
"/",
"2.0",
")",
")",
"var_1",
"/=",
"262144",
"var_1",
"=",
"(",
"(",
"32768",
"+",
"var_1",
")",
"*",
"self",
".",
"_calibration_p",
"[",
"0",
"]",
")",
"/",
"32768",
"if",
"var_1",
"==",
"0",
":",
"return",
"0",
"pressure",
"=",
"(",
"(",
"1048576",
"-",
"adc_p",
")",
"-",
"(",
"var_2",
"/",
"4096",
")",
")",
"*",
"3125",
"if",
"pressure",
"<",
"0x80000000",
":",
"pressure",
"=",
"(",
"pressure",
"*",
"2.0",
")",
"/",
"var_1",
"else",
":",
"pressure",
"=",
"(",
"pressure",
"/",
"var_1",
")",
"*",
"2",
"var_1",
"=",
"(",
"self",
".",
"_calibration_p",
"[",
"8",
"]",
"*",
"(",
"(",
"(",
"pressure",
"/",
"8.0",
")",
"*",
"(",
"pressure",
"/",
"8.0",
")",
")",
"/",
"8192.0",
")",
")",
"/",
"4096",
"var_2",
"=",
"(",
"(",
"pressure",
"/",
"4.0",
")",
"*",
"self",
".",
"_calibration_p",
"[",
"7",
"]",
")",
"/",
"8192.0",
"pressure",
"+=",
"(",
"(",
"var_1",
"+",
"var_2",
"+",
"self",
".",
"_calibration_p",
"[",
"6",
"]",
")",
"/",
"16.0",
")",
"return",
"pressure",
"/",
"100"
] | Compensate pressure.
Formula from datasheet Bosch BME280 Environmental sensor.
8.1 Compensation formulas in double precision floating point
Edition BST-BME280-DS001-10 | Revision 1.1 | May 2015. | [
"Compensate",
"pressure",
"."
] | ecc6806dcee9de827a5414a9e836d271fedca9b9 | https://github.com/azogue/i2csense/blob/ecc6806dcee9de827a5414a9e836d271fedca9b9/i2csense/bme280.py#L81-L113 | train |
azogue/i2csense | i2csense/bme280.py | BME280._compensate_humidity | def _compensate_humidity(self, adc_h):
"""Compensate humidity.
Formula from datasheet Bosch BME280 Environmental sensor.
8.1 Compensation formulas in double precision floating point
Edition BST-BME280-DS001-10 | Revision 1.1 | May 2015.
"""
var_h = self._temp_fine - 76800.0
if var_h == 0:
return 0
var_h = ((adc_h - (self._calibration_h[3] * 64.0 +
self._calibration_h[4] / 16384.0 * var_h))
* (self._calibration_h[1] / 65536.0
* (1.0 + self._calibration_h[5] / 67108864.0 * var_h
* (1.0 + self._calibration_h[2] / 67108864.0 * var_h))))
var_h *= 1.0 - self._calibration_h[0] * var_h / 524288.0
if var_h > 100.0:
var_h = 100.0
elif var_h < 0.0:
var_h = 0.0
return var_h | python | def _compensate_humidity(self, adc_h):
"""Compensate humidity.
Formula from datasheet Bosch BME280 Environmental sensor.
8.1 Compensation formulas in double precision floating point
Edition BST-BME280-DS001-10 | Revision 1.1 | May 2015.
"""
var_h = self._temp_fine - 76800.0
if var_h == 0:
return 0
var_h = ((adc_h - (self._calibration_h[3] * 64.0 +
self._calibration_h[4] / 16384.0 * var_h))
* (self._calibration_h[1] / 65536.0
* (1.0 + self._calibration_h[5] / 67108864.0 * var_h
* (1.0 + self._calibration_h[2] / 67108864.0 * var_h))))
var_h *= 1.0 - self._calibration_h[0] * var_h / 524288.0
if var_h > 100.0:
var_h = 100.0
elif var_h < 0.0:
var_h = 0.0
return var_h | [
"def",
"_compensate_humidity",
"(",
"self",
",",
"adc_h",
")",
":",
"var_h",
"=",
"self",
".",
"_temp_fine",
"-",
"76800.0",
"if",
"var_h",
"==",
"0",
":",
"return",
"0",
"var_h",
"=",
"(",
"(",
"adc_h",
"-",
"(",
"self",
".",
"_calibration_h",
"[",
"3",
"]",
"*",
"64.0",
"+",
"self",
".",
"_calibration_h",
"[",
"4",
"]",
"/",
"16384.0",
"*",
"var_h",
")",
")",
"*",
"(",
"self",
".",
"_calibration_h",
"[",
"1",
"]",
"/",
"65536.0",
"*",
"(",
"1.0",
"+",
"self",
".",
"_calibration_h",
"[",
"5",
"]",
"/",
"67108864.0",
"*",
"var_h",
"*",
"(",
"1.0",
"+",
"self",
".",
"_calibration_h",
"[",
"2",
"]",
"/",
"67108864.0",
"*",
"var_h",
")",
")",
")",
")",
"var_h",
"*=",
"1.0",
"-",
"self",
".",
"_calibration_h",
"[",
"0",
"]",
"*",
"var_h",
"/",
"524288.0",
"if",
"var_h",
">",
"100.0",
":",
"var_h",
"=",
"100.0",
"elif",
"var_h",
"<",
"0.0",
":",
"var_h",
"=",
"0.0",
"return",
"var_h"
] | Compensate humidity.
Formula from datasheet Bosch BME280 Environmental sensor.
8.1 Compensation formulas in double precision floating point
Edition BST-BME280-DS001-10 | Revision 1.1 | May 2015. | [
"Compensate",
"humidity",
"."
] | ecc6806dcee9de827a5414a9e836d271fedca9b9 | https://github.com/azogue/i2csense/blob/ecc6806dcee9de827a5414a9e836d271fedca9b9/i2csense/bme280.py#L115-L138 | train |
azogue/i2csense | i2csense/bme280.py | BME280._take_forced_measurement | def _take_forced_measurement(self):
"""Take a forced measurement.
In forced mode, the BME sensor goes back to sleep after each
measurement and we need to set it to forced mode once at this point,
so it will take the next measurement and then return to sleep again.
In normal mode simply does new measurements periodically.
"""
# set to forced mode, i.e. "take next measurement"
self._bus.write_byte_data(self._i2c_add, 0xF4, self.ctrl_meas_reg)
while self._bus.read_byte_data(self._i2c_add, 0xF3) & 0x08:
sleep(0.005) | python | def _take_forced_measurement(self):
"""Take a forced measurement.
In forced mode, the BME sensor goes back to sleep after each
measurement and we need to set it to forced mode once at this point,
so it will take the next measurement and then return to sleep again.
In normal mode simply does new measurements periodically.
"""
# set to forced mode, i.e. "take next measurement"
self._bus.write_byte_data(self._i2c_add, 0xF4, self.ctrl_meas_reg)
while self._bus.read_byte_data(self._i2c_add, 0xF3) & 0x08:
sleep(0.005) | [
"def",
"_take_forced_measurement",
"(",
"self",
")",
":",
"# set to forced mode, i.e. \"take next measurement\"",
"self",
".",
"_bus",
".",
"write_byte_data",
"(",
"self",
".",
"_i2c_add",
",",
"0xF4",
",",
"self",
".",
"ctrl_meas_reg",
")",
"while",
"self",
".",
"_bus",
".",
"read_byte_data",
"(",
"self",
".",
"_i2c_add",
",",
"0xF3",
")",
"&",
"0x08",
":",
"sleep",
"(",
"0.005",
")"
] | Take a forced measurement.
In forced mode, the BME sensor goes back to sleep after each
measurement and we need to set it to forced mode once at this point,
so it will take the next measurement and then return to sleep again.
In normal mode simply does new measurements periodically. | [
"Take",
"a",
"forced",
"measurement",
"."
] | ecc6806dcee9de827a5414a9e836d271fedca9b9 | https://github.com/azogue/i2csense/blob/ecc6806dcee9de827a5414a9e836d271fedca9b9/i2csense/bme280.py#L202-L213 | train |
azogue/i2csense | i2csense/bme280.py | BME280.update | def update(self, first_reading=False):
"""Read raw data and update compensated variables."""
try:
if first_reading or not self._ok:
self._bus.write_byte_data(self._i2c_add, 0xF2,
self.ctrl_hum_reg)
self._bus.write_byte_data(self._i2c_add, 0xF5, self.config_reg)
self._bus.write_byte_data(self._i2c_add, 0xF4,
self.ctrl_meas_reg)
self._populate_calibration_data()
if self.mode == 2: # MODE_FORCED
self._take_forced_measurement()
data = []
for i in range(0xF7, 0xF7 + 8):
data.append(self._bus.read_byte_data(self._i2c_add, i))
except OSError as exc:
self.log_error("Bad update: %s", exc)
self._ok = False
return
pres_raw = (data[0] << 12) | (data[1] << 4) | (data[2] >> 4)
temp_raw = (data[3] << 12) | (data[4] << 4) | (data[5] >> 4)
hum_raw = (data[6] << 8) | data[7]
self._ok = False
temperature = self._compensate_temperature(temp_raw)
if (temperature >= -20) and (temperature < 80):
self._temperature = temperature
self._ok = True
if self._with_humidity:
humidity = self._compensate_humidity(hum_raw)
if (humidity >= 0) and (humidity <= 100):
self._humidity = humidity
else:
self._ok = False
if self._with_pressure:
pressure = self._compensate_pressure(pres_raw)
if pressure > 100:
self._pressure = pressure
else:
self._ok = False | python | def update(self, first_reading=False):
"""Read raw data and update compensated variables."""
try:
if first_reading or not self._ok:
self._bus.write_byte_data(self._i2c_add, 0xF2,
self.ctrl_hum_reg)
self._bus.write_byte_data(self._i2c_add, 0xF5, self.config_reg)
self._bus.write_byte_data(self._i2c_add, 0xF4,
self.ctrl_meas_reg)
self._populate_calibration_data()
if self.mode == 2: # MODE_FORCED
self._take_forced_measurement()
data = []
for i in range(0xF7, 0xF7 + 8):
data.append(self._bus.read_byte_data(self._i2c_add, i))
except OSError as exc:
self.log_error("Bad update: %s", exc)
self._ok = False
return
pres_raw = (data[0] << 12) | (data[1] << 4) | (data[2] >> 4)
temp_raw = (data[3] << 12) | (data[4] << 4) | (data[5] >> 4)
hum_raw = (data[6] << 8) | data[7]
self._ok = False
temperature = self._compensate_temperature(temp_raw)
if (temperature >= -20) and (temperature < 80):
self._temperature = temperature
self._ok = True
if self._with_humidity:
humidity = self._compensate_humidity(hum_raw)
if (humidity >= 0) and (humidity <= 100):
self._humidity = humidity
else:
self._ok = False
if self._with_pressure:
pressure = self._compensate_pressure(pres_raw)
if pressure > 100:
self._pressure = pressure
else:
self._ok = False | [
"def",
"update",
"(",
"self",
",",
"first_reading",
"=",
"False",
")",
":",
"try",
":",
"if",
"first_reading",
"or",
"not",
"self",
".",
"_ok",
":",
"self",
".",
"_bus",
".",
"write_byte_data",
"(",
"self",
".",
"_i2c_add",
",",
"0xF2",
",",
"self",
".",
"ctrl_hum_reg",
")",
"self",
".",
"_bus",
".",
"write_byte_data",
"(",
"self",
".",
"_i2c_add",
",",
"0xF5",
",",
"self",
".",
"config_reg",
")",
"self",
".",
"_bus",
".",
"write_byte_data",
"(",
"self",
".",
"_i2c_add",
",",
"0xF4",
",",
"self",
".",
"ctrl_meas_reg",
")",
"self",
".",
"_populate_calibration_data",
"(",
")",
"if",
"self",
".",
"mode",
"==",
"2",
":",
"# MODE_FORCED",
"self",
".",
"_take_forced_measurement",
"(",
")",
"data",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"0xF7",
",",
"0xF7",
"+",
"8",
")",
":",
"data",
".",
"append",
"(",
"self",
".",
"_bus",
".",
"read_byte_data",
"(",
"self",
".",
"_i2c_add",
",",
"i",
")",
")",
"except",
"OSError",
"as",
"exc",
":",
"self",
".",
"log_error",
"(",
"\"Bad update: %s\"",
",",
"exc",
")",
"self",
".",
"_ok",
"=",
"False",
"return",
"pres_raw",
"=",
"(",
"data",
"[",
"0",
"]",
"<<",
"12",
")",
"|",
"(",
"data",
"[",
"1",
"]",
"<<",
"4",
")",
"|",
"(",
"data",
"[",
"2",
"]",
">>",
"4",
")",
"temp_raw",
"=",
"(",
"data",
"[",
"3",
"]",
"<<",
"12",
")",
"|",
"(",
"data",
"[",
"4",
"]",
"<<",
"4",
")",
"|",
"(",
"data",
"[",
"5",
"]",
">>",
"4",
")",
"hum_raw",
"=",
"(",
"data",
"[",
"6",
"]",
"<<",
"8",
")",
"|",
"data",
"[",
"7",
"]",
"self",
".",
"_ok",
"=",
"False",
"temperature",
"=",
"self",
".",
"_compensate_temperature",
"(",
"temp_raw",
")",
"if",
"(",
"temperature",
">=",
"-",
"20",
")",
"and",
"(",
"temperature",
"<",
"80",
")",
":",
"self",
".",
"_temperature",
"=",
"temperature",
"self",
".",
"_ok",
"=",
"True",
"if",
"self",
".",
"_with_humidity",
":",
"humidity",
"=",
"self",
".",
"_compensate_humidity",
"(",
"hum_raw",
")",
"if",
"(",
"humidity",
">=",
"0",
")",
"and",
"(",
"humidity",
"<=",
"100",
")",
":",
"self",
".",
"_humidity",
"=",
"humidity",
"else",
":",
"self",
".",
"_ok",
"=",
"False",
"if",
"self",
".",
"_with_pressure",
":",
"pressure",
"=",
"self",
".",
"_compensate_pressure",
"(",
"pres_raw",
")",
"if",
"pressure",
">",
"100",
":",
"self",
".",
"_pressure",
"=",
"pressure",
"else",
":",
"self",
".",
"_ok",
"=",
"False"
] | Read raw data and update compensated variables. | [
"Read",
"raw",
"data",
"and",
"update",
"compensated",
"variables",
"."
] | ecc6806dcee9de827a5414a9e836d271fedca9b9 | https://github.com/azogue/i2csense/blob/ecc6806dcee9de827a5414a9e836d271fedca9b9/i2csense/bme280.py#L215-L257 | train |
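The bit assembly in update() is easy to misread, so here is the same arithmetic on a hypothetical burst read of registers 0xF7..0xFE: pressure and temperature are 20-bit values split across three registers each, while humidity is a plain 16-bit value.

data = [0x55, 0x8A, 0x30, 0x7E, 0x9D, 0x10, 0x66, 0x00]        # hypothetical raw bytes
pres_raw = (data[0] << 12) | (data[1] << 4) | (data[2] >> 4)   # 20-bit pressure ADC    -> 0x558A3
temp_raw = (data[3] << 12) | (data[4] << 4) | (data[5] >> 4)   # 20-bit temperature ADC -> 0x7E9D1
hum_raw  = (data[6] << 8) | data[7]                            # 16-bit humidity ADC    -> 0x6600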
openvax/varlens | varlens/read_evidence/pileup.py | Pileup.append | def append(self, element):
'''
Append a PileupElement to this Pileup. If an identical PileupElement is
already part of this Pileup, do nothing.
'''
assert element.locus == self.locus, (
"Element locus (%s) != Pileup locus (%s)"
% (element.locus, self.locus))
self.elements[element] = None | python | def append(self, element):
'''
Append a PileupElement to this Pileup. If an identical PileupElement is
already part of this Pileup, do nothing.
'''
assert element.locus == self.locus, (
"Element locus (%s) != Pileup locus (%s)"
% (element.locus, self.locus))
self.elements[element] = None | [
"def",
"append",
"(",
"self",
",",
"element",
")",
":",
"assert",
"element",
".",
"locus",
"==",
"self",
".",
"locus",
",",
"(",
"\"Element locus (%s) != Pileup locus (%s)\"",
"%",
"(",
"element",
".",
"locus",
",",
"self",
".",
"locus",
")",
")",
"self",
".",
"elements",
"[",
"element",
"]",
"=",
"None"
] | Append a PileupElement to this Pileup. If an identical PileupElement is
already part of this Pileup, do nothing. | [
"Append",
"a",
"PileupElement",
"to",
"this",
"Pileup",
".",
"If",
"an",
"identical",
"PileupElement",
"is",
"already",
"part",
"of",
"this",
"Pileup",
"do",
"nothing",
"."
] | 715d3ede5893757b2fcba4117515621bca7b1e5d | https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/read_evidence/pileup.py#L55-L63 | train |
openvax/varlens | varlens/read_evidence/pileup.py | Pileup.update | def update(self, other):
'''
Add all pileup elements from other into self.
'''
assert self.locus == other.locus
self.elements.update(other.elements) | python | def update(self, other):
'''
Add all pileup elements from other into self.
'''
assert self.locus == other.locus
self.elements.update(other.elements) | [
"def",
"update",
"(",
"self",
",",
"other",
")",
":",
"assert",
"self",
".",
"locus",
"==",
"other",
".",
"locus",
"self",
".",
"elements",
".",
"update",
"(",
"other",
".",
"elements",
")"
] | Add all pileup elements from other into self. | [
"Add",
"all",
"pileup",
"elements",
"from",
"other",
"into",
"self",
"."
] | 715d3ede5893757b2fcba4117515621bca7b1e5d | https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/read_evidence/pileup.py#L65-L70 | train |
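Both Pileup methods above lean on self.elements being a dict used as an insertion-ordered set: keys are the elements, values are always None. A small illustration of that idiom outside the varlens classes (plain strings stand in for PileupElement objects; insertion order assumes Python 3.7+ dict semantics).

elements = {}
elements["read_A"] = None              # append(): re-adding an existing key is a no-op
elements["read_B"] = None
other = {"read_B": None, "read_C": None}
elements.update(other)                 # update(): merge; duplicates collapse
print(list(elements))                  # ['read_A', 'read_B', 'read_C']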