repo: string (length 7-55)
path: string (length 4-127)
func_name: string (length 1-88)
original_string: string (length 75-19.8k)
language: string (1 distinct value)
code: string (length 75-19.8k)
code_tokens: list
docstring: string (length 3-17.3k)
docstring_tokens: list
sha: string (length 40)
url: string (length 87-242)
partition: string (1 distinct value)
user-cont/conu
conu/backend/podman/container.py
PodmanContainer.wait_for_port
def wait_for_port(self, port, timeout=10, **probe_kwargs):
    """
    block until specified port starts accepting connections, raises an exc ProbeTimeout
    if timeout is reached

    :param port: int, port number
    :param timeout: int or float (seconds), time to wait for establishing the connection
    :param probe_kwargs: arguments passed to Probe constructor
    :return: None
    """
    Probe(timeout=timeout, fnc=functools.partial(self.is_port_open, port), **probe_kwargs).run()
python
[ "def", "wait_for_port", "(", "self", ",", "port", ",", "timeout", "=", "10", ",", "*", "*", "probe_kwargs", ")", ":", "Probe", "(", "timeout", "=", "timeout", ",", "fnc", "=", "functools", ".", "partial", "(", "self", ".", "is_port_open", ",", "port", ")", ",", "*", "*", "probe_kwargs", ")", ".", "run", "(", ")" ]
block until specified port starts accepting connections, raises an exc ProbeTimeout if timeout is reached :param port: int, port number :param timeout: int or float (seconds), time to wait for establishing the connection :param probe_kwargs: arguments passed to Probe constructor :return: None
[ "block", "until", "specified", "port", "starts", "accepting", "connections", "raises", "an", "exc", "ProbeTimeout", "if", "timeout", "is", "reached" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/podman/container.py#L221-L231
train
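A minimal sketch of the Probe/functools.partial pattern the record above relies on, applied to a plain socket check instead of a container method. The port_open helper is invented for illustration; the Probe import path is inferred from the dataset's own conu/utils/probes.py record.

# Illustrative only: block until something accepts TCP connections on localhost:8080,
# using the same Probe + functools.partial pattern as PodmanContainer.wait_for_port above.
import functools
import socket

from conu.utils.probes import Probe  # assumed import path, matching conu/utils/probes.py


def port_open(host, port):
    # hypothetical helper: return True once host:port accepts a TCP connection
    try:
        with socket.create_connection((host, port), timeout=1):
            return True
    except OSError:
        return False


# raises conu's ProbeTimeout if the port never opens within 10 seconds
Probe(timeout=10, fnc=functools.partial(port_open, "localhost", 8080)).run()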
user-cont/conu
conu/backend/podman/container.py
PodmanContainer.mount
def mount(self, mount_point=None):
    """
    mount container filesystem

    :return: str, the location of the mounted file system
    """
    cmd = ["podman", "mount", self._id or self.get_id()]
    output = run_cmd(cmd, return_output=True).rstrip("\n\r")
    return output
python
[ "def", "mount", "(", "self", ",", "mount_point", "=", "None", ")", ":", "cmd", "=", "[", "\"podman\"", ",", "\"mount\"", ",", "self", ".", "_id", "or", "self", ".", "get_id", "(", ")", "]", "output", "=", "run_cmd", "(", "cmd", ",", "return_output", "=", "True", ")", ".", "rstrip", "(", "\"\\n\\r\"", ")", "return", "output" ]
mount container filesystem :return: str, the location of the mounted file system
[ "mount", "container", "filesystem" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/podman/container.py#L244-L252
train
user-cont/conu
conu/backend/podman/container.py
PodmanContainer.wait
def wait(self, timeout=None):
    """
    Block until the container stops, then return its exit code. Similar to
    the ``podman wait`` command.

    :param timeout: int, microseconds to wait before polling for completion
    :return: int, exit code
    """
    timeout = ["--interval=%s" % timeout] if timeout else []
    cmdline = ["podman", "wait"] + timeout + [self._id or self.get_id()]
    return run_cmd(cmdline, return_output=True)
python
[ "def", "wait", "(", "self", ",", "timeout", "=", "None", ")", ":", "timeout", "=", "[", "\"--interval=%s\"", "%", "timeout", "]", "if", "timeout", "else", "[", "]", "cmdline", "=", "[", "\"podman\"", ",", "\"wait\"", "]", "+", "timeout", "+", "[", "self", ".", "_id", "or", "self", ".", "get_id", "(", ")", "]", "return", "run_cmd", "(", "cmdline", ",", "return_output", "=", "True", ")" ]
Block until the container stops, then return its exit code. Similar to the ``podman wait`` command. :param timeout: int, microseconds to wait before polling for completion :return: int, exit code
[ "Block", "until", "the", "container", "stops", "then", "return", "its", "exit", "code", ".", "Similar", "to", "the", "podman", "wait", "command", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/podman/container.py#L306-L316
train
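A small illustration of the command list the method above assembles; the container ID is invented, and per the record's docstring the --interval value is interpreted as microseconds.

# Illustrative only: how the podman wait command line from the record above is built.
container_id = "abcdef123456"  # invented ID
timeout = 500                  # microseconds between polls, per the docstring above

interval_opt = ["--interval=%s" % timeout] if timeout else []
cmdline = ["podman", "wait"] + interval_opt + [container_id]
print(cmdline)  # ['podman', 'wait', '--interval=500', 'abcdef123456']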
user-cont/conu
conu/apidefs/filesystem.py
Filesystem.read_file
def read_file(self, file_path):
    """
    read file specified via 'file_path' and return its content - raises an ConuException
    if there is an issue accessing the file

    :param file_path: str, path to the file to read
    :return: str (not bytes), content of the file
    """
    try:
        with open(self.p(file_path)) as fd:
            return fd.read()
    except IOError as ex:
        logger.error("error while accessing file %s: %r", file_path, ex)
        raise ConuException("There was an error while accessing file %s: %r", file_path, ex)
python
[ "def", "read_file", "(", "self", ",", "file_path", ")", ":", "try", ":", "with", "open", "(", "self", ".", "p", "(", "file_path", ")", ")", "as", "fd", ":", "return", "fd", ".", "read", "(", ")", "except", "IOError", "as", "ex", ":", "logger", ".", "error", "(", "\"error while accessing file %s: %r\"", ",", "file_path", ",", "ex", ")", "raise", "ConuException", "(", "\"There was an error while accessing file %s: %r\"", ",", "file_path", ",", "ex", ")" ]
read file specified via 'file_path' and return its content - raises an ConuException if there is an issue accessing the file :param file_path: str, path to the file to read :return: str (not bytes), content of the file
[ "read", "file", "specified", "via", "file_path", "and", "return", "its", "content", "-", "raises", "an", "ConuException", "if", "there", "is", "an", "issue", "accessing", "the", "file" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/apidefs/filesystem.py#L106-L119
train
user-cont/conu
conu/apidefs/filesystem.py
Filesystem.get_file
def get_file(self, file_path, mode="r"):
    """
    provide File object specified via 'file_path'

    :param file_path: str, path to the file
    :param mode: str, mode used when opening the file
    :return: File instance
    """
    return open(self.p(file_path), mode=mode)
python
[ "def", "get_file", "(", "self", ",", "file_path", ",", "mode", "=", "\"r\"", ")", ":", "return", "open", "(", "self", ".", "p", "(", "file_path", ")", ",", "mode", "=", "mode", ")" ]
provide File object specified via 'file_path' :param file_path: str, path to the file :param mode: str, mode used when opening the file :return: File instance
[ "provide", "File", "object", "specified", "via", "file_path" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/apidefs/filesystem.py#L121-L129
train
user-cont/conu
conu/apidefs/filesystem.py
Filesystem.file_is_present
def file_is_present(self, file_path):
    """
    check if file 'file_path' is present, raises IOError if file_path
    is not a file

    :param file_path: str, path to the file
    :return: True if file exists, False if file does not exist
    """
    p = self.p(file_path)
    if not os.path.exists(p):
        return False
    if not os.path.isfile(p):
        raise IOError("%s is not a file" % file_path)
    return True
python
[ "def", "file_is_present", "(", "self", ",", "file_path", ")", ":", "p", "=", "self", ".", "p", "(", "file_path", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "p", ")", ":", "return", "False", "if", "not", "os", ".", "path", ".", "isfile", "(", "p", ")", ":", "raise", "IOError", "(", "\"%s is not a file\"", "%", "file_path", ")", "return", "True" ]
check if file 'file_path' is present, raises IOError if file_path is not a file :param file_path: str, path to the file :return: True if file exists, False if file does not exist
[ "check", "if", "file", "file_path", "is", "present", "raises", "IOError", "if", "file_path", "is", "not", "a", "file" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/apidefs/filesystem.py#L131-L144
train
user-cont/conu
conu/apidefs/filesystem.py
Filesystem.directory_is_present
def directory_is_present(self, directory_path):
    """
    check if directory 'directory_path' is present, raise IOError if it's not a directory

    :param directory_path: str, directory to check
    :return: True if directory exists, False if directory does not exist
    """
    p = self.p(directory_path)
    if not os.path.exists(p):
        return False
    if not os.path.isdir(p):
        raise IOError("%s is not a directory" % directory_path)
    return True
python
[ "def", "directory_is_present", "(", "self", ",", "directory_path", ")", ":", "p", "=", "self", ".", "p", "(", "directory_path", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "p", ")", ":", "return", "False", "if", "not", "os", ".", "path", ".", "isdir", "(", "p", ")", ":", "raise", "IOError", "(", "\"%s is not a directory\"", "%", "directory_path", ")", "return", "True" ]
check if directory 'directory_path' is present, raise IOError if it's not a directory :param directory_path: str, directory to check :return: True if directory exists, False if directory does not exist
[ "check", "if", "directory", "directory_path", "is", "present", "raise", "IOError", "if", "it", "s", "not", "a", "directory" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/apidefs/filesystem.py#L146-L158
train
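A standalone sketch of the presence-check pattern shared by the two Filesystem records above; the paths are made up and conu's self.p() (which anchors paths at the mount point) is replaced by a plain path join.

# Illustrative only: exists/isfile vs exists/isdir checks mirroring
# Filesystem.file_is_present and directory_is_present above.
import os


def is_present(root, relative_path, expect_dir=False):
    # roughly what self.p() does in conu: anchor the path at a mounted root
    p = os.path.join(root, relative_path.lstrip("/"))
    if not os.path.exists(p):
        return False
    check = os.path.isdir if expect_dir else os.path.isfile
    if not check(p):
        raise IOError("%s is not a %s" % (relative_path, "directory" if expect_dir else "file"))
    return True


print(is_present("/", "etc/hostname"))          # True on most Linux systems
print(is_present("/", "etc", expect_dir=True))  # True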
user-cont/conu
conu/apidefs/filesystem.py
Filesystem.get_selinux_context
def get_selinux_context(self, file_path):
    """
    Get SELinux file context of the selected file.

    :param file_path: str, path to the file
    :return: str, name of the SELinux file context
    """
    # what if SELinux is not enabled?
    p = self.p(file_path)
    if not HAS_XATTR:
        raise RuntimeError("'xattr' python module is not available, hence we cannot "
                           "determine the SELinux context for this file. "
                           "In Fedora this module is available as python3-pyxattr -- "
                           "other distributions may follow similar naming scheme.")
    return xattr.get(p, "security.selinux")
python
[ "def", "get_selinux_context", "(", "self", ",", "file_path", ")", ":", "# what if SELinux is not enabled?", "p", "=", "self", ".", "p", "(", "file_path", ")", "if", "not", "HAS_XATTR", ":", "raise", "RuntimeError", "(", "\"'xattr' python module is not available, hence we cannot \"", "\"determine the SELinux context for this file. \"", "\"In Fedora this module is available as python3-pyxattr -- \"", "\"other distributions may follow similar naming scheme.\"", ")", "return", "xattr", ".", "get", "(", "p", ",", "\"security.selinux\"", ")" ]
Get SELinux file context of the selected file. :param file_path: str, path to the file :return: str, name of the SELinux file context
[ "Get", "SELinux", "file", "context", "of", "the", "selected", "file", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/apidefs/filesystem.py#L160-L174
train
user-cont/conu
conu/utils/probes.py
Probe._wrapper
def _wrapper(self, q, start):
    """
    _wrapper checks return status of Probe.fnc and provides the result for process managing

    :param q: Queue for function results
    :param start: Time of function run (used for logging)
    :return: Return value or Exception
    """
    try:
        func_name = self.fnc.__name__
    except AttributeError:
        func_name = str(self.fnc)
    logger.debug("Running \"%s\" with parameters: \"%s\":\t%s/%s" % (
        func_name, str(self.kwargs), round(time.time() - start), self.timeout))
    try:
        result = self.fnc(**self.kwargs)
        # let's log only first 50 characters of the response
        logger.debug("callback result = %s", str(result)[:50])
        q.put(result)
    except self.expected_exceptions as ex:
        logger.debug("expected exception was caught: %s", ex)
        q.put(False)
    except Exception as ex:
        logger.debug("adding exception %s to queue", ex)
        q.put(ex)
python
[ "def", "_wrapper", "(", "self", ",", "q", ",", "start", ")", ":", "try", ":", "func_name", "=", "self", ".", "fnc", ".", "__name__", "except", "AttributeError", ":", "func_name", "=", "str", "(", "self", ".", "fnc", ")", "logger", ".", "debug", "(", "\"Running \\\"%s\\\" with parameters: \\\"%s\\\":\\t%s/%s\"", "%", "(", "func_name", ",", "str", "(", "self", ".", "kwargs", ")", ",", "round", "(", "time", ".", "time", "(", ")", "-", "start", ")", ",", "self", ".", "timeout", ")", ")", "try", ":", "result", "=", "self", ".", "fnc", "(", "*", "*", "self", ".", "kwargs", ")", "# let's log only first 50 characters of the response", "logger", ".", "debug", "(", "\"callback result = %s\"", ",", "str", "(", "result", ")", "[", ":", "50", "]", ")", "q", ".", "put", "(", "result", ")", "except", "self", ".", "expected_exceptions", "as", "ex", ":", "logger", ".", "debug", "(", "\"expected exception was caught: %s\"", ",", "ex", ")", "q", ".", "put", "(", "False", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "debug", "(", "\"adding exception %s to queue\"", ",", "ex", ")", "q", ".", "put", "(", "ex", ")" ]
_wrapper checks return status of Probe.fnc and provides the result for process managing :param q: Queue for function results :param start: Time of function run (used for logging) :return: Return value or Exception
[ "_wrapper", "checks", "return", "status", "of", "Probe", ".", "fnc", "and", "provides", "the", "result", "for", "process", "managing" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/probes.py#L92-L116
train
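A compact, self-contained sketch of the pattern _wrapper implements above: run a callback in a child process and hand its result, or the raised exception, back through a multiprocessing.Queue. The helper names are invented and the logging/timeout machinery of the real Probe is omitted.

# Illustrative only: result-or-exception hand-off through a Queue,
# mirroring the Probe._wrapper record above.
import multiprocessing


def _child(q, fnc, kwargs):
    try:
        q.put(fnc(**kwargs))
    except Exception as ex:  # the real Probe treats "expected" exceptions specially
        q.put(ex)


def run_once(fnc, **kwargs):
    q = multiprocessing.Queue()
    proc = multiprocessing.Process(target=_child, args=(q, fnc, kwargs))
    proc.start()
    result = q.get()
    proc.join()
    if isinstance(result, Exception):
        raise result
    return result


def double(x):
    return x * 2


if __name__ == "__main__":
    print(run_once(double, x=21))  # 42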
user-cont/conu
conu/backend/docker/skopeo.py
transport_param
def transport_param(image):
    """
    Parse DockerImage info into skopeo parameter

    :param image: DockerImage
    :return: string. skopeo parameter specifying image
    """
    transports = {SkopeoTransport.CONTAINERS_STORAGE: "containers-storage:",
                  SkopeoTransport.DIRECTORY: "dir:",
                  SkopeoTransport.DOCKER: "docker://",
                  SkopeoTransport.DOCKER_ARCHIVE: "docker-archive",
                  SkopeoTransport.DOCKER_DAEMON: "docker-daemon:",
                  SkopeoTransport.OCI: "oci:",
                  SkopeoTransport.OSTREE: "ostree:"}

    transport = image.transport
    tag = image.tag
    repository = image.name
    path = image.path

    if not transport:
        transport = SkopeoTransport.DOCKER
    command = transports[transport]

    path_required = [SkopeoTransport.DIRECTORY, SkopeoTransport.DOCKER_ARCHIVE, SkopeoTransport.OCI]
    if transport in path_required and path is None:
        raise ValueError(transports[transport] + " path is required to be specified")

    if transport == SkopeoTransport.DIRECTORY:
        return command + path
    if transport == SkopeoTransport.DOCKER_ARCHIVE:
        command += path
        if repository is None:
            return command
        command += ":"
    if transport in [SkopeoTransport.CONTAINERS_STORAGE, SkopeoTransport.DOCKER,
                     SkopeoTransport.DOCKER_ARCHIVE, transport.DOCKER_DAEMON]:
        return command + repository + ":" + tag
    if transport == SkopeoTransport.OCI:
        return command + path + ":" + tag
    if transport == SkopeoTransport.OSTREE:
        return command + repository + ("@" + path if path else "")

    raise ConuException("This transport is not supported")
python
[ "def", "transport_param", "(", "image", ")", ":", "transports", "=", "{", "SkopeoTransport", ".", "CONTAINERS_STORAGE", ":", "\"containers-storage:\"", ",", "SkopeoTransport", ".", "DIRECTORY", ":", "\"dir:\"", ",", "SkopeoTransport", ".", "DOCKER", ":", "\"docker://\"", ",", "SkopeoTransport", ".", "DOCKER_ARCHIVE", ":", "\"docker-archive\"", ",", "SkopeoTransport", ".", "DOCKER_DAEMON", ":", "\"docker-daemon:\"", ",", "SkopeoTransport", ".", "OCI", ":", "\"oci:\"", ",", "SkopeoTransport", ".", "OSTREE", ":", "\"ostree:\"", "}", "transport", "=", "image", ".", "transport", "tag", "=", "image", ".", "tag", "repository", "=", "image", ".", "name", "path", "=", "image", ".", "path", "if", "not", "transport", ":", "transport", "=", "SkopeoTransport", ".", "DOCKER", "command", "=", "transports", "[", "transport", "]", "path_required", "=", "[", "SkopeoTransport", ".", "DIRECTORY", ",", "SkopeoTransport", ".", "DOCKER_ARCHIVE", ",", "SkopeoTransport", ".", "OCI", "]", "if", "transport", "in", "path_required", "and", "path", "is", "None", ":", "raise", "ValueError", "(", "transports", "[", "transport", "]", "+", "\" path is required to be specified\"", ")", "if", "transport", "==", "SkopeoTransport", ".", "DIRECTORY", ":", "return", "command", "+", "path", "if", "transport", "==", "SkopeoTransport", ".", "DOCKER_ARCHIVE", ":", "command", "+=", "path", "if", "repository", "is", "None", ":", "return", "command", "command", "+=", "\":\"", "if", "transport", "in", "[", "SkopeoTransport", ".", "CONTAINERS_STORAGE", ",", "SkopeoTransport", ".", "DOCKER", ",", "SkopeoTransport", ".", "DOCKER_ARCHIVE", ",", "transport", ".", "DOCKER_DAEMON", "]", ":", "return", "command", "+", "repository", "+", "\":\"", "+", "tag", "if", "transport", "==", "SkopeoTransport", ".", "OCI", ":", "return", "command", "+", "path", "+", "\":\"", "+", "tag", "if", "transport", "==", "SkopeoTransport", ".", "OSTREE", ":", "return", "command", "+", "repository", "+", "(", "\"@\"", "+", "path", "if", "path", "else", "\"\"", ")", "raise", "ConuException", "(", "\"This transport is not supported\"", ")" ]
Parse DockerImage info into skopeo parameter :param image: DockerImage :return: string. skopeo parameter specifying image
[ "Parse", "DockerImage", "info", "into", "skopeo", "parameter" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/skopeo.py#L23-L65
train
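To make the branches above concrete, a hedged illustration of the reference strings such a helper produces for a few transports; the repository, tag and path values are invented.

# Illustrative only: example skopeo references following the prefix table in the record above.
#   DOCKER        -> "docker://" + name + ":" + tag       e.g. docker://fedora:31
#   DOCKER_DAEMON -> "docker-daemon:" + name + ":" + tag  e.g. docker-daemon:fedora:31
#   DIRECTORY     -> "dir:" + path                        e.g. dir:/tmp/fedora-dump
#   OSTREE        -> "ostree:" + name + "@" + path        e.g. ostree:fedora@/ostree/repo
name, tag, path = "fedora", "31", "/tmp/fedora-dump"
print("docker://%s:%s" % (name, tag))
print("docker-daemon:%s:%s" % (name, tag))
print("dir:%s" % path)
print("ostree:%s@%s" % (name, path))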
user-cont/conu
conu/backend/nspawn/container.py
NspawnContainer.is_running
def is_running(self):
    """
    return True when container is running, otherwise return False

    :return: bool
    """
    cmd = ["machinectl", "--no-pager", "status", self.name]
    try:
        subprocess.check_call(cmd)
        return True
    except subprocess.CalledProcessError as ex:
        logger.info("nspawn container %s is not running probably: %s", self.name, ex.output)
        return False
python
[ "def", "is_running", "(", "self", ")", ":", "cmd", "=", "[", "\"machinectl\"", ",", "\"--no-pager\"", ",", "\"status\"", ",", "self", ".", "name", "]", "try", ":", "subprocess", ".", "check_call", "(", "cmd", ")", "return", "True", "except", "subprocess", ".", "CalledProcessError", "as", "ex", ":", "logger", ".", "info", "(", "\"nspawn container %s is not running probably: %s\"", ",", "self", ".", "name", ",", "ex", ".", "output", ")", "return", "False" ]
return True when container is running, otherwise return False :return: bool
[ "return", "True", "when", "container", "is", "running", "otherwise", "return", "False" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/container.py#L147-L160
train
user-cont/conu
conu/backend/nspawn/container.py
NspawnContainer.copy_from
def copy_from(self, src, dest):
    """
    copy a file or a directory from container or image to host system.

    :param src: str, path to a file or a directory within container or image
    :param dest: str, path to a file or a directory on host system
    :return: None
    """
    logger.debug("copying %s from host to container at %s", src, dest)
    cmd = ["machinectl", "--no-pager", "copy-from", self.name, src, dest]
    run_cmd(cmd)
python
[ "def", "copy_from", "(", "self", ",", "src", ",", "dest", ")", ":", "logger", ".", "debug", "(", "\"copying %s from host to container at %s\"", ",", "src", ",", "dest", ")", "cmd", "=", "[", "\"machinectl\"", ",", "\"--no-pager\"", ",", "\"copy-from\"", ",", "self", ".", "name", ",", "src", ",", "dest", "]", "run_cmd", "(", "cmd", ")" ]
copy a file or a directory from container or image to host system. :param src: str, path to a file or a directory within container or image :param dest: str, path to a file or a directory on host system :return: None
[ "copy", "a", "file", "or", "a", "directory", "from", "container", "or", "image", "to", "host", "system", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/container.py#L174-L184
train
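A small hedged sketch of driving machinectl copy-from directly, which is the call the record above wraps; the machine name and paths are placeholders.

# Illustrative only: copy a file out of a running systemd-nspawn machine to the host,
# mirroring the NspawnContainer.copy_from record above.
import subprocess

machine = "my-nspawn-machine"          # placeholder machine name
src_in_container = "/etc/os-release"   # path inside the container
dest_on_host = "/tmp/os-release.copy"  # path on the host

subprocess.check_call(
    ["machinectl", "--no-pager", "copy-from", machine, src_in_container, dest_on_host])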
user-cont/conu
conu/backend/nspawn/container.py
NspawnContainer.delete
def delete(self, force=False, volumes=False):
    """
    delete underlying image

    :param force: bool - force delete, do not care about errors
    :param volumes: not used anyhow
    :return: None
    """
    try:
        self.image.rmi()
    except ConuException as ime:
        if not force:
            raise ime
        else:
            pass
python
[ "def", "delete", "(", "self", ",", "force", "=", "False", ",", "volumes", "=", "False", ")", ":", "try", ":", "self", ".", "image", ".", "rmi", "(", ")", "except", "ConuException", "as", "ime", ":", "if", "not", "force", ":", "raise", "ime", "else", ":", "pass" ]
delete underlying image :param force: bool - force delete, do not care about errors :param volumes: not used anyhow :return: None
[ "delete", "underlying", "image" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/container.py#L220-L234
train
user-cont/conu
conu/backend/nspawn/container.py
NspawnContainer.cleanup
def cleanup(self, force=False, delete=False):
    """
    Stop container and delete image if given param delete

    :param force: bool, force stop and delete, no errors raised
    :param delete: delete images
    :return: None
    """
    # TODO: this method could be part of API, like:
    try:
        self.stop()
    except subprocess.CalledProcessError as stop:
        logger.debug("unable to stop container via stop", stop)
        if not force:
            raise stop
        try:
            self.kill()
        except subprocess.CalledProcessError as kill:
            logger.debug("unable to stop container via kill", kill)
            pass
    if delete:
        self.delete(force=force)
python
[ "def", "cleanup", "(", "self", ",", "force", "=", "False", ",", "delete", "=", "False", ")", ":", "# TODO: this method could be part of API, like:", "try", ":", "self", ".", "stop", "(", ")", "except", "subprocess", ".", "CalledProcessError", "as", "stop", ":", "logger", ".", "debug", "(", "\"unable to stop container via stop\"", ",", "stop", ")", "if", "not", "force", ":", "raise", "stop", "try", ":", "self", ".", "kill", "(", ")", "except", "subprocess", ".", "CalledProcessError", "as", "kill", ":", "logger", ".", "debug", "(", "\"unable to stop container via kill\"", ",", "kill", ")", "pass", "if", "delete", ":", "self", ".", "delete", "(", "force", "=", "force", ")" ]
Stop container and delete image if given param delete :param force: bool, force stop and delete, no errors raised :param delete: delete images :return: None
[ "Stop", "container", "and", "delete", "image", "if", "given", "param", "delete" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/container.py#L236-L257
train
user-cont/conu
conu/backend/nspawn/container.py
NspawnContainer.run_systemdrun
def run_systemdrun(
        self,
        command,
        internal_background=False,
        return_full_dict=False,
        **kwargs):
    """
    execute command via systemd-run inside container

    :param command: list of command params
    :param internal_background: not used now
    :param kwargs: pass params to subprocess
    :return: dict with result
    """
    internalkw = deepcopy(kwargs) or {}
    original_ignore_st = internalkw.get("ignore_status", False)
    original_return_st = internalkw.get("return_output", False)
    internalkw["ignore_status"] = True
    internalkw["return_output"] = False
    unit_name = constants.CONU_ARTIFACT_TAG + "unit_" + random_str()
    opts = ["-M", self.name, "--unit", unit_name]
    lpath = "/var/tmp/{}".format(unit_name)
    comout = {}
    if self._run_systemdrun_decide():
        add_wait_var = "--wait"
    else:
        # keep service exist after it finish, to be able to read exit code
        add_wait_var = "-r"
    if internal_background:
        add_wait_var = ""
    if add_wait_var:
        opts.append(add_wait_var)
    # TODO: behave move similar to run_cmd function, unable to work with clean subprocess objects because systemd-run
    # does not support return stderr, stdout, and return code directly
    # find way how to do this in better way, machinectl shell is not possible
    # https://github.com/systemd/systemd/issues/5879
    # https://github.com/systemd/systemd/issues/5878
    bashworkaround = [
        "/bin/bash",
        "-c",
        "({comm})>{path}.stdout 2>{path}.stderr".format(
            comm=" ".join(command),
            path=lpath)]
    whole_cmd = ["systemd-run"] + opts + bashworkaround
    comout['command'] = command
    comout['return_code'] = run_cmd(whole_cmd, **internalkw) or 0
    if not internal_background:
        if not self._run_systemdrun_decide():
            comout['return_code'] = self._systemctl_wait_until_finish(
                self.name, unit_name)
        if self.is_running():
            self.copy_from("{pin}.stdout".format(pin=lpath), "{pin}.stdout".format(pin=lpath))
            with open("{pin}.stdout".format(pin=lpath)) as f:
                comout['stdout'] = f.read()
            self.copy_from("{pin}.stderr".format(pin=lpath), "{pin}.stderr".format(pin=lpath))
            with open("{pin}.stderr".format(pin=lpath)) as f:
                comout['stderr'] = f.read()
        logger.debug(comout)
        if not original_ignore_st and comout['return_code'] != 0:
            raise subprocess.CalledProcessError(comout['command'], comout)
    if return_full_dict:
        return comout
    if original_return_st:
        return comout['stdout']
    else:
        return comout['return_code']
python
[ "def", "run_systemdrun", "(", "self", ",", "command", ",", "internal_background", "=", "False", ",", "return_full_dict", "=", "False", ",", "*", "*", "kwargs", ")", ":", "internalkw", "=", "deepcopy", "(", "kwargs", ")", "or", "{", "}", "original_ignore_st", "=", "internalkw", ".", "get", "(", "\"ignore_status\"", ",", "False", ")", "original_return_st", "=", "internalkw", ".", "get", "(", "\"return_output\"", ",", "False", ")", "internalkw", "[", "\"ignore_status\"", "]", "=", "True", "internalkw", "[", "\"return_output\"", "]", "=", "False", "unit_name", "=", "constants", ".", "CONU_ARTIFACT_TAG", "+", "\"unit_\"", "+", "random_str", "(", ")", "opts", "=", "[", "\"-M\"", ",", "self", ".", "name", ",", "\"--unit\"", ",", "unit_name", "]", "lpath", "=", "\"/var/tmp/{}\"", ".", "format", "(", "unit_name", ")", "comout", "=", "{", "}", "if", "self", ".", "_run_systemdrun_decide", "(", ")", ":", "add_wait_var", "=", "\"--wait\"", "else", ":", "# keep service exist after it finish, to be able to read exit code", "add_wait_var", "=", "\"-r\"", "if", "internal_background", ":", "add_wait_var", "=", "\"\"", "if", "add_wait_var", ":", "opts", ".", "append", "(", "add_wait_var", ")", "# TODO: behave move similar to run_cmd function, unable to work with clean subprocess objects because systemd-run", "# does not support return stderr, stdout, and return code directly", "# find way how to do this in better way, machinectl shell is not possible", "# https://github.com/systemd/systemd/issues/5879", "# https://github.com/systemd/systemd/issues/5878", "bashworkaround", "=", "[", "\"/bin/bash\"", ",", "\"-c\"", ",", "\"({comm})>{path}.stdout 2>{path}.stderr\"", ".", "format", "(", "comm", "=", "\" \"", ".", "join", "(", "command", ")", ",", "path", "=", "lpath", ")", "]", "whole_cmd", "=", "[", "\"systemd-run\"", "]", "+", "opts", "+", "bashworkaround", "comout", "[", "'command'", "]", "=", "command", "comout", "[", "'return_code'", "]", "=", "run_cmd", "(", "whole_cmd", ",", "*", "*", "internalkw", ")", "or", "0", "if", "not", "internal_background", ":", "if", "not", "self", ".", "_run_systemdrun_decide", "(", ")", ":", "comout", "[", "'return_code'", "]", "=", "self", ".", "_systemctl_wait_until_finish", "(", "self", ".", "name", ",", "unit_name", ")", "if", "self", ".", "is_running", "(", ")", ":", "self", ".", "copy_from", "(", "\"{pin}.stdout\"", ".", "format", "(", "pin", "=", "lpath", ")", ",", "\"{pin}.stdout\"", ".", "format", "(", "pin", "=", "lpath", ")", ")", "with", "open", "(", "\"{pin}.stdout\"", ".", "format", "(", "pin", "=", "lpath", ")", ")", "as", "f", ":", "comout", "[", "'stdout'", "]", "=", "f", ".", "read", "(", ")", "self", ".", "copy_from", "(", "\"{pin}.stderr\"", ".", "format", "(", "pin", "=", "lpath", ")", ",", "\"{pin}.stderr\"", ".", "format", "(", "pin", "=", "lpath", ")", ")", "with", "open", "(", "\"{pin}.stderr\"", ".", "format", "(", "pin", "=", "lpath", ")", ")", "as", "f", ":", "comout", "[", "'stderr'", "]", "=", "f", ".", "read", "(", ")", "logger", ".", "debug", "(", "comout", ")", "if", "not", "original_ignore_st", "and", "comout", "[", "'return_code'", "]", "!=", "0", ":", "raise", "subprocess", ".", "CalledProcessError", "(", "comout", "[", "'command'", "]", ",", "comout", ")", "if", "return_full_dict", ":", "return", "comout", "if", "original_return_st", ":", "return", "comout", "[", "'stdout'", "]", "else", ":", "return", "comout", "[", "'return_code'", "]" ]
execute command via systemd-run inside container :param command: list of command params :param internal_background: not used now :param kwargs: pass params to subprocess :return: dict with result
[ "execute", "command", "via", "systemd", "-", "run", "inside", "container" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/container.py#L306-L374
train
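The record above hinges on one trick: systemd-run cannot hand stdout, stderr and the exit code back directly, so the command is wrapped in a shell that redirects output to files which are copied back afterwards. A stripped-down sketch of that wrapping follows; the machine name, unit name and paths are placeholders.

# Illustrative only: wrap a command so its output lands in files named after the unit,
# as the run_systemdrun record above does before invoking systemd-run.
command = ["ls", "-la", "/"]
machine = "my-nspawn-machine"      # placeholder
unit_name = "conu-unit-example"    # placeholder unit name
lpath = "/var/tmp/{}".format(unit_name)

bash_workaround = [
    "/bin/bash", "-c",
    "({comm})>{path}.stdout 2>{path}.stderr".format(comm=" ".join(command), path=lpath),
]
whole_cmd = ["systemd-run", "-M", machine, "--unit", unit_name, "--wait"] + bash_workaround
print(whole_cmd)
# ['systemd-run', '-M', 'my-nspawn-machine', '--unit', 'conu-unit-example', '--wait',
#  '/bin/bash', '-c', '(ls -la /)>/var/tmp/conu-unit-example.stdout 2>/var/tmp/conu-unit-example.stderr']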
user-cont/conu
conu/backend/nspawn/container.py
NspawnContainer._wait_for_machine_booted
def _wait_for_machine_booted(name, suffictinet_texts=None):
    """
    Internal method wait until machine is ready, in common case means
    there is running systemd-logind

    :param name: str with machine name
    :param suffictinet_texts: alternative text to check in output
    :return: True or exception
    """
    # TODO: rewrite it using probes module in utils
    suffictinet_texts = suffictinet_texts or ["systemd-logind"]
    # optionally use: "Unit: machine"
    for foo in range(constants.DEFAULT_RETRYTIMEOUT):
        time.sleep(constants.DEFAULT_SLEEP)
        out = run_cmd(
            ["machinectl", "--no-pager", "status", name],
            ignore_status=True, return_output=True)
        for restr in suffictinet_texts:
            if restr in out:
                time.sleep(constants.DEFAULT_SLEEP)
                return True
    raise ConuException(
        "Unable to start machine %s within %d (machinectl status command dos not contain %s)" %
        (name, constants.DEFAULT_RETRYTIMEOUT, suffictinet_texts))
python
[ "def", "_wait_for_machine_booted", "(", "name", ",", "suffictinet_texts", "=", "None", ")", ":", "# TODO: rewrite it using probes module in utils", "suffictinet_texts", "=", "suffictinet_texts", "or", "[", "\"systemd-logind\"", "]", "# optionally use: \"Unit: machine\"", "for", "foo", "in", "range", "(", "constants", ".", "DEFAULT_RETRYTIMEOUT", ")", ":", "time", ".", "sleep", "(", "constants", ".", "DEFAULT_SLEEP", ")", "out", "=", "run_cmd", "(", "[", "\"machinectl\"", ",", "\"--no-pager\"", ",", "\"status\"", ",", "name", "]", ",", "ignore_status", "=", "True", ",", "return_output", "=", "True", ")", "for", "restr", "in", "suffictinet_texts", ":", "if", "restr", "in", "out", ":", "time", ".", "sleep", "(", "constants", ".", "DEFAULT_SLEEP", ")", "return", "True", "raise", "ConuException", "(", "\"Unable to start machine %s within %d (machinectl status command dos not contain %s)\"", "%", "(", "name", ",", "constants", ".", "DEFAULT_RETRYTIMEOUT", ",", "suffictinet_texts", ")", ")" ]
Internal method wait until machine is ready, in common case means there is running systemd-logind :param name: str with machine name :param suffictinet_texts: alternative text to check in output :return: True or exception
[ "Internal", "method", "wait", "until", "machine", "is", "ready", "in", "common", "case", "means", "there", "is", "running", "systemd", "-", "logind" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/container.py#L408-L431
train
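A generic sketch of the poll-until-marker pattern used by the record above, with plain subprocess instead of conu's run_cmd and invented stand-ins for the retry constants.

# Illustrative only: poll a status command until its output contains a marker string.
import subprocess
import time

RETRIES = 30       # invented stand-in for constants.DEFAULT_RETRYTIMEOUT
SLEEP_SECONDS = 1  # invented stand-in for constants.DEFAULT_SLEEP


def wait_for_marker(cmd, markers, retries=RETRIES, sleep_seconds=SLEEP_SECONDS):
    for _ in range(retries):
        time.sleep(sleep_seconds)
        out = subprocess.run(cmd, capture_output=True, text=True).stdout
        if any(marker in out for marker in markers):
            return True
    raise RuntimeError("output of %r never contained any of %r" % (cmd, markers))


# e.g. wait until a machinectl status report mentions systemd-logind:
# wait_for_marker(["machinectl", "--no-pager", "status", "my-machine"], ["systemd-logind"])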
user-cont/conu
conu/backend/nspawn/container.py
NspawnContainer._internal_reschedule
def _internal_reschedule(callback, retry=3, sleep_time=constants.DEFAULT_SLEEP):
    """
    workaround method for internal_run_container method
    It sometimes fails because of Dbus or whatever, so try to start it moretimes

    :param callback: callback method list
    :param retry: how many times try to invoke command
    :param sleep_time: how long wait before subprocess.poll() to find if it failed
    :return: subprocess object
    """
    for foo in range(retry):
        container_process = callback[0](callback[1], *callback[2], **callback[3])
        time.sleep(sleep_time)
        container_process.poll()
        rcode = container_process.returncode
        if rcode is None:
            return container_process
    raise ConuException("Unable to start nspawn container - process failed for {}-times".format(retry))
python
[ "def", "_internal_reschedule", "(", "callback", ",", "retry", "=", "3", ",", "sleep_time", "=", "constants", ".", "DEFAULT_SLEEP", ")", ":", "for", "foo", "in", "range", "(", "retry", ")", ":", "container_process", "=", "callback", "[", "0", "]", "(", "callback", "[", "1", "]", ",", "*", "callback", "[", "2", "]", ",", "*", "*", "callback", "[", "3", "]", ")", "time", ".", "sleep", "(", "sleep_time", ")", "container_process", ".", "poll", "(", ")", "rcode", "=", "container_process", ".", "returncode", "if", "rcode", "is", "None", ":", "return", "container_process", "raise", "ConuException", "(", "\"Unable to start nspawn container - process failed for {}-times\"", ".", "format", "(", "retry", ")", ")" ]
workaround method for internal_run_container method It sometimes fails because of Dbus or whatever, so try to start it moretimes :param callback: callback method list :param retry: how many times try to invoke command :param sleep_time: how long wait before subprocess.poll() to find if it failed :return: subprocess object
[ "workaround", "method", "for", "internal_run_container", "method", "It", "sometimes", "fails", "because", "of", "Dbus", "or", "whatever", "so", "try", "to", "start", "it", "moretimes" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/container.py#L434-L451
train
user-cont/conu
conu/backend/nspawn/container.py
NspawnContainer.internal_run_container
def internal_run_container(name, callback_method, foreground=False):
    """
    Internal method what runs container process

    :param name: str - name of container
    :param callback_method: list - how to invoke container
    :param foreground: bool run in background by default
    :return: suprocess instance
    """
    if not foreground:
        logger.info("Stating machine (boot nspawn container) {}".format(name))
        # wait until machine is booted when running at background, unable to execute commands without logind
        # in running container
        nspawn_process = NspawnContainer._internal_reschedule(callback_method)
        NspawnContainer._wait_for_machine_booted(name)
        logger.info("machine: %s starting finished" % name)
        return nspawn_process
    else:
        logger.info("Stating machine (return process) {}".format(name))
        return callback_method[0](callback_method[1], *callback_method[2], **callback_method[3])
python
[ "def", "internal_run_container", "(", "name", ",", "callback_method", ",", "foreground", "=", "False", ")", ":", "if", "not", "foreground", ":", "logger", ".", "info", "(", "\"Stating machine (boot nspawn container) {}\"", ".", "format", "(", "name", ")", ")", "# wait until machine is booted when running at background, unable to execute commands without logind", "# in running container", "nspawn_process", "=", "NspawnContainer", ".", "_internal_reschedule", "(", "callback_method", ")", "NspawnContainer", ".", "_wait_for_machine_booted", "(", "name", ")", "logger", ".", "info", "(", "\"machine: %s starting finished\"", "%", "name", ")", "return", "nspawn_process", "else", ":", "logger", ".", "info", "(", "\"Stating machine (return process) {}\"", ".", "format", "(", "name", ")", ")", "return", "callback_method", "[", "0", "]", "(", "callback_method", "[", "1", "]", ",", "*", "callback_method", "[", "2", "]", ",", "*", "*", "callback_method", "[", "3", "]", ")" ]
Internal method what runs container process :param name: str - name of container :param callback_method: list - how to invoke container :param foreground: bool run in background by default :return: suprocess instance
[ "Internal", "method", "what", "runs", "container", "process" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/container.py#L455-L474
train
user-cont/conu
conu/helpers/docker_backend.py
get_container_output
def get_container_output(backend, image_name, command, image_tag="latest",
                         additional_opts=None):
    """
    Create a throw-away container based on provided image and tag, run the supplied
    command in it and return output. The container is stopped and removed after it exits.

    :param backend: instance of DockerBackend
    :param image_name: str, name of the container image
    :param command: list of str, command to run in the container
    :param image_tag: str, container image tag, defaults to "latest"
    :param additional_opts: list of str, by default this function creates the container
           using docker binary and run command; with this argument you can supply addition
           options to the "docker run" invocation
    :return: str (unicode), output of the container
    """
    image = backend.ImageClass(image_name, tag=image_tag)
    # FIXME: use run_via_api and make this a generic function
    c = image.run_via_binary(DockerRunBuilder(command=command, additional_opts=additional_opts))
    try:
        c.wait()
        return c.logs_unicode()
    finally:
        c.stop()
        c.wait()
        c.delete()
python
[ "def", "get_container_output", "(", "backend", ",", "image_name", ",", "command", ",", "image_tag", "=", "\"latest\"", ",", "additional_opts", "=", "None", ")", ":", "image", "=", "backend", ".", "ImageClass", "(", "image_name", ",", "tag", "=", "image_tag", ")", "# FIXME: use run_via_api and make this a generic function", "c", "=", "image", ".", "run_via_binary", "(", "DockerRunBuilder", "(", "command", "=", "command", ",", "additional_opts", "=", "additional_opts", ")", ")", "try", ":", "c", ".", "wait", "(", ")", "return", "c", ".", "logs_unicode", "(", ")", "finally", ":", "c", ".", "stop", "(", ")", "c", ".", "wait", "(", ")", "c", ".", "delete", "(", ")" ]
Create a throw-away container based on provided image and tag, run the supplied command in it and return output. The container is stopped and removed after it exits. :param backend: instance of DockerBackend :param image_name: str, name of the container image :param command: list of str, command to run in the container :param image_tag: str, container image tag, defaults to "latest" :param additional_opts: list of str, by default this function creates the container using docker binary and run command; with this argument you can supply addition options to the "docker run" invocation :return: str (unicode), output of the container
[ "Create", "a", "throw", "-", "away", "container", "based", "on", "provided", "image", "and", "tag", "run", "the", "supplied", "command", "in", "it", "and", "return", "output", ".", "The", "container", "is", "stopped", "and", "removed", "after", "it", "exits", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/helpers/docker_backend.py#L4-L28
train
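A hedged example of calling the helper above. It assumes a running docker daemon, that conu exposes DockerBackend as a context manager at the package top level, and that get_container_output is importable from conu.helpers.docker_backend (the module path shown in the record); the image name is invented.

# Illustrative only: run a one-off command in a throw-away container and read its output.
from conu import DockerBackend                             # assumed top-level export
from conu.helpers.docker_backend import get_container_output

with DockerBackend() as backend:
    out = get_container_output(backend, "registry.fedoraproject.org/fedora",
                               ["cat", "/etc/os-release"], image_tag="31")
    print(out)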
user-cont/conu
conu/backend/docker/image.py
DockerImage.pull
def pull(self):
    """
    Pull this image from registry. Raises an exception if the image is not found in
    the registry.

    :return: None
    """
    for json_e in self.d.pull(repository=self.name, tag=self.tag, stream=True, decode=True):
        logger.debug(json_e)
        status = graceful_get(json_e, "status")
        if status:
            logger.info(status)
        else:
            error = graceful_get(json_e, "error")
            logger.error(status)
            raise ConuException("There was an error while pulling the image %s: %s",
                                self.name, error)
    self.using_transport(SkopeoTransport.DOCKER_DAEMON)
python
[ "def", "pull", "(", "self", ")", ":", "for", "json_e", "in", "self", ".", "d", ".", "pull", "(", "repository", "=", "self", ".", "name", ",", "tag", "=", "self", ".", "tag", ",", "stream", "=", "True", ",", "decode", "=", "True", ")", ":", "logger", ".", "debug", "(", "json_e", ")", "status", "=", "graceful_get", "(", "json_e", ",", "\"status\"", ")", "if", "status", ":", "logger", ".", "info", "(", "status", ")", "else", ":", "error", "=", "graceful_get", "(", "json_e", ",", "\"error\"", ")", "logger", ".", "error", "(", "status", ")", "raise", "ConuException", "(", "\"There was an error while pulling the image %s: %s\"", ",", "self", ".", "name", ",", "error", ")", "self", ".", "using_transport", "(", "SkopeoTransport", ".", "DOCKER_DAEMON", ")" ]
Pull this image from registry. Raises an exception if the image is not found in the registry. :return: None
[ "Pull", "this", "image", "from", "registry", ".", "Raises", "an", "exception", "if", "the", "image", "is", "not", "found", "in", "the", "registry", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L185-L202
train
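A hedged sketch of consuming the docker-py streaming pull output the way the record above does, using the low-level docker.APIClient and an invented image name.

# Illustrative only: iterate docker-py's streaming pull output, print progress,
# and stop on the first "error" entry, mirroring the DockerImage.pull record above.
import docker

client = docker.APIClient()  # low-level client exposing pull(..., stream=True, decode=True)
for event in client.pull(repository="registry.fedoraproject.org/fedora", tag="31",
                         stream=True, decode=True):
    status = event.get("status")
    if status:
        print(status)
    else:
        raise RuntimeError("pull failed: %s" % event.get("error"))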
user-cont/conu
conu/backend/docker/image.py
DockerImage.using_transport
def using_transport(self, transport=None, path=None, logs=True):
    """
    change used transport

    :param transport: from where will be this image copied
    :param path in filesystem
    :param logs enable/disable
    :return: self
    """
    if not transport:
        return self
    if self.transport == transport and self.path == path:
        return self

    path_required = [SkopeoTransport.DIRECTORY, SkopeoTransport.DOCKER_ARCHIVE, SkopeoTransport.OCI]

    if transport in path_required:
        if not path and logs:
            logging.debug("path not provided, temporary path was used")
        self.path = self.mount(path).mount_point
    elif transport == SkopeoTransport.OSTREE:
        if path and not os.path.isabs(path):
            raise ConuException("Path '", path, "' for OSTree transport is not absolute")
        if not path and logs:
            logging.debug("path not provided, default /ostree/repo path was used")
        self.path = path
    else:
        if path and logs:
            logging.warning("path %s was ignored!", path)
        self.path = None

    self.transport = transport
    return self
python
[ "def", "using_transport", "(", "self", ",", "transport", "=", "None", ",", "path", "=", "None", ",", "logs", "=", "True", ")", ":", "if", "not", "transport", ":", "return", "self", "if", "self", ".", "transport", "==", "transport", "and", "self", ".", "path", "==", "path", ":", "return", "self", "path_required", "=", "[", "SkopeoTransport", ".", "DIRECTORY", ",", "SkopeoTransport", ".", "DOCKER_ARCHIVE", ",", "SkopeoTransport", ".", "OCI", "]", "if", "transport", "in", "path_required", ":", "if", "not", "path", "and", "logs", ":", "logging", ".", "debug", "(", "\"path not provided, temporary path was used\"", ")", "self", ".", "path", "=", "self", ".", "mount", "(", "path", ")", ".", "mount_point", "elif", "transport", "==", "SkopeoTransport", ".", "OSTREE", ":", "if", "path", "and", "not", "os", ".", "path", ".", "isabs", "(", "path", ")", ":", "raise", "ConuException", "(", "\"Path '\"", ",", "path", ",", "\"' for OSTree transport is not absolute\"", ")", "if", "not", "path", "and", "logs", ":", "logging", ".", "debug", "(", "\"path not provided, default /ostree/repo path was used\"", ")", "self", ".", "path", "=", "path", "else", ":", "if", "path", "and", "logs", ":", "logging", ".", "warning", "(", "\"path %s was ignored!\"", ",", "path", ")", "self", ".", "path", "=", "None", "self", ".", "transport", "=", "transport", "return", "self" ]
change used transport :param transport: transport from which this image will be copied :param path: path in the filesystem :param logs: enable/disable logging :return: self
[ "change", "used", "transport" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L230-L264
train
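A minimal usage sketch for using_transport, assuming the import paths below and a placeholder image name; the DIRECTORY, DOCKER_ARCHIVE and OCI transports want a filesystem path, otherwise a temporary one is used:

from conu.backend.docker.image import DockerImage, SkopeoTransport  # import paths assumed

image = DockerImage("registry.fedoraproject.org/fedora", tag="31")
# point the image at a directory transport before a later `skopeo copy`
image.using_transport(SkopeoTransport.DIRECTORY, path="/tmp/fedora-dir")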
user-cont/conu
conu/backend/docker/image.py
DockerImage.save_to
def save_to(self, image): """ Save this image to another DockerImage :param image: DockerImage :return: """ if not isinstance(image, self.__class__): raise ConuException("Invalid target image type", type(image)) self.copy(image.name, image.tag, target_transport=image.transport, target_path=image.path, logs=False)
python
def save_to(self, image): """ Save this image to another DockerImage :param image: DockerImage :return: """ if not isinstance(image, self.__class__): raise ConuException("Invalid target image type", type(image)) self.copy(image.name, image.tag, target_transport=image.transport, target_path=image.path, logs=False)
[ "def", "save_to", "(", "self", ",", "image", ")", ":", "if", "not", "isinstance", "(", "image", ",", "self", ".", "__class__", ")", ":", "raise", "ConuException", "(", "\"Invalid target image type\"", ",", "type", "(", "image", ")", ")", "self", ".", "copy", "(", "image", ".", "name", ",", "image", ".", "tag", ",", "target_transport", "=", "image", ".", "transport", ",", "target_path", "=", "image", ".", "path", ",", "logs", "=", "False", ")" ]
Save this image to another DockerImage :param image: DockerImage :return:
[ "Save", "this", "image", "to", "another", "DockerImage" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L266-L276
train
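A hedged sketch for save_to, with made-up registry/repository names and assumed import paths; the call boils down to a `skopeo copy` from this image to the target image's transport:

from conu.backend.docker.image import (  # import paths assumed
    DockerImage, DockerImagePullPolicy, SkopeoTransport)

source = DockerImage("registry.fedoraproject.org/fedora", tag="31")
source.using_transport(SkopeoTransport.DOCKER_DAEMON)
# the target only describes where the copy should land, so never pull it
target = DockerImage("registry.example.com/myorg/fedora", tag="31",
                     pull_policy=DockerImagePullPolicy.NEVER)
target.using_transport(SkopeoTransport.DOCKER)
source.save_to(target)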
user-cont/conu
conu/backend/docker/image.py
DockerImage.load_from
def load_from(self, image): """ Load from another DockerImage to this one :param image: :return: """ if not isinstance(image, self.__class__): raise ConuException("Invalid source image type", type(image)) image.save_to(self)
python
def load_from(self, image): """ Load from another DockerImage to this one :param image: :return: """ if not isinstance(image, self.__class__): raise ConuException("Invalid source image type", type(image)) image.save_to(self)
[ "def", "load_from", "(", "self", ",", "image", ")", ":", "if", "not", "isinstance", "(", "image", ",", "self", ".", "__class__", ")", ":", "raise", "ConuException", "(", "\"Invalid source image type\"", ",", "type", "(", "image", ")", ")", "image", ".", "save_to", "(", "self", ")" ]
Load from another DockerImage to this one :param image: :return:
[ "Load", "from", "another", "DockerImage", "to", "this", "one" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L278-L286
train
user-cont/conu
conu/backend/docker/image.py
DockerImage.skopeo_pull
def skopeo_pull(self): """ Pull image from Docker to local Docker daemon using skopeo :return: pulled image """ return self.copy(self.name, self.tag, SkopeoTransport.DOCKER, SkopeoTransport.DOCKER_DAEMON)\ .using_transport(SkopeoTransport.DOCKER_DAEMON)
python
def skopeo_pull(self): """ Pull image from Docker to local Docker daemon using skopeo :return: pulled image """ return self.copy(self.name, self.tag, SkopeoTransport.DOCKER, SkopeoTransport.DOCKER_DAEMON)\ .using_transport(SkopeoTransport.DOCKER_DAEMON)
[ "def", "skopeo_pull", "(", "self", ")", ":", "return", "self", ".", "copy", "(", "self", ".", "name", ",", "self", ".", "tag", ",", "SkopeoTransport", ".", "DOCKER", ",", "SkopeoTransport", ".", "DOCKER_DAEMON", ")", ".", "using_transport", "(", "SkopeoTransport", ".", "DOCKER_DAEMON", ")" ]
Pull image from a Docker registry to the local Docker daemon using skopeo :return: pulled image
[ "Pull", "image", "from", "Docker", "to", "local", "Docker", "daemon", "using", "skopeo" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L288-L295
train
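Illustrative sketch only (the image name is a placeholder and skopeo must be installed): skopeo_pull copies docker://<name>:<tag> into the local Docker daemon and returns the image switched to the docker-daemon transport:

from conu.backend.docker.image import DockerImage  # import path assumed

remote = DockerImage("registry.fedoraproject.org/fedora-minimal", tag="31")
local = remote.skopeo_pull()  # runs `skopeo copy docker://... docker-daemon:...`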
user-cont/conu
conu/backend/docker/image.py
DockerImage.skopeo_push
def skopeo_push(self, repository=None, tag=None): """ Push image from Docker daemon to Docker using skopeo :param repository: repository to be pushed to :param tag: tag :return: pushed image """ return self.copy(repository, tag, SkopeoTransport.DOCKER_DAEMON, SkopeoTransport.DOCKER)\ .using_transport(SkopeoTransport.DOCKER)
python
def skopeo_push(self, repository=None, tag=None): """ Push image from Docker daemon to Docker using skopeo :param repository: repository to be pushed to :param tag: tag :return: pushed image """ return self.copy(repository, tag, SkopeoTransport.DOCKER_DAEMON, SkopeoTransport.DOCKER)\ .using_transport(SkopeoTransport.DOCKER)
[ "def", "skopeo_push", "(", "self", ",", "repository", "=", "None", ",", "tag", "=", "None", ")", ":", "return", "self", ".", "copy", "(", "repository", ",", "tag", ",", "SkopeoTransport", ".", "DOCKER_DAEMON", ",", "SkopeoTransport", ".", "DOCKER", ")", ".", "using_transport", "(", "SkopeoTransport", ".", "DOCKER", ")" ]
Push image from the local Docker daemon to a Docker registry using skopeo :param repository: repository to be pushed to :param tag: tag :return: pushed image
[ "Push", "image", "from", "Docker", "daemon", "to", "Docker", "using", "skopeo" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L297-L305
train
user-cont/conu
conu/backend/docker/image.py
DockerImage.copy
def copy(self, repository=None, tag=None, source_transport=None, target_transport=SkopeoTransport.DOCKER, source_path=None, target_path=None, logs=True): """ Copy this image :param repository to be copied to :param tag :param source_transport Transport :param target_transport Transport :param source_path needed to specify for dir, docker-archive or oci transport :param target_path needed to specify for dir, docker-archive or oci transport :param logs enable/disable logs :return: the new DockerImage """ if not repository: repository = self.name if not tag: tag = self.tag if self.tag else "latest" if target_transport == SkopeoTransport.OSTREE and tag and logs: logging.warning("tag was ignored") target = (DockerImage(repository, tag, pull_policy=DockerImagePullPolicy.NEVER) .using_transport(target_transport, target_path)) self.using_transport(source_transport, source_path) try: run_cmd(["skopeo", "copy", transport_param(self), transport_param(target)]) except subprocess.CalledProcessError: raise ConuException("There was an error while copying repository", self.name) return target
python
def copy(self, repository=None, tag=None, source_transport=None, target_transport=SkopeoTransport.DOCKER, source_path=None, target_path=None, logs=True): """ Copy this image :param repository to be copied to :param tag :param source_transport Transport :param target_transport Transport :param source_path needed to specify for dir, docker-archive or oci transport :param target_path needed to specify for dir, docker-archive or oci transport :param logs enable/disable logs :return: the new DockerImage """ if not repository: repository = self.name if not tag: tag = self.tag if self.tag else "latest" if target_transport == SkopeoTransport.OSTREE and tag and logs: logging.warning("tag was ignored") target = (DockerImage(repository, tag, pull_policy=DockerImagePullPolicy.NEVER) .using_transport(target_transport, target_path)) self.using_transport(source_transport, source_path) try: run_cmd(["skopeo", "copy", transport_param(self), transport_param(target)]) except subprocess.CalledProcessError: raise ConuException("There was an error while copying repository", self.name) return target
[ "def", "copy", "(", "self", ",", "repository", "=", "None", ",", "tag", "=", "None", ",", "source_transport", "=", "None", ",", "target_transport", "=", "SkopeoTransport", ".", "DOCKER", ",", "source_path", "=", "None", ",", "target_path", "=", "None", ",", "logs", "=", "True", ")", ":", "if", "not", "repository", ":", "repository", "=", "self", ".", "name", "if", "not", "tag", ":", "tag", "=", "self", ".", "tag", "if", "self", ".", "tag", "else", "\"latest\"", "if", "target_transport", "==", "SkopeoTransport", ".", "OSTREE", "and", "tag", "and", "logs", ":", "logging", ".", "warning", "(", "\"tag was ignored\"", ")", "target", "=", "(", "DockerImage", "(", "repository", ",", "tag", ",", "pull_policy", "=", "DockerImagePullPolicy", ".", "NEVER", ")", ".", "using_transport", "(", "target_transport", ",", "target_path", ")", ")", "self", ".", "using_transport", "(", "source_transport", ",", "source_path", ")", "try", ":", "run_cmd", "(", "[", "\"skopeo\"", ",", "\"copy\"", ",", "transport_param", "(", "self", ")", ",", "transport_param", "(", "target", ")", "]", ")", "except", "subprocess", ".", "CalledProcessError", ":", "raise", "ConuException", "(", "\"There was an error while copying repository\"", ",", "self", ".", "name", ")", "return", "target" ]
Copy this image :param repository: repository to be copied to :param tag: tag :param source_transport: Transport :param target_transport: Transport :param source_path: needs to be specified for dir, docker-archive or oci transports :param target_path: needs to be specified for dir, docker-archive or oci transports :param logs: enable/disable logs :return: the new DockerImage
[ "Copy", "this", "image" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L307-L340
train
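A sketch of a general copy() call (skopeo_push above is essentially a shorthand for this); the registry name is a placeholder and the import paths are assumptions:

from conu.backend.docker.image import DockerImage, SkopeoTransport  # import paths assumed

img = DockerImage("registry.fedoraproject.org/fedora", tag="31")
copied = img.copy(
    repository="registry.example.com/myorg/fedora",
    tag="31",
    source_transport=SkopeoTransport.DOCKER_DAEMON,
    target_transport=SkopeoTransport.DOCKER,
)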
user-cont/conu
conu/backend/docker/image.py
DockerImage.tag_image
def tag_image(self, repository=None, tag=None): """ Apply additional tags to the image or even add a new name :param repository: str, see constructor :param tag: str, see constructor :return: instance of DockerImage """ if not (repository or tag): raise ValueError("You need to specify either repository or tag.") r = repository or self.name t = "latest" if not tag else tag self.d.tag(image=self.get_full_name(), repository=r, tag=t) return DockerImage(r, tag=t)
python
def tag_image(self, repository=None, tag=None): """ Apply additional tags to the image or even add a new name :param repository: str, see constructor :param tag: str, see constructor :return: instance of DockerImage """ if not (repository or tag): raise ValueError("You need to specify either repository or tag.") r = repository or self.name t = "latest" if not tag else tag self.d.tag(image=self.get_full_name(), repository=r, tag=t) return DockerImage(r, tag=t)
[ "def", "tag_image", "(", "self", ",", "repository", "=", "None", ",", "tag", "=", "None", ")", ":", "if", "not", "(", "repository", "or", "tag", ")", ":", "raise", "ValueError", "(", "\"You need to specify either repository or tag.\"", ")", "r", "=", "repository", "or", "self", ".", "name", "t", "=", "\"latest\"", "if", "not", "tag", "else", "tag", "self", ".", "d", ".", "tag", "(", "image", "=", "self", ".", "get_full_name", "(", ")", ",", "repository", "=", "r", ",", "tag", "=", "t", ")", "return", "DockerImage", "(", "r", ",", "tag", "=", "t", ")" ]
Apply additional tags to the image or even add a new name :param repository: str, see constructor :param tag: str, see constructor :return: instance of DockerImage
[ "Apply", "additional", "tags", "to", "the", "image", "or", "even", "add", "a", "new", "name" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L342-L355
train
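A small usage sketch for tag_image; the repository and tag values are made up and the import path is an assumption:

from conu.backend.docker.image import DockerImage  # import path assumed

img = DockerImage("registry.fedoraproject.org/fedora", tag="31")
tagged = img.tag_image(repository="localhost/my-fedora", tag="testing")
print(tagged.get_full_name())  # localhost/my-fedora:testing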
user-cont/conu
conu/backend/docker/image.py
DockerImage.inspect
def inspect(self, refresh=True): """ provide metadata about the image; flip refresh=True if cached metadata are enough :param refresh: bool, update the metadata with up to date content :return: dict """ if refresh or not self._inspect_data: identifier = self._id or self.get_full_name() if not identifier: raise ConuException("This image does not have a valid identifier.") self._inspect_data = self.d.inspect_image(identifier) return self._inspect_data
python
def inspect(self, refresh=True): """ provide metadata about the image; flip refresh=True if cached metadata are enough :param refresh: bool, update the metadata with up to date content :return: dict """ if refresh or not self._inspect_data: identifier = self._id or self.get_full_name() if not identifier: raise ConuException("This image does not have a valid identifier.") self._inspect_data = self.d.inspect_image(identifier) return self._inspect_data
[ "def", "inspect", "(", "self", ",", "refresh", "=", "True", ")", ":", "if", "refresh", "or", "not", "self", ".", "_inspect_data", ":", "identifier", "=", "self", ".", "_id", "or", "self", ".", "get_full_name", "(", ")", "if", "not", "identifier", ":", "raise", "ConuException", "(", "\"This image does not have a valid identifier.\"", ")", "self", ".", "_inspect_data", "=", "self", ".", "d", ".", "inspect_image", "(", "identifier", ")", "return", "self", ".", "_inspect_data" ]
provide metadata about the image; flip refresh=False if cached metadata are enough :param refresh: bool, update the metadata with up to date content :return: dict
[ "provide", "metadata", "about", "the", "image", ";", "flip", "refresh", "=", "True", "if", "cached", "metadata", "are", "enough" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L357-L369
train
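inspect() proxies docker inspect for the image; a sketch of reading a couple of typical keys (the key names follow the Docker API, not conu):

from conu.backend.docker.image import DockerImage  # import path assumed

img = DockerImage("registry.fedoraproject.org/fedora", tag="31")
metadata = img.inspect(refresh=True)
print(metadata["Id"])                         # image ID
print(metadata.get("Config", {}).get("Env"))  # environment baked into the image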
user-cont/conu
conu/backend/docker/image.py
DockerImage.has_pkgs_signed_with
def has_pkgs_signed_with(self, allowed_keys): """ Check signature of packages installed in image. Raises exception when * rpm binary is not installed in image * parsing of rpm fails * there are packages in image that are not signed with one of allowed keys :param allowed_keys: list of allowed keys :return: bool """ if not allowed_keys or not isinstance(allowed_keys, list): raise ConuException("allowed_keys must be a list") command = ['rpm', '-qa', '--qf', '%{name} %{SIGPGP:pgpsig}\n'] cont = self.run_via_binary(command=command) try: out = cont.logs_unicode()[:-1].split('\n') check_signatures(out, allowed_keys) finally: cont.stop() cont.delete() return True
python
def has_pkgs_signed_with(self, allowed_keys): """ Check signature of packages installed in image. Raises exception when * rpm binary is not installed in image * parsing of rpm fails * there are packages in image that are not signed with one of allowed keys :param allowed_keys: list of allowed keys :return: bool """ if not allowed_keys or not isinstance(allowed_keys, list): raise ConuException("allowed_keys must be a list") command = ['rpm', '-qa', '--qf', '%{name} %{SIGPGP:pgpsig}\n'] cont = self.run_via_binary(command=command) try: out = cont.logs_unicode()[:-1].split('\n') check_signatures(out, allowed_keys) finally: cont.stop() cont.delete() return True
[ "def", "has_pkgs_signed_with", "(", "self", ",", "allowed_keys", ")", ":", "if", "not", "allowed_keys", "or", "not", "isinstance", "(", "allowed_keys", ",", "list", ")", ":", "raise", "ConuException", "(", "\"allowed_keys must be a list\"", ")", "command", "=", "[", "'rpm'", ",", "'-qa'", ",", "'--qf'", ",", "'%{name} %{SIGPGP:pgpsig}\\n'", "]", "cont", "=", "self", ".", "run_via_binary", "(", "command", "=", "command", ")", "try", ":", "out", "=", "cont", ".", "logs_unicode", "(", ")", "[", ":", "-", "1", "]", ".", "split", "(", "'\\n'", ")", "check_signatures", "(", "out", ",", "allowed_keys", ")", "finally", ":", "cont", ".", "stop", "(", ")", "cont", ".", "delete", "(", ")", "return", "True" ]
Check signature of packages installed in image. Raises exception when * rpm binary is not installed in image * parsing of rpm fails * there are packages in image that are not signed with one of allowed keys :param allowed_keys: list of allowed keys :return: bool
[ "Check", "signature", "of", "packages", "installed", "in", "image", ".", "Raises", "exception", "when" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L639-L662
train
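A hedged sketch of the signature check; the key ID is a placeholder rather than a real signing key, and the exception import path is an assumption:

from conu.backend.docker.image import DockerImage  # import paths assumed
from conu.exceptions import ConuException

img = DockerImage("registry.fedoraproject.org/fedora", tag="31")
try:
    img.has_pkgs_signed_with(["0123456789abcdef"])  # placeholder key ID
except ConuException as ex:
    print("unsigned or wrongly signed packages found:", ex)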
user-cont/conu
conu/backend/docker/image.py
DockerImage.build
def build(cls, path, tag=None, dockerfile=None): """ Build the image from the provided dockerfile in path :param path : str, path to the directory containing the Dockerfile :param tag: str, A tag to add to the final image :param dockerfile: str, path within the build context to the Dockerfile :return: instance of DockerImage """ if not path: raise ConuException('Please specify path to the directory containing the Dockerfile') client = get_client() response = [line for line in client.build(path, rm=True, tag=tag, dockerfile=dockerfile, quiet=True)] if not response: raise ConuException('Failed to get ID of image') # The expected output is just one line with image ID if len(response) > 1: raise ConuException('Build failed: ' + str(response)) # get ID from output # b'{"stream":"sha256:39c7bac4e2da37983203df4fcf612a02de9e6f6456a7f3434d1fccbc9ad639a5\\n"}\r\n' response_utf = response[0].decode('utf-8') if response_utf[:11] != '{"stream":"' or response_utf[-6:] != '\\n"}\r\n': raise ConuException('Failed to parse ID from ' + response_utf) image_id = response_utf[11:-6] return cls(None, identifier=image_id)
python
def build(cls, path, tag=None, dockerfile=None): """ Build the image from the provided dockerfile in path :param path : str, path to the directory containing the Dockerfile :param tag: str, A tag to add to the final image :param dockerfile: str, path within the build context to the Dockerfile :return: instance of DockerImage """ if not path: raise ConuException('Please specify path to the directory containing the Dockerfile') client = get_client() response = [line for line in client.build(path, rm=True, tag=tag, dockerfile=dockerfile, quiet=True)] if not response: raise ConuException('Failed to get ID of image') # The expected output is just one line with image ID if len(response) > 1: raise ConuException('Build failed: ' + str(response)) # get ID from output # b'{"stream":"sha256:39c7bac4e2da37983203df4fcf612a02de9e6f6456a7f3434d1fccbc9ad639a5\\n"}\r\n' response_utf = response[0].decode('utf-8') if response_utf[:11] != '{"stream":"' or response_utf[-6:] != '\\n"}\r\n': raise ConuException('Failed to parse ID from ' + response_utf) image_id = response_utf[11:-6] return cls(None, identifier=image_id)
[ "def", "build", "(", "cls", ",", "path", ",", "tag", "=", "None", ",", "dockerfile", "=", "None", ")", ":", "if", "not", "path", ":", "raise", "ConuException", "(", "'Please specify path to the directory containing the Dockerfile'", ")", "client", "=", "get_client", "(", ")", "response", "=", "[", "line", "for", "line", "in", "client", ".", "build", "(", "path", ",", "rm", "=", "True", ",", "tag", "=", "tag", ",", "dockerfile", "=", "dockerfile", ",", "quiet", "=", "True", ")", "]", "if", "not", "response", ":", "raise", "ConuException", "(", "'Failed to get ID of image'", ")", "# The expected output is just one line with image ID", "if", "len", "(", "response", ")", ">", "1", ":", "raise", "ConuException", "(", "'Build failed: '", "+", "str", "(", "response", ")", ")", "# get ID from output", "# b'{\"stream\":\"sha256:39c7bac4e2da37983203df4fcf612a02de9e6f6456a7f3434d1fccbc9ad639a5\\\\n\"}\\r\\n'", "response_utf", "=", "response", "[", "0", "]", ".", "decode", "(", "'utf-8'", ")", "if", "response_utf", "[", ":", "11", "]", "!=", "'{\"stream\":\"'", "or", "response_utf", "[", "-", "6", ":", "]", "!=", "'\\\\n\"}\\r\\n'", ":", "raise", "ConuException", "(", "'Failed to parse ID from '", "+", "response_utf", ")", "image_id", "=", "response_utf", "[", "11", ":", "-", "6", "]", "return", "cls", "(", "None", ",", "identifier", "=", "image_id", ")" ]
Build the image from the provided dockerfile in path :param path : str, path to the directory containing the Dockerfile :param tag: str, A tag to add to the final image :param dockerfile: str, path within the build context to the Dockerfile :return: instance of DockerImage
[ "Build", "the", "image", "from", "the", "provided", "dockerfile", "in", "path" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L681-L711
train
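Sketch of a build call with a placeholder context directory; build() asks the Docker API for a quiet build and parses the resulting image ID out of the single response line:

from conu.backend.docker.image import DockerImage  # import path assumed

image = DockerImage.build("./my-app", tag="my-app:latest", dockerfile="Dockerfile")
# `image` wraps the freshly built image ID and can be inspected or run like any other DockerImage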
user-cont/conu
conu/backend/docker/image.py
DockerImage.layers
def layers(self, rev=True): """ Get list of DockerImage for every layer in image :param rev: get layers rev :return: list of DockerImages """ image_layers = [ DockerImage(None, identifier=x, pull_policy=DockerImagePullPolicy.NEVER) for x in self.get_layer_ids() ] if not rev: image_layers.reverse() return image_layers
python
def layers(self, rev=True): """ Get list of DockerImage for every layer in image :param rev: get layers rev :return: list of DockerImages """ image_layers = [ DockerImage(None, identifier=x, pull_policy=DockerImagePullPolicy.NEVER) for x in self.get_layer_ids() ] if not rev: image_layers.reverse() return image_layers
[ "def", "layers", "(", "self", ",", "rev", "=", "True", ")", ":", "image_layers", "=", "[", "DockerImage", "(", "None", ",", "identifier", "=", "x", ",", "pull_policy", "=", "DockerImagePullPolicy", ".", "NEVER", ")", "for", "x", "in", "self", ".", "get_layer_ids", "(", ")", "]", "if", "not", "rev", ":", "image_layers", ".", "reverse", "(", ")", "return", "image_layers" ]
Get list of DockerImage for every layer in image :param rev: bool, if False, reverse the order of the layers :return: list of DockerImages
[ "Get", "list", "of", "DockerImage", "for", "every", "layer", "in", "image" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L725-L738
train
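layers() wraps every ID from get_layer_ids() in a DockerImage that is never pulled; a short sketch with a placeholder image:

from conu.backend.docker.image import DockerImage  # import path assumed

img = DockerImage("registry.fedoraproject.org/fedora", tag="31")
layer_images = img.layers()              # order as returned by get_layer_ids()
reversed_layers = img.layers(rev=False)  # same layers, reversed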
user-cont/conu
conu/backend/docker/image.py
S2IDockerImage.extend
def extend(self, source, new_image_name, s2i_args=None): """ extend this s2i-enabled image using provided source, raises ConuException if `s2i build` fails :param source: str, source used to extend the image, can be path or url :param new_image_name: str, name of the new, extended image :param s2i_args: list of str, additional options and arguments provided to `s2i build` :return: S2Image instance """ s2i_args = s2i_args or [] c = self._s2i_command(["build"] + s2i_args + [source, self.get_full_name()]) if new_image_name: c.append(new_image_name) try: run_cmd(c) except subprocess.CalledProcessError as ex: raise ConuException("s2i build failed: %s" % ex) return S2IDockerImage(new_image_name)
python
def extend(self, source, new_image_name, s2i_args=None): """ extend this s2i-enabled image using provided source, raises ConuException if `s2i build` fails :param source: str, source used to extend the image, can be path or url :param new_image_name: str, name of the new, extended image :param s2i_args: list of str, additional options and arguments provided to `s2i build` :return: S2Image instance """ s2i_args = s2i_args or [] c = self._s2i_command(["build"] + s2i_args + [source, self.get_full_name()]) if new_image_name: c.append(new_image_name) try: run_cmd(c) except subprocess.CalledProcessError as ex: raise ConuException("s2i build failed: %s" % ex) return S2IDockerImage(new_image_name)
[ "def", "extend", "(", "self", ",", "source", ",", "new_image_name", ",", "s2i_args", "=", "None", ")", ":", "s2i_args", "=", "s2i_args", "or", "[", "]", "c", "=", "self", ".", "_s2i_command", "(", "[", "\"build\"", "]", "+", "s2i_args", "+", "[", "source", ",", "self", ".", "get_full_name", "(", ")", "]", ")", "if", "new_image_name", ":", "c", ".", "append", "(", "new_image_name", ")", "try", ":", "run_cmd", "(", "c", ")", "except", "subprocess", ".", "CalledProcessError", "as", "ex", ":", "raise", "ConuException", "(", "\"s2i build failed: %s\"", "%", "ex", ")", "return", "S2IDockerImage", "(", "new_image_name", ")" ]
extend this s2i-enabled image using provided source, raises ConuException if `s2i build` fails :param source: str, source used to extend the image, can be path or url :param new_image_name: str, name of the new, extended image :param s2i_args: list of str, additional options and arguments provided to `s2i build` :return: S2Image instance
[ "extend", "this", "s2i", "-", "enabled", "image", "using", "provided", "source", "raises", "ConuException", "if", "s2i", "build", "fails" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L775-L793
train
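A hedged sketch of an s2i build: the builder image, source URL and extra s2i flag are illustrative, and the s2i binary must be on PATH:

from conu.backend.docker.image import S2IDockerImage  # import path assumed

builder = S2IDockerImage("centos/python-36-centos7")  # any s2i-enabled builder image
app = builder.extend("https://github.com/sclorg/django-ex",
                     "my-django-app",
                     s2i_args=["--loglevel=2"])  # extra options handed to `s2i build`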
user-cont/conu
conu/backend/docker/image.py
S2IDockerImage.usage
def usage(self): """ Provide output of `s2i usage` :return: str """ c = self._s2i_command(["usage", self.get_full_name()]) with open(os.devnull, "w") as fd: process = subprocess.Popen(c, stdout=fd, stderr=subprocess.PIPE) _, output = process.communicate() retcode = process.poll() if retcode: raise ConuException("`s2i usage` failed: %s" % output) return output.decode("utf-8").strip()
python
def usage(self): """ Provide output of `s2i usage` :return: str """ c = self._s2i_command(["usage", self.get_full_name()]) with open(os.devnull, "w") as fd: process = subprocess.Popen(c, stdout=fd, stderr=subprocess.PIPE) _, output = process.communicate() retcode = process.poll() if retcode: raise ConuException("`s2i usage` failed: %s" % output) return output.decode("utf-8").strip()
[ "def", "usage", "(", "self", ")", ":", "c", "=", "self", ".", "_s2i_command", "(", "[", "\"usage\"", ",", "self", ".", "get_full_name", "(", ")", "]", ")", "with", "open", "(", "os", ".", "devnull", ",", "\"w\"", ")", "as", "fd", ":", "process", "=", "subprocess", ".", "Popen", "(", "c", ",", "stdout", "=", "fd", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "_", ",", "output", "=", "process", ".", "communicate", "(", ")", "retcode", "=", "process", ".", "poll", "(", ")", "if", "retcode", ":", "raise", "ConuException", "(", "\"`s2i usage` failed: %s\"", "%", "output", ")", "return", "output", ".", "decode", "(", "\"utf-8\"", ")", ".", "strip", "(", ")" ]
Provide output of `s2i usage` :return: str
[ "Provide", "output", "of", "s2i", "usage" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/image.py#L795-L808
train
user-cont/conu
conu/backend/origin/backend.py
OpenshiftBackend.http_request
def http_request(self, path="/", method="GET", host=None, port=None, json=False, data=None): """ perform a HTTP request :param path: str, path within the request, e.g. "/api/version" :param method: str, HTTP method :param host: str, if None, set to 127.0.0.1 :param port: str or int, if None, set to 8080 :param json: bool, should we expect json? :param data: data to send (can be dict, list, str) :return: dict """ host = host or '127.0.0.1' port = port or 8080 url = get_url(host=host, port=port, path=path) return self.http_session.request(method, url, json=json, data=data)
python
def http_request(self, path="/", method="GET", host=None, port=None, json=False, data=None): """ perform a HTTP request :param path: str, path within the request, e.g. "/api/version" :param method: str, HTTP method :param host: str, if None, set to 127.0.0.1 :param port: str or int, if None, set to 8080 :param json: bool, should we expect json? :param data: data to send (can be dict, list, str) :return: dict """ host = host or '127.0.0.1' port = port or 8080 url = get_url(host=host, port=port, path=path) return self.http_session.request(method, url, json=json, data=data)
[ "def", "http_request", "(", "self", ",", "path", "=", "\"/\"", ",", "method", "=", "\"GET\"", ",", "host", "=", "None", ",", "port", "=", "None", ",", "json", "=", "False", ",", "data", "=", "None", ")", ":", "host", "=", "host", "or", "'127.0.0.1'", "port", "=", "port", "or", "8080", "url", "=", "get_url", "(", "host", "=", "host", ",", "port", "=", "port", ",", "path", "=", "path", ")", "return", "self", ".", "http_session", ".", "request", "(", "method", ",", "url", ",", "json", "=", "json", ",", "data", "=", "data", ")" ]
perform an HTTP request :param path: str, path within the request, e.g. "/api/version" :param method: str, HTTP method :param host: str, if None, set to 127.0.0.1 :param port: str or int, if None, set to 8080 :param json: bool, should we expect json? :param data: data to send (can be dict, list, str) :return: response object returned by the underlying HTTP session
[ "perform", "a", "HTTP", "request" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/origin/backend.py#L62-L79
train
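A sketch of calling http_request against an OpenShift backend; constructor arguments (if any) and the path are assumptions, and note that per the code above the call returns whatever the backend's HTTP session returns rather than a parsed dict:

from conu.backend.origin.backend import OpenshiftBackend  # import path assumed

backend = OpenshiftBackend()
response = backend.http_request(path="/healthz", host="127.0.0.1", port=8443)
print(response.status_code, response.text)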
user-cont/conu
conu/backend/nspawn/image.py
NspawnImage.system_requirements
def system_requirements(): """ Check if all necessary packages are installed on system :return: None or raise exception if some tooling is missing """ command_exists("systemd-nspawn", ["systemd-nspawn", "--version"], "Command systemd-nspawn does not seems to be present on your system" "Do you have system with systemd") command_exists( "machinectl", ["machinectl", "--no-pager", "--help"], "Command machinectl does not seems to be present on your system" "Do you have system with systemd") if "Enforcing" in run_cmd(["getenforce"], return_output=True, ignore_status=True): logger.error("Please disable selinux (setenforce 0), selinux blocks some nspawn operations" "This may lead to strange behaviour")
python
def system_requirements(): """ Check if all necessary packages are installed on system :return: None or raise exception if some tooling is missing """ command_exists("systemd-nspawn", ["systemd-nspawn", "--version"], "Command systemd-nspawn does not seems to be present on your system" "Do you have system with systemd") command_exists( "machinectl", ["machinectl", "--no-pager", "--help"], "Command machinectl does not seems to be present on your system" "Do you have system with systemd") if "Enforcing" in run_cmd(["getenforce"], return_output=True, ignore_status=True): logger.error("Please disable selinux (setenforce 0), selinux blocks some nspawn operations" "This may lead to strange behaviour")
[ "def", "system_requirements", "(", ")", ":", "command_exists", "(", "\"systemd-nspawn\"", ",", "[", "\"systemd-nspawn\"", ",", "\"--version\"", "]", ",", "\"Command systemd-nspawn does not seems to be present on your system\"", "\"Do you have system with systemd\"", ")", "command_exists", "(", "\"machinectl\"", ",", "[", "\"machinectl\"", ",", "\"--no-pager\"", ",", "\"--help\"", "]", ",", "\"Command machinectl does not seems to be present on your system\"", "\"Do you have system with systemd\"", ")", "if", "\"Enforcing\"", "in", "run_cmd", "(", "[", "\"getenforce\"", "]", ",", "return_output", "=", "True", ",", "ignore_status", "=", "True", ")", ":", "logger", ".", "error", "(", "\"Please disable selinux (setenforce 0), selinux blocks some nspawn operations\"", "\"This may lead to strange behaviour\"", ")" ]
Check if all necessary packages are installed on system :return: None or raise exception if some tooling is missing
[ "Check", "if", "all", "necessary", "packages", "are", "installed", "on", "system" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/image.py#L156-L173
train
user-cont/conu
conu/backend/nspawn/image.py
NspawnImage._generate_id
def _generate_id(self): """ create new unique identifier """ name = self.name.replace(self.special_separator, "-").replace(".", "-") loc = "\/" if self.location: loc = self.location _id = "{PREFIX}{SEP}{NAME}{HASH}{SEP}".format( PREFIX=constants.CONU_ARTIFACT_TAG, NAME=name, HASH=hashlib.sha512(loc).hexdigest()[: 10], SEP=self.special_separator ) return _id
python
def _generate_id(self): """ create new unique identifier """ name = self.name.replace(self.special_separator, "-").replace(".", "-") loc = "\/" if self.location: loc = self.location _id = "{PREFIX}{SEP}{NAME}{HASH}{SEP}".format( PREFIX=constants.CONU_ARTIFACT_TAG, NAME=name, HASH=hashlib.sha512(loc).hexdigest()[: 10], SEP=self.special_separator ) return _id
[ "def", "_generate_id", "(", "self", ")", ":", "name", "=", "self", ".", "name", ".", "replace", "(", "self", ".", "special_separator", ",", "\"-\"", ")", ".", "replace", "(", "\".\"", ",", "\"-\"", ")", "loc", "=", "\"\\/\"", "if", "self", ".", "location", ":", "loc", "=", "self", ".", "location", "_id", "=", "\"{PREFIX}{SEP}{NAME}{HASH}{SEP}\"", ".", "format", "(", "PREFIX", "=", "constants", ".", "CONU_ARTIFACT_TAG", ",", "NAME", "=", "name", ",", "HASH", "=", "hashlib", ".", "sha512", "(", "loc", ")", ".", "hexdigest", "(", ")", "[", ":", "10", "]", ",", "SEP", "=", "self", ".", "special_separator", ")", "return", "_id" ]
create new unique identifier
[ "create", "new", "unique", "identifier" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/image.py#L209-L221
train
user-cont/conu
conu/backend/nspawn/image.py
NspawnImage.pull
def pull(self): """ Pull this image from URL. :return: None """ if not os.path.exists(CONU_IMAGES_STORE): os.makedirs(CONU_IMAGES_STORE) logger.debug( "Try to pull: {} -> {}".format(self.location, self.local_location)) if not self._is_local(): compressed_location = self.local_location + ".xz" run_cmd(["curl", "-f", "-L", "-o", compressed_location, self.location]) run_cmd(["xz", "-d", compressed_location]) else: if self.location.endswith("xz"): compressed_location = self.local_location + ".xz" run_cmd(["cp", self.location, compressed_location]) run_cmd(["xz", "-d", compressed_location]) else: run_cmd(["cp", self.location, self.local_location])
python
def pull(self): """ Pull this image from URL. :return: None """ if not os.path.exists(CONU_IMAGES_STORE): os.makedirs(CONU_IMAGES_STORE) logger.debug( "Try to pull: {} -> {}".format(self.location, self.local_location)) if not self._is_local(): compressed_location = self.local_location + ".xz" run_cmd(["curl", "-f", "-L", "-o", compressed_location, self.location]) run_cmd(["xz", "-d", compressed_location]) else: if self.location.endswith("xz"): compressed_location = self.local_location + ".xz" run_cmd(["cp", self.location, compressed_location]) run_cmd(["xz", "-d", compressed_location]) else: run_cmd(["cp", self.location, self.local_location])
[ "def", "pull", "(", "self", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "CONU_IMAGES_STORE", ")", ":", "os", ".", "makedirs", "(", "CONU_IMAGES_STORE", ")", "logger", ".", "debug", "(", "\"Try to pull: {} -> {}\"", ".", "format", "(", "self", ".", "location", ",", "self", ".", "local_location", ")", ")", "if", "not", "self", ".", "_is_local", "(", ")", ":", "compressed_location", "=", "self", ".", "local_location", "+", "\".xz\"", "run_cmd", "(", "[", "\"curl\"", ",", "\"-f\"", ",", "\"-L\"", ",", "\"-o\"", ",", "compressed_location", ",", "self", ".", "location", "]", ")", "run_cmd", "(", "[", "\"xz\"", ",", "\"-d\"", ",", "compressed_location", "]", ")", "else", ":", "if", "self", ".", "location", ".", "endswith", "(", "\"xz\"", ")", ":", "compressed_location", "=", "self", ".", "local_location", "+", "\".xz\"", "run_cmd", "(", "[", "\"cp\"", ",", "self", ".", "location", ",", "compressed_location", "]", ")", "run_cmd", "(", "[", "\"xz\"", ",", "\"-d\"", ",", "compressed_location", "]", ")", "else", ":", "run_cmd", "(", "[", "\"cp\"", ",", "self", ".", "location", ",", "self", ".", "local_location", "]", ")" ]
Pull this image from URL. :return: None
[ "Pull", "this", "image", "from", "URL", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/image.py#L232-L253
train
user-cont/conu
conu/backend/nspawn/image.py
NspawnImage.run_via_binary
def run_via_binary(self, command=None, foreground=False, volumes=None, additional_opts=None, default_options=None, name=None, *args, **kwargs): """ Create new instance NspawnContianer in case of not running at foreground, in case foreground run, return process object :param command: list - command to run :param foreground: bool - run process at foreground :param volumes: list - put additional bind mounts :param additional_opts: list of more boot options for systemd-nspawn command :param default_options: default boot option (-b) :param name: str - name of running instance :param args: pass thru params to subprocess.Popen :param kwargs: pass thru params to subprocess.Popen :return: process or NspawnContianer instance """ command = deepcopy(command) or [] volumes = deepcopy(volumes) or [] additional_opts = deepcopy(additional_opts) or [] internalkw = deepcopy(kwargs) or {} inernalargs = deepcopy(args) or [] if default_options is None: default_options = ["-b"] # TODO: reconsile parameters (changed from API definition) logger.info("run container via binary in background") machine_name = constants.CONU_ARTIFACT_TAG if name: machine_name += name else: machine_name += random_str() if not foreground: # WARN: avoid to run boot without stderr and stdout to terminal, it breaks terminal, # it systemd-nspawn does some magic with console # TODO: is able to avoid this behaviour in better way? internalkw["stdout"] = subprocess.PIPE internalkw["stderr"] = subprocess.PIPE additional_opts += default_options if volumes: additional_opts += self.get_volume_options(volumes=volumes) logger.debug("starting NSPAWN") systemd_command = [ "systemd-nspawn", "--machine", machine_name, "-i", self.local_location] + additional_opts + command logger.debug("Start command: %s" % " ".join(systemd_command)) callback_method = (subprocess.Popen, systemd_command, inernalargs, internalkw) self.container_process = NspawnContainer.internal_run_container( name=machine_name, callback_method=callback_method, foreground=foreground ) if foreground: return self.container_process else: return NspawnContainer(self, None, name=machine_name, start_process=self.container_process, start_action=callback_method)
python
def run_via_binary(self, command=None, foreground=False, volumes=None, additional_opts=None, default_options=None, name=None, *args, **kwargs): """ Create new instance NspawnContianer in case of not running at foreground, in case foreground run, return process object :param command: list - command to run :param foreground: bool - run process at foreground :param volumes: list - put additional bind mounts :param additional_opts: list of more boot options for systemd-nspawn command :param default_options: default boot option (-b) :param name: str - name of running instance :param args: pass thru params to subprocess.Popen :param kwargs: pass thru params to subprocess.Popen :return: process or NspawnContianer instance """ command = deepcopy(command) or [] volumes = deepcopy(volumes) or [] additional_opts = deepcopy(additional_opts) or [] internalkw = deepcopy(kwargs) or {} inernalargs = deepcopy(args) or [] if default_options is None: default_options = ["-b"] # TODO: reconsile parameters (changed from API definition) logger.info("run container via binary in background") machine_name = constants.CONU_ARTIFACT_TAG if name: machine_name += name else: machine_name += random_str() if not foreground: # WARN: avoid to run boot without stderr and stdout to terminal, it breaks terminal, # it systemd-nspawn does some magic with console # TODO: is able to avoid this behaviour in better way? internalkw["stdout"] = subprocess.PIPE internalkw["stderr"] = subprocess.PIPE additional_opts += default_options if volumes: additional_opts += self.get_volume_options(volumes=volumes) logger.debug("starting NSPAWN") systemd_command = [ "systemd-nspawn", "--machine", machine_name, "-i", self.local_location] + additional_opts + command logger.debug("Start command: %s" % " ".join(systemd_command)) callback_method = (subprocess.Popen, systemd_command, inernalargs, internalkw) self.container_process = NspawnContainer.internal_run_container( name=machine_name, callback_method=callback_method, foreground=foreground ) if foreground: return self.container_process else: return NspawnContainer(self, None, name=machine_name, start_process=self.container_process, start_action=callback_method)
[ "def", "run_via_binary", "(", "self", ",", "command", "=", "None", ",", "foreground", "=", "False", ",", "volumes", "=", "None", ",", "additional_opts", "=", "None", ",", "default_options", "=", "None", ",", "name", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "command", "=", "deepcopy", "(", "command", ")", "or", "[", "]", "volumes", "=", "deepcopy", "(", "volumes", ")", "or", "[", "]", "additional_opts", "=", "deepcopy", "(", "additional_opts", ")", "or", "[", "]", "internalkw", "=", "deepcopy", "(", "kwargs", ")", "or", "{", "}", "inernalargs", "=", "deepcopy", "(", "args", ")", "or", "[", "]", "if", "default_options", "is", "None", ":", "default_options", "=", "[", "\"-b\"", "]", "# TODO: reconsile parameters (changed from API definition)", "logger", ".", "info", "(", "\"run container via binary in background\"", ")", "machine_name", "=", "constants", ".", "CONU_ARTIFACT_TAG", "if", "name", ":", "machine_name", "+=", "name", "else", ":", "machine_name", "+=", "random_str", "(", ")", "if", "not", "foreground", ":", "# WARN: avoid to run boot without stderr and stdout to terminal, it breaks terminal,", "# it systemd-nspawn does some magic with console", "# TODO: is able to avoid this behaviour in better way?", "internalkw", "[", "\"stdout\"", "]", "=", "subprocess", ".", "PIPE", "internalkw", "[", "\"stderr\"", "]", "=", "subprocess", ".", "PIPE", "additional_opts", "+=", "default_options", "if", "volumes", ":", "additional_opts", "+=", "self", ".", "get_volume_options", "(", "volumes", "=", "volumes", ")", "logger", ".", "debug", "(", "\"starting NSPAWN\"", ")", "systemd_command", "=", "[", "\"systemd-nspawn\"", ",", "\"--machine\"", ",", "machine_name", ",", "\"-i\"", ",", "self", ".", "local_location", "]", "+", "additional_opts", "+", "command", "logger", ".", "debug", "(", "\"Start command: %s\"", "%", "\" \"", ".", "join", "(", "systemd_command", ")", ")", "callback_method", "=", "(", "subprocess", ".", "Popen", ",", "systemd_command", ",", "inernalargs", ",", "internalkw", ")", "self", ".", "container_process", "=", "NspawnContainer", ".", "internal_run_container", "(", "name", "=", "machine_name", ",", "callback_method", "=", "callback_method", ",", "foreground", "=", "foreground", ")", "if", "foreground", ":", "return", "self", ".", "container_process", "else", ":", "return", "NspawnContainer", "(", "self", ",", "None", ",", "name", "=", "machine_name", ",", "start_process", "=", "self", ".", "container_process", ",", "start_action", "=", "callback_method", ")" ]
Create a new NspawnContainer instance when not running in the foreground; when running in the foreground, return the process object :param command: list - command to run :param foreground: bool - run process in the foreground :param volumes: list - additional bind mounts :param additional_opts: list of extra boot options for the systemd-nspawn command :param default_options: default boot option (-b) :param name: str - name of the running instance :param args: pass-through params to subprocess.Popen :param kwargs: pass-through params to subprocess.Popen :return: process or NspawnContainer instance
[ "Create", "new", "instance", "NspawnContianer", "in", "case", "of", "not", "running", "at", "foreground", "in", "case", "foreground", "run", "return", "process", "object" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/image.py#L345-L403
train
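A hypothetical sketch of booting a container from an existing nspawn image; it assumes root privileges, systemd-nspawn/machinectl on the host, at least one image already in the conu image store, and the backend import path:

from conu.backend.nspawn.backend import NspawnBackend  # import path assumed

backend = NspawnBackend()
image = backend.list_images()[0]          # reuse an image from the conu store
container = image.run_via_binary(
    volumes=["/opt/data:/opt/data"],      # extra bind mounts
    additional_opts=["--network-veth"],   # handed straight to systemd-nspawn
    name="demo",
)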
user-cont/conu
conu/utils/rpms.py
process_rpm_ql_line
def process_rpm_ql_line(line_str, allowed_keys): """ Checks single line of rpm-ql for correct keys :param line_str: line to process :param allowed_keys: list of allowed keys :return: bool """ try: name, key_str = line_str.split(' ', 1) except ValueError: logger.error("Failed to split line '{0}".format(repr(line_str))) return False if name in no_key_pkgs: return True if key_str == NONE_KEY: logger.error("Unsigned package {0}".format(name)) return False key_match = re.match(KEY, key_str) if not key_match: logger.error('Could not process line "{0}"'.format(line_str)) return False used_key = key_match.group(1) if used_key in allowed_keys: return True logger.error("Wrong key for '{0}' ({1})".format(name, used_key)) return False
python
def process_rpm_ql_line(line_str, allowed_keys): """ Checks single line of rpm-ql for correct keys :param line_str: line to process :param allowed_keys: list of allowed keys :return: bool """ try: name, key_str = line_str.split(' ', 1) except ValueError: logger.error("Failed to split line '{0}".format(repr(line_str))) return False if name in no_key_pkgs: return True if key_str == NONE_KEY: logger.error("Unsigned package {0}".format(name)) return False key_match = re.match(KEY, key_str) if not key_match: logger.error('Could not process line "{0}"'.format(line_str)) return False used_key = key_match.group(1) if used_key in allowed_keys: return True logger.error("Wrong key for '{0}' ({1})".format(name, used_key)) return False
[ "def", "process_rpm_ql_line", "(", "line_str", ",", "allowed_keys", ")", ":", "try", ":", "name", ",", "key_str", "=", "line_str", ".", "split", "(", "' '", ",", "1", ")", "except", "ValueError", ":", "logger", ".", "error", "(", "\"Failed to split line '{0}\"", ".", "format", "(", "repr", "(", "line_str", ")", ")", ")", "return", "False", "if", "name", "in", "no_key_pkgs", ":", "return", "True", "if", "key_str", "==", "NONE_KEY", ":", "logger", ".", "error", "(", "\"Unsigned package {0}\"", ".", "format", "(", "name", ")", ")", "return", "False", "key_match", "=", "re", ".", "match", "(", "KEY", ",", "key_str", ")", "if", "not", "key_match", ":", "logger", ".", "error", "(", "'Could not process line \"{0}\"'", ".", "format", "(", "line_str", ")", ")", "return", "False", "used_key", "=", "key_match", ".", "group", "(", "1", ")", "if", "used_key", "in", "allowed_keys", ":", "return", "True", "logger", ".", "error", "(", "\"Wrong key for '{0}' ({1})\"", ".", "format", "(", "name", ",", "used_key", ")", ")", "return", "False" ]
Checks single line of rpm-ql for correct keys :param line_str: line to process :param allowed_keys: list of allowed keys :return: bool
[ "Checks", "single", "line", "of", "rpm", "-", "ql", "for", "correct", "keys" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/rpms.py#L29-L55
train
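A standalone sketch of the line format the checker consumes (one `%{name} %{SIGPGP:pgpsig}` line as printed by rpm); the key ID is a placeholder, and whether it matches depends on the KEY regular expression defined in the module:

from conu.utils.rpms import process_rpm_ql_line

line = "bash RSA/SHA256, Mon 20 Jul 2020 12:00:00 PM UTC, Key ID 0123456789abcdef"
print(process_rpm_ql_line(line, allowed_keys=["0123456789abcdef"]))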
user-cont/conu
conu/utils/rpms.py
check_signatures
def check_signatures(pkg_list, allowed_keys): """ Go through list of packages with signatures and check if all are properly signed :param pkg_list: list of packages in format '%{name} %{SIGPGP:pgpsig}' :param allowed_keys: list of allowed keys :return: bool """ all_passed = True for line_str in pkg_list: all_passed &= process_rpm_ql_line(line_str.strip(), allowed_keys) if not all_passed: raise PackageSignatureException( 'Error while checking rpm signatures, see logs for more info')
python
def check_signatures(pkg_list, allowed_keys): """ Go through list of packages with signatures and check if all are properly signed :param pkg_list: list of packages in format '%{name} %{SIGPGP:pgpsig}' :param allowed_keys: list of allowed keys :return: bool """ all_passed = True for line_str in pkg_list: all_passed &= process_rpm_ql_line(line_str.strip(), allowed_keys) if not all_passed: raise PackageSignatureException( 'Error while checking rpm signatures, see logs for more info')
[ "def", "check_signatures", "(", "pkg_list", ",", "allowed_keys", ")", ":", "all_passed", "=", "True", "for", "line_str", "in", "pkg_list", ":", "all_passed", "&=", "process_rpm_ql_line", "(", "line_str", ".", "strip", "(", ")", ",", "allowed_keys", ")", "if", "not", "all_passed", ":", "raise", "PackageSignatureException", "(", "'Error while checking rpm signatures, see logs for more info'", ")" ]
Go through list of packages with signatures and check if all are properly signed :param pkg_list: list of packages in format '%{name} %{SIGPGP:pgpsig}' :param allowed_keys: list of allowed keys :return: bool
[ "Go", "through", "list", "of", "packages", "with", "signatures", "and", "check", "if", "all", "are", "properly", "signed" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/rpms.py#L58-L72
train
user-cont/conu
conu/backend/docker/container.py
DockerContainer.get_ports
def get_ports(self): """ get ports specified in container metadata :return: list of str """ ports = [] container_ports = self.inspect(refresh=True)["NetworkSettings"]["Ports"] if not container_ports: return ports for p in container_ports: # TODO: gracefullness, error handling ports.append(p.split("/")[0]) return ports
python
def get_ports(self): """ get ports specified in container metadata :return: list of str """ ports = [] container_ports = self.inspect(refresh=True)["NetworkSettings"]["Ports"] if not container_ports: return ports for p in container_ports: # TODO: gracefullness, error handling ports.append(p.split("/")[0]) return ports
[ "def", "get_ports", "(", "self", ")", ":", "ports", "=", "[", "]", "container_ports", "=", "self", ".", "inspect", "(", "refresh", "=", "True", ")", "[", "\"NetworkSettings\"", "]", "[", "\"Ports\"", "]", "if", "not", "container_ports", ":", "return", "ports", "for", "p", "in", "container_ports", ":", "# TODO: gracefullness, error handling", "ports", ".", "append", "(", "p", ".", "split", "(", "\"/\"", ")", "[", "0", "]", ")", "return", "ports" ]
get ports specified in container metadata :return: list of str
[ "get", "ports", "specified", "in", "container", "metadata" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/container.py#L350-L363
train
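A sketch of reading the declared ports of a running container; the backend usage pattern and the image name are assumptions:

from conu.backend.docker.backend import DockerBackend  # import path assumed

with DockerBackend() as backend:
    image = backend.ImageClass("registry.fedoraproject.org/f29/httpd")
    container = image.run_via_binary()
    try:
        print(container.get_ports())  # e.g. ['8080', '8443'] for an httpd image
    finally:
        container.stop()
        container.delete()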
user-cont/conu
conu/apidefs/backend.py
Backend._clean_tmp_dirs
def _clean_tmp_dirs(self): """ Remove temporary dir associated with this backend instance. :return: None """ def onerror(fnc, path, excinfo): # we might not have rights to do this, the files could be owned by root self.logger.info("we were not able to remove temporary file %s: %s", path, excinfo[1]) shutil.rmtree(self.tmpdir, onerror=onerror) self.tmpdir = None global _backend_tmpdir _backend_tmpdir = None
python
def _clean_tmp_dirs(self): """ Remove temporary dir associated with this backend instance. :return: None """ def onerror(fnc, path, excinfo): # we might not have rights to do this, the files could be owned by root self.logger.info("we were not able to remove temporary file %s: %s", path, excinfo[1]) shutil.rmtree(self.tmpdir, onerror=onerror) self.tmpdir = None global _backend_tmpdir _backend_tmpdir = None
[ "def", "_clean_tmp_dirs", "(", "self", ")", ":", "def", "onerror", "(", "fnc", ",", "path", ",", "excinfo", ")", ":", "# we might not have rights to do this, the files could be owned by root", "self", ".", "logger", ".", "info", "(", "\"we were not able to remove temporary file %s: %s\"", ",", "path", ",", "excinfo", "[", "1", "]", ")", "shutil", ".", "rmtree", "(", "self", ".", "tmpdir", ",", "onerror", "=", "onerror", ")", "self", ".", "tmpdir", "=", "None", "global", "_backend_tmpdir", "_backend_tmpdir", "=", "None" ]
Remove temporary dir associated with this backend instance. :return: None
[ "Remove", "temporary", "dir", "associated", "with", "this", "backend", "instance", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/apidefs/backend.py#L147-L161
train
user-cont/conu
conu/apidefs/backend.py
Backend._clean
def _clean(self): """ Method for cleaning according to object cleanup policy value :return: None """ if CleanupPolicy.EVERYTHING in self.cleanup: self.cleanup_containers() self.cleanup_volumes() self.cleanup_images() self._clean_tmp_dirs() else: if CleanupPolicy.CONTAINERS in self.cleanup: self.cleanup_containers() if CleanupPolicy.VOLUMES in self.cleanup: self.cleanup_volumes() if CleanupPolicy.IMAGES in self.cleanup: self.cleanup_images() if CleanupPolicy.TMP_DIRS in self.cleanup: self._clean_tmp_dirs()
python
def _clean(self): """ Method for cleaning according to object cleanup policy value :return: None """ if CleanupPolicy.EVERYTHING in self.cleanup: self.cleanup_containers() self.cleanup_volumes() self.cleanup_images() self._clean_tmp_dirs() else: if CleanupPolicy.CONTAINERS in self.cleanup: self.cleanup_containers() if CleanupPolicy.VOLUMES in self.cleanup: self.cleanup_volumes() if CleanupPolicy.IMAGES in self.cleanup: self.cleanup_images() if CleanupPolicy.TMP_DIRS in self.cleanup: self._clean_tmp_dirs()
[ "def", "_clean", "(", "self", ")", ":", "if", "CleanupPolicy", ".", "EVERYTHING", "in", "self", ".", "cleanup", ":", "self", ".", "cleanup_containers", "(", ")", "self", ".", "cleanup_volumes", "(", ")", "self", ".", "cleanup_images", "(", ")", "self", ".", "_clean_tmp_dirs", "(", ")", "else", ":", "if", "CleanupPolicy", ".", "CONTAINERS", "in", "self", ".", "cleanup", ":", "self", ".", "cleanup_containers", "(", ")", "if", "CleanupPolicy", ".", "VOLUMES", "in", "self", ".", "cleanup", ":", "self", ".", "cleanup_volumes", "(", ")", "if", "CleanupPolicy", ".", "IMAGES", "in", "self", ".", "cleanup", ":", "self", ".", "cleanup_images", "(", ")", "if", "CleanupPolicy", ".", "TMP_DIRS", "in", "self", ".", "cleanup", ":", "self", ".", "_clean_tmp_dirs", "(", ")" ]
Clean up resources according to this object's cleanup policy :return: None
[ "Method", "for", "cleaning", "according", "to", "object", "cleanup", "policy", "value" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/apidefs/backend.py#L187-L206
train
user-cont/conu
conu/backend/nspawn/backend.py
NspawnBackend.list_containers
def list_containers(self): """ list all available nspawn containers :return: collection of instances of :class:`conu.backend.nspawn.container.NspawnContainer` """ data = run_cmd(["machinectl", "list", "--no-legend", "--no-pager"], return_output=True) output = [] reg = re.compile(r"\s+") for line in data.split("\n"): stripped = line.strip() if stripped: parts = reg.split(stripped) name = parts[0] output.append(self.ContainerClass(None, None, name=name)) return output
python
def list_containers(self): """ list all available nspawn containers :return: collection of instances of :class:`conu.backend.nspawn.container.NspawnContainer` """ data = run_cmd(["machinectl", "list", "--no-legend", "--no-pager"], return_output=True) output = [] reg = re.compile(r"\s+") for line in data.split("\n"): stripped = line.strip() if stripped: parts = reg.split(stripped) name = parts[0] output.append(self.ContainerClass(None, None, name=name)) return output
[ "def", "list_containers", "(", "self", ")", ":", "data", "=", "run_cmd", "(", "[", "\"machinectl\"", ",", "\"list\"", ",", "\"--no-legend\"", ",", "\"--no-pager\"", "]", ",", "return_output", "=", "True", ")", "output", "=", "[", "]", "reg", "=", "re", ".", "compile", "(", "r\"\\s+\"", ")", "for", "line", "in", "data", ".", "split", "(", "\"\\n\"", ")", ":", "stripped", "=", "line", ".", "strip", "(", ")", "if", "stripped", ":", "parts", "=", "reg", ".", "split", "(", "stripped", ")", "name", "=", "parts", "[", "0", "]", "output", ".", "append", "(", "self", ".", "ContainerClass", "(", "None", ",", "None", ",", "name", "=", "name", ")", ")", "return", "output" ]
list all available nspawn containers :return: collection of instances of :class:`conu.backend.nspawn.container.NspawnContainer`
[ "list", "all", "available", "nspawn", "containers" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/backend.py#L44-L60
train
user-cont/conu
conu/backend/nspawn/backend.py
NspawnBackend.list_images
def list_images(self): """ list all available nspawn images :return: collection of instances of :class:`conu.backend.nspawn.image.NspawnImage` """ # Fedora-Cloud-Base-27-1.6.x86_64 raw no 601.7M Sun 2017-11-05 08:30:10 CET \ # Sun 2017-11-05 08:30:10 CET data = os.listdir(CONU_IMAGES_STORE) output = [] for name in data: output.append(self.ImageClass(name, pull_policy=ImagePullPolicy.NEVER)) return output
python
def list_images(self): """ list all available nspawn images :return: collection of instances of :class:`conu.backend.nspawn.image.NspawnImage` """ # Fedora-Cloud-Base-27-1.6.x86_64 raw no 601.7M Sun 2017-11-05 08:30:10 CET \ # Sun 2017-11-05 08:30:10 CET data = os.listdir(CONU_IMAGES_STORE) output = [] for name in data: output.append(self.ImageClass(name, pull_policy=ImagePullPolicy.NEVER)) return output
[ "def", "list_images", "(", "self", ")", ":", "# Fedora-Cloud-Base-27-1.6.x86_64 raw no 601.7M Sun 2017-11-05 08:30:10 CET \\", "# Sun 2017-11-05 08:30:10 CET", "data", "=", "os", ".", "listdir", "(", "CONU_IMAGES_STORE", ")", "output", "=", "[", "]", "for", "name", "in", "data", ":", "output", ".", "append", "(", "self", ".", "ImageClass", "(", "name", ",", "pull_policy", "=", "ImagePullPolicy", ".", "NEVER", ")", ")", "return", "output" ]
list all available nspawn images :return: collection of instances of :class:`conu.backend.nspawn.image.NspawnImage`
[ "list", "all", "available", "nspawn", "images" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/backend.py#L62-L74
train
user-cont/conu
conu/backend/nspawn/backend.py
NspawnBackend.cleanup_containers
def cleanup_containers(self): """ stop all container created by conu :return: None """ for cont in self.list_containers(): if CONU_ARTIFACT_TAG in cont.name: try: logger.debug("removing container %s created by conu", cont) # TODO: move this functionality to container.delete run_cmd(["machinectl", "terminate", cont.name]) except subprocess.CalledProcessError as e: logger.error("unable to remove container %s: %r", cont, e)
python
def cleanup_containers(self): """ stop all container created by conu :return: None """ for cont in self.list_containers(): if CONU_ARTIFACT_TAG in cont.name: try: logger.debug("removing container %s created by conu", cont) # TODO: move this functionality to container.delete run_cmd(["machinectl", "terminate", cont.name]) except subprocess.CalledProcessError as e: logger.error("unable to remove container %s: %r", cont, e)
[ "def", "cleanup_containers", "(", "self", ")", ":", "for", "cont", "in", "self", ".", "list_containers", "(", ")", ":", "if", "CONU_ARTIFACT_TAG", "in", "cont", ".", "name", ":", "try", ":", "logger", ".", "debug", "(", "\"removing container %s created by conu\"", ",", "cont", ")", "# TODO: move this functionality to container.delete", "run_cmd", "(", "[", "\"machinectl\"", ",", "\"terminate\"", ",", "cont", ".", "name", "]", ")", "except", "subprocess", ".", "CalledProcessError", "as", "e", ":", "logger", ".", "error", "(", "\"unable to remove container %s: %r\"", ",", "cont", ",", "e", ")" ]
stop all containers created by conu :return: None
[ "stop", "all", "containers", "created", "by", "conu" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/nspawn/backend.py#L76-L89
train
user-cont/conu
conu/utils/__init__.py
check_port
def check_port(port, host, timeout=10): """ connect to port on host and return True on success :param port: int, port to check :param host: string, host address :param timeout: int, number of seconds spent trying :return: bool """ logger.info("trying to open connection to %s:%s", host, port) sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: sock.settimeout(timeout) result = sock.connect_ex((host, port)) logger.info("was connection successful? errno: %s", result) if result == 0: logger.debug('port is opened: %s:%s' % (host, port)) return True else: logger.debug('port is closed: %s:%s' % (host, port)) return False finally: sock.close()
python
def check_port(port, host, timeout=10): """ connect to port on host and return True on success :param port: int, port to check :param host: string, host address :param timeout: int, number of seconds spent trying :return: bool """ logger.info("trying to open connection to %s:%s", host, port) sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: sock.settimeout(timeout) result = sock.connect_ex((host, port)) logger.info("was connection successful? errno: %s", result) if result == 0: logger.debug('port is opened: %s:%s' % (host, port)) return True else: logger.debug('port is closed: %s:%s' % (host, port)) return False finally: sock.close()
[ "def", "check_port", "(", "port", ",", "host", ",", "timeout", "=", "10", ")", ":", "logger", ".", "info", "(", "\"trying to open connection to %s:%s\"", ",", "host", ",", "port", ")", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "try", ":", "sock", ".", "settimeout", "(", "timeout", ")", "result", "=", "sock", ".", "connect_ex", "(", "(", "host", ",", "port", ")", ")", "logger", ".", "info", "(", "\"was connection successful? errno: %s\"", ",", "result", ")", "if", "result", "==", "0", ":", "logger", ".", "debug", "(", "'port is opened: %s:%s'", "%", "(", "host", ",", "port", ")", ")", "return", "True", "else", ":", "logger", ".", "debug", "(", "'port is closed: %s:%s'", "%", "(", "host", ",", "port", ")", ")", "return", "False", "finally", ":", "sock", ".", "close", "(", ")" ]
connect to port on host and return True on success :param port: int, port to check :param host: string, host address :param timeout: int, number of seconds spent trying :return: bool
[ "connect", "to", "port", "on", "host", "and", "return", "True", "on", "success" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/__init__.py#L54-L76
train
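A short usage sketch for check_port; the import path follows the record's module path, and the host and port values are only illustrative:

from conu.utils import check_port

# True only if something accepts a TCP connection on 127.0.0.1:8080 within 2 seconds
if check_port(8080, "127.0.0.1", timeout=2):
    print("service is up")
else:
    print("nothing is listening yet")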
user-cont/conu
conu/utils/__init__.py
get_selinux_status
def get_selinux_status(): """ get SELinux status of host :return: string, one of Enforced, Permissive, Disabled """ getenforce_command_exists() # alternatively, we could read directly from /sys/fs/selinux/{enforce,status}, but status is # empty (why?) and enforce doesn't tell whether SELinux is disabled or not o = run_cmd(["getenforce"], return_output=True).strip() # libselinux-utils logger.debug("SELinux is %r", o) return o
python
def get_selinux_status(): """ get SELinux status of host :return: string, one of Enforced, Permissive, Disabled """ getenforce_command_exists() # alternatively, we could read directly from /sys/fs/selinux/{enforce,status}, but status is # empty (why?) and enforce doesn't tell whether SELinux is disabled or not o = run_cmd(["getenforce"], return_output=True).strip() # libselinux-utils logger.debug("SELinux is %r", o) return o
[ "def", "get_selinux_status", "(", ")", ":", "getenforce_command_exists", "(", ")", "# alternatively, we could read directly from /sys/fs/selinux/{enforce,status}, but status is", "# empty (why?) and enforce doesn't tell whether SELinux is disabled or not", "o", "=", "run_cmd", "(", "[", "\"getenforce\"", "]", ",", "return_output", "=", "True", ")", ".", "strip", "(", ")", "# libselinux-utils", "logger", ".", "debug", "(", "\"SELinux is %r\"", ",", "o", ")", "return", "o" ]
get SELinux status of host :return: string, one of Enforcing, Permissive, Disabled
[ "get", "SELinux", "status", "of", "host" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/__init__.py#L79-L90
train
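A minimal usage sketch for get_selinux_status, assuming the function is importable from conu.utils as the record's path suggests and that `getenforce` (libselinux-utils) is installed on the host:

from conu.utils import get_selinux_status

# prints one of the getenforce values, e.g. "Enforcing", "Permissive" or "Disabled"
print(get_selinux_status())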
user-cont/conu
conu/utils/__init__.py
random_str
def random_str(size=10): """ create random string of selected size :param size: int, length of the string :return: the string """ return ''.join(random.choice(string.ascii_lowercase) for _ in range(size))
python
def random_str(size=10): """ create random string of selected size :param size: int, length of the string :return: the string """ return ''.join(random.choice(string.ascii_lowercase) for _ in range(size))
[ "def", "random_str", "(", "size", "=", "10", ")", ":", "return", "''", ".", "join", "(", "random", ".", "choice", "(", "string", ".", "ascii_lowercase", ")", "for", "_", "in", "range", "(", "size", ")", ")" ]
create random string of selected size :param size: int, length of the string :return: the string
[ "create", "random", "string", "of", "selected", "size" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/__init__.py#L102-L109
train
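A trivial usage sketch for random_str, e.g. for generating a throwaway container name (the prefix is illustrative):

from conu.utils import random_str

name = "conu-test-" + random_str(size=6)  # e.g. "conu-test-xqzvbn"
print(name)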
user-cont/conu
conu/utils/__init__.py
run_cmd
def run_cmd(cmd, return_output=False, ignore_status=False, log_output=True, **kwargs): """ run provided command on host system using the same user as you invoked this code, raises subprocess.CalledProcessError if it fails :param cmd: list of str :param return_output: bool, return output of the command :param ignore_status: bool, do not fail in case nonzero return code :param log_output: bool, if True, log output to debug log :param kwargs: pass keyword arguments to subprocess.check_* functions; for more info, please check `help(subprocess.Popen)` :return: None or str """ logger.debug('command: "%s"' % ' '.join(cmd)) process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, **kwargs) output = process.communicate()[0] if log_output: logger.debug(output) if process.returncode > 0: if ignore_status: if return_output: return output else: return process.returncode else: raise subprocess.CalledProcessError(cmd=cmd, returncode=process.returncode) if return_output: return output
python
def run_cmd(cmd, return_output=False, ignore_status=False, log_output=True, **kwargs): """ run provided command on host system using the same user as you invoked this code, raises subprocess.CalledProcessError if it fails :param cmd: list of str :param return_output: bool, return output of the command :param ignore_status: bool, do not fail in case nonzero return code :param log_output: bool, if True, log output to debug log :param kwargs: pass keyword arguments to subprocess.check_* functions; for more info, please check `help(subprocess.Popen)` :return: None or str """ logger.debug('command: "%s"' % ' '.join(cmd)) process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, **kwargs) output = process.communicate()[0] if log_output: logger.debug(output) if process.returncode > 0: if ignore_status: if return_output: return output else: return process.returncode else: raise subprocess.CalledProcessError(cmd=cmd, returncode=process.returncode) if return_output: return output
[ "def", "run_cmd", "(", "cmd", ",", "return_output", "=", "False", ",", "ignore_status", "=", "False", ",", "log_output", "=", "True", ",", "*", "*", "kwargs", ")", ":", "logger", ".", "debug", "(", "'command: \"%s\"'", "%", "' '", ".", "join", "(", "cmd", ")", ")", "process", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "STDOUT", ",", "universal_newlines", "=", "True", ",", "*", "*", "kwargs", ")", "output", "=", "process", ".", "communicate", "(", ")", "[", "0", "]", "if", "log_output", ":", "logger", ".", "debug", "(", "output", ")", "if", "process", ".", "returncode", ">", "0", ":", "if", "ignore_status", ":", "if", "return_output", ":", "return", "output", "else", ":", "return", "process", ".", "returncode", "else", ":", "raise", "subprocess", ".", "CalledProcessError", "(", "cmd", "=", "cmd", ",", "returncode", "=", "process", ".", "returncode", ")", "if", "return_output", ":", "return", "output" ]
run provided command on host system using the same user as you invoked this code, raises subprocess.CalledProcessError if it fails :param cmd: list of str :param return_output: bool, return output of the command :param ignore_status: bool, do not fail in case nonzero return code :param log_output: bool, if True, log output to debug log :param kwargs: pass keyword arguments to subprocess.check_* functions; for more info, please check `help(subprocess.Popen)` :return: None or str
[ "run", "provided", "command", "on", "host", "system", "using", "the", "same", "user", "as", "you", "invoked", "this", "code", "raises", "subprocess", ".", "CalledProcessError", "if", "it", "fails" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/__init__.py#L112-L141
train
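A usage sketch for run_cmd showing the two most common flag combinations; the commands themselves are only illustrative:

from conu.utils import run_cmd

# capture and return the command output instead of only logging it
kernel = run_cmd(["uname", "-r"], return_output=True).strip()
print(kernel)

# tolerate a non-zero exit code: with ignore_status=True the return code is returned instead of raising
rc = run_cmd(["false"], ignore_status=True)
print(rc)  # 1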
user-cont/conu
conu/utils/__init__.py
command_exists
def command_exists(command, noop_invocation, exc_msg): """ Verify that the provided command exists. Raise CommandDoesNotExistException in case of an error or if the command does not exist. :param command: str, command to check (python 3 only) :param noop_invocation: list of str, command to check (python 2 only) :param exc_msg: str, message of exception when command does not exist :return: bool, True if everything's all right (otherwise exception is thrown) """ try: found = bool(shutil.which(command)) # py3 only except AttributeError: # py2 branch try: p = subprocess.Popen(noop_invocation, stdout=subprocess.PIPE, stderr=subprocess.PIPE) except OSError: found = False else: stdout, stderr = p.communicate() found = p.returncode == 0 if not found: logger.error("`%s` exited with a non-zero return code (%s)", noop_invocation, p.returncode) logger.error("command stdout = %s", stdout) logger.error("command stderr = %s", stderr) if not found: raise CommandDoesNotExistException(exc_msg) return True
python
def command_exists(command, noop_invocation, exc_msg): """ Verify that the provided command exists. Raise CommandDoesNotExistException in case of an error or if the command does not exist. :param command: str, command to check (python 3 only) :param noop_invocation: list of str, command to check (python 2 only) :param exc_msg: str, message of exception when command does not exist :return: bool, True if everything's all right (otherwise exception is thrown) """ try: found = bool(shutil.which(command)) # py3 only except AttributeError: # py2 branch try: p = subprocess.Popen(noop_invocation, stdout=subprocess.PIPE, stderr=subprocess.PIPE) except OSError: found = False else: stdout, stderr = p.communicate() found = p.returncode == 0 if not found: logger.error("`%s` exited with a non-zero return code (%s)", noop_invocation, p.returncode) logger.error("command stdout = %s", stdout) logger.error("command stderr = %s", stderr) if not found: raise CommandDoesNotExistException(exc_msg) return True
[ "def", "command_exists", "(", "command", ",", "noop_invocation", ",", "exc_msg", ")", ":", "try", ":", "found", "=", "bool", "(", "shutil", ".", "which", "(", "command", ")", ")", "# py3 only", "except", "AttributeError", ":", "# py2 branch", "try", ":", "p", "=", "subprocess", ".", "Popen", "(", "noop_invocation", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "except", "OSError", ":", "found", "=", "False", "else", ":", "stdout", ",", "stderr", "=", "p", ".", "communicate", "(", ")", "found", "=", "p", ".", "returncode", "==", "0", "if", "not", "found", ":", "logger", ".", "error", "(", "\"`%s` exited with a non-zero return code (%s)\"", ",", "noop_invocation", ",", "p", ".", "returncode", ")", "logger", ".", "error", "(", "\"command stdout = %s\"", ",", "stdout", ")", "logger", ".", "error", "(", "\"command stderr = %s\"", ",", "stderr", ")", "if", "not", "found", ":", "raise", "CommandDoesNotExistException", "(", "exc_msg", ")", "return", "True" ]
Verify that the provided command exists. Raise CommandDoesNotExistException in case of an error or if the command does not exist. :param command: str, command to check (python 3 only) :param noop_invocation: list of str, command to check (python 2 only) :param exc_msg: str, message of exception when command does not exist :return: bool, True if everything's all right (otherwise exception is thrown)
[ "Verify", "that", "the", "provided", "command", "exists", ".", "Raise", "CommandDoesNotExistException", "in", "case", "of", "an", "error", "or", "if", "the", "command", "does", "not", "exist", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/__init__.py#L173-L200
train
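A usage sketch for command_exists; the exception import path is an assumption, and the checked command is only illustrative:

from conu.utils import command_exists
from conu.exceptions import CommandDoesNotExistException  # assumed module path

try:
    command_exists("podman", ["podman", "--version"], "please install podman")
except CommandDoesNotExistException as ex:
    print(ex)  # "please install podman" when the binary is missing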
user-cont/conu
conu/utils/__init__.py
check_docker_command_works
def check_docker_command_works(): """ Verify that dockerd and docker binary works fine. This is performed by calling `docker version`, which also checks server API version. :return: bool, True if all is good, otherwise ConuException or CommandDoesNotExistException is thrown """ try: out = subprocess.check_output(["docker", "version"], stderr=subprocess.STDOUT, universal_newlines=True) except OSError: logger.info("docker binary is not available") raise CommandDoesNotExistException( "docker command doesn't seem to be available on your system. " "Please install and configure docker." ) except subprocess.CalledProcessError as ex: logger.error("exception: %s", ex) logger.error("rc: %s, output: %r", ex.returncode, ex.output) raise ConuException( "`docker version` call failed, it seems that your docker daemon is misconfigured or " "this user can't communicate with dockerd." ) else: logger.info("docker environment info: %r", out) return True
python
def check_docker_command_works(): """ Verify that dockerd and docker binary works fine. This is performed by calling `docker version`, which also checks server API version. :return: bool, True if all is good, otherwise ConuException or CommandDoesNotExistException is thrown """ try: out = subprocess.check_output(["docker", "version"], stderr=subprocess.STDOUT, universal_newlines=True) except OSError: logger.info("docker binary is not available") raise CommandDoesNotExistException( "docker command doesn't seem to be available on your system. " "Please install and configure docker." ) except subprocess.CalledProcessError as ex: logger.error("exception: %s", ex) logger.error("rc: %s, output: %r", ex.returncode, ex.output) raise ConuException( "`docker version` call failed, it seems that your docker daemon is misconfigured or " "this user can't communicate with dockerd." ) else: logger.info("docker environment info: %r", out) return True
[ "def", "check_docker_command_works", "(", ")", ":", "try", ":", "out", "=", "subprocess", ".", "check_output", "(", "[", "\"docker\"", ",", "\"version\"", "]", ",", "stderr", "=", "subprocess", ".", "STDOUT", ",", "universal_newlines", "=", "True", ")", "except", "OSError", ":", "logger", ".", "info", "(", "\"docker binary is not available\"", ")", "raise", "CommandDoesNotExistException", "(", "\"docker command doesn't seem to be available on your system. \"", "\"Please install and configure docker.\"", ")", "except", "subprocess", ".", "CalledProcessError", "as", "ex", ":", "logger", ".", "error", "(", "\"exception: %s\"", ",", "ex", ")", "logger", ".", "error", "(", "\"rc: %s, output: %r\"", ",", "ex", ".", "returncode", ",", "ex", ".", "output", ")", "raise", "ConuException", "(", "\"`docker version` call failed, it seems that your docker daemon is misconfigured or \"", "\"this user can't communicate with dockerd.\"", ")", "else", ":", "logger", ".", "info", "(", "\"docker environment info: %r\"", ",", "out", ")", "return", "True" ]
Verify that dockerd and docker binary work fine. This is performed by calling `docker version`, which also checks server API version. :return: bool, True if all is good, otherwise ConuException or CommandDoesNotExistException is thrown
[ "Verify", "that", "dockerd", "and", "docker", "binary", "work", "fine", ".", "This", "is", "performed", "by", "calling", "docker", "version", "which", "also", "checks", "server", "API", "version", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/__init__.py#L250-L277
train
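A usage sketch for check_docker_command_works; the exception import paths are assumptions, and a running docker daemon is required for the call to succeed:

from conu.utils import check_docker_command_works
from conu.exceptions import ConuException, CommandDoesNotExistException  # assumed module path

try:
    check_docker_command_works()
    print("docker client and daemon look usable")
except (ConuException, CommandDoesNotExistException) as ex:
    print("docker is not usable here: %s" % ex)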
user-cont/conu
conu/utils/__init__.py
export_docker_container_to_directory
def export_docker_container_to_directory(client, container, path): """ take selected docker container, create an archive out of it and unpack it to a selected location :param client: instance of docker.APIClient :param container: instance of DockerContainer :param path: str, path to a directory, doesn't need to exist :return: None """ # we don't do this because of a bug in docker: # https://bugzilla.redhat.com/show_bug.cgi?id=1570828 # stream, _ = client.get_archive(container.get_id(), "/") check_docker_command_works() export_p = subprocess.Popen( ["docker", "export", container.get_id()], stderr=subprocess.PIPE, stdout=subprocess.PIPE ) try: os.mkdir(path, 0o0700) except OSError as ex: if ex.errno == errno.EEXIST: logger.debug("mount point %s exists already", path) else: logger.error("mount point %s can't be created: %s", path, ex) raise logger.debug("about to untar the image") # we can't use tarfile because of --no-same-owner: files in containers are owned # by root and tarfile is trying to `chown 0 file` when running as an unpriv user p = subprocess.Popen( ["tar", "--no-same-owner", "-C", path, "-x"], stdin=subprocess.PIPE, stderr=subprocess.PIPE, ) while True: data = export_p.stdout.read(1048576) if not data: break p.stdin.write(data) p.stdin.close() p.wait() export_p.wait() if export_p.returncode: logger.error(export_p.stderr.read()) raise ConuException("Failed to get rootfs of %s from docker." % container) if p.returncode: logger.error(p.stderr.read()) raise ConuException("Failed to unpack the archive.") logger.debug("image is unpacked")
python
def export_docker_container_to_directory(client, container, path): """ take selected docker container, create an archive out of it and unpack it to a selected location :param client: instance of docker.APIClient :param container: instance of DockerContainer :param path: str, path to a directory, doesn't need to exist :return: None """ # we don't do this because of a bug in docker: # https://bugzilla.redhat.com/show_bug.cgi?id=1570828 # stream, _ = client.get_archive(container.get_id(), "/") check_docker_command_works() export_p = subprocess.Popen( ["docker", "export", container.get_id()], stderr=subprocess.PIPE, stdout=subprocess.PIPE ) try: os.mkdir(path, 0o0700) except OSError as ex: if ex.errno == errno.EEXIST: logger.debug("mount point %s exists already", path) else: logger.error("mount point %s can't be created: %s", path, ex) raise logger.debug("about to untar the image") # we can't use tarfile because of --no-same-owner: files in containers are owned # by root and tarfile is trying to `chown 0 file` when running as an unpriv user p = subprocess.Popen( ["tar", "--no-same-owner", "-C", path, "-x"], stdin=subprocess.PIPE, stderr=subprocess.PIPE, ) while True: data = export_p.stdout.read(1048576) if not data: break p.stdin.write(data) p.stdin.close() p.wait() export_p.wait() if export_p.returncode: logger.error(export_p.stderr.read()) raise ConuException("Failed to get rootfs of %s from docker." % container) if p.returncode: logger.error(p.stderr.read()) raise ConuException("Failed to unpack the archive.") logger.debug("image is unpacked")
[ "def", "export_docker_container_to_directory", "(", "client", ",", "container", ",", "path", ")", ":", "# we don't do this because of a bug in docker:", "# https://bugzilla.redhat.com/show_bug.cgi?id=1570828", "# stream, _ = client.get_archive(container.get_id(), \"/\")", "check_docker_command_works", "(", ")", "export_p", "=", "subprocess", ".", "Popen", "(", "[", "\"docker\"", ",", "\"export\"", ",", "container", ".", "get_id", "(", ")", "]", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "try", ":", "os", ".", "mkdir", "(", "path", ",", "0o0700", ")", "except", "OSError", "as", "ex", ":", "if", "ex", ".", "errno", "==", "errno", ".", "EEXIST", ":", "logger", ".", "debug", "(", "\"mount point %s exists already\"", ",", "path", ")", "else", ":", "logger", ".", "error", "(", "\"mount point %s can't be created: %s\"", ",", "path", ",", "ex", ")", "raise", "logger", ".", "debug", "(", "\"about to untar the image\"", ")", "# we can't use tarfile because of --no-same-owner: files in containers are owned", "# by root and tarfile is trying to `chown 0 file` when running as an unpriv user", "p", "=", "subprocess", ".", "Popen", "(", "[", "\"tar\"", ",", "\"--no-same-owner\"", ",", "\"-C\"", ",", "path", ",", "\"-x\"", "]", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", ")", "while", "True", ":", "data", "=", "export_p", ".", "stdout", ".", "read", "(", "1048576", ")", "if", "not", "data", ":", "break", "p", ".", "stdin", ".", "write", "(", "data", ")", "p", ".", "stdin", ".", "close", "(", ")", "p", ".", "wait", "(", ")", "export_p", ".", "wait", "(", ")", "if", "export_p", ".", "returncode", ":", "logger", ".", "error", "(", "export_p", ".", "stderr", ".", "read", "(", ")", ")", "raise", "ConuException", "(", "\"Failed to get rootfs of %s from docker.\"", "%", "container", ")", "if", "p", ".", "returncode", ":", "logger", ".", "error", "(", "p", ".", "stderr", ".", "read", "(", ")", ")", "raise", "ConuException", "(", "\"Failed to unpack the archive.\"", ")", "logger", ".", "debug", "(", "\"image is unpacked\"", ")" ]
take selected docker container, create an archive out of it and unpack it to a selected location :param client: instance of docker.APIClient :param container: instance of DockerContainer :param path: str, path to a directory, doesn't need to exist :return: None
[ "take", "selected", "docker", "container", "create", "an", "archive", "out", "of", "it", "and", "unpack", "it", "to", "a", "selected", "location" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/__init__.py#L335-L386
train
user-cont/conu
conu/backend/podman/backend.py
PodmanBackend.get_version
def get_version(self): """ return 3-tuple of version info or None :return: (str, str, str) """ raw_version = run_cmd(["podman", "version"], return_output=True) regex = re.compile(r"Version:\s*(\d+)\.(\d+)\.(\d+)") match = regex.findall(raw_version) try: return match[0] except IndexError: logger.error("unable to parse version from `podman version`") return
python
def get_version(self): """ return 3-tuple of version info or None :return: (str, str, str) """ raw_version = run_cmd(["podman", "version"], return_output=True) regex = re.compile(r"Version:\s*(\d+)\.(\d+)\.(\d+)") match = regex.findall(raw_version) try: return match[0] except IndexError: logger.error("unable to parse version from `podman version`") return
[ "def", "get_version", "(", "self", ")", ":", "raw_version", "=", "run_cmd", "(", "[", "\"podman\"", ",", "\"version\"", "]", ",", "return_output", "=", "True", ")", "regex", "=", "re", ".", "compile", "(", "r\"Version:\\s*(\\d+)\\.(\\d+)\\.(\\d+)\"", ")", "match", "=", "regex", ".", "findall", "(", "raw_version", ")", "try", ":", "return", "match", "[", "0", "]", "except", "IndexError", ":", "logger", ".", "error", "(", "\"unable to parse version from `podman version`\"", ")", "return" ]
return 3-tuple of version info or None :return: (str, str, str)
[ "return", "3", "-", "tuple", "of", "version", "info", "or", "None" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/podman/backend.py#L92-L105
train
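The version detection in PodmanBackend.get_version is a plain regular expression over `podman version` output; a standalone sketch of the same parsing against an illustrative output string:

import re

raw_version = "Version:            1.4.4\nRemoteAPI Version:  1\nGo Version:         go1.12.7\n"  # illustrative
match = re.compile(r"Version:\s*(\d+)\.(\d+)\.(\d+)").findall(raw_version)
print(match[0] if match else None)  # ('1', '4', '4')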
user-cont/conu
conu/backend/podman/backend.py
PodmanBackend.list_containers
def list_containers(self): """ List all available podman containers. :return: collection of instances of :class:`conu.PodmanContainer` """ containers = [] for container in self._list_podman_containers(): identifier = container["ID"] name = container["Names"] image_name = container["Image"] try: image_name, image_tag = parse_reference(image_name) except (IndexError, TypeError): image_name, image_tag = None, None image = PodmanImage(image_name, tag=image_tag, identifier=None) container = PodmanContainer(image, identifier, name=name) containers.append(container) return containers
python
def list_containers(self): """ List all available podman containers. :return: collection of instances of :class:`conu.PodmanContainer` """ containers = [] for container in self._list_podman_containers(): identifier = container["ID"] name = container["Names"] image_name = container["Image"] try: image_name, image_tag = parse_reference(image_name) except (IndexError, TypeError): image_name, image_tag = None, None image = PodmanImage(image_name, tag=image_tag, identifier=None) container = PodmanContainer(image, identifier, name=name) containers.append(container) return containers
[ "def", "list_containers", "(", "self", ")", ":", "containers", "=", "[", "]", "for", "container", "in", "self", ".", "_list_podman_containers", "(", ")", ":", "identifier", "=", "container", "[", "\"ID\"", "]", "name", "=", "container", "[", "\"Names\"", "]", "image_name", "=", "container", "[", "\"Image\"", "]", "try", ":", "image_name", ",", "image_tag", "=", "parse_reference", "(", "image_name", ")", "except", "(", "IndexError", ",", "TypeError", ")", ":", "image_name", ",", "image_tag", "=", "None", ",", "None", "image", "=", "PodmanImage", "(", "image_name", ",", "tag", "=", "image_tag", ",", "identifier", "=", "None", ")", "container", "=", "PodmanContainer", "(", "image", ",", "identifier", ",", "name", "=", "name", ")", "containers", ".", "append", "(", "container", ")", "return", "containers" ]
List all available podman containers. :return: collection of instances of :class:`conu.PodmanContainer`
[ "List", "all", "available", "podman", "containers", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/podman/backend.py#L116-L137
train
user-cont/conu
conu/backend/podman/backend.py
PodmanBackend.list_images
def list_images(self): """ List all available podman images. :return: collection of instances of :class:`conu.PodmanImage` """ images = [] for image in self._list_all_podman_images(): try: i_name, tag = parse_reference(image["names"][0]) except (IndexError, TypeError): i_name, tag = None, None d_im = PodmanImage(i_name, tag=tag, identifier=image["id"], pull_policy=PodmanImagePullPolicy.NEVER) images.append(d_im) return images
python
def list_images(self): """ List all available podman images. :return: collection of instances of :class:`conu.PodmanImage` """ images = [] for image in self._list_all_podman_images(): try: i_name, tag = parse_reference(image["names"][0]) except (IndexError, TypeError): i_name, tag = None, None d_im = PodmanImage(i_name, tag=tag, identifier=image["id"], pull_policy=PodmanImagePullPolicy.NEVER) images.append(d_im) return images
[ "def", "list_images", "(", "self", ")", ":", "images", "=", "[", "]", "for", "image", "in", "self", ".", "_list_all_podman_images", "(", ")", ":", "try", ":", "i_name", ",", "tag", "=", "parse_reference", "(", "image", "[", "\"names\"", "]", "[", "0", "]", ")", "except", "(", "IndexError", ",", "TypeError", ")", ":", "i_name", ",", "tag", "=", "None", ",", "None", "d_im", "=", "PodmanImage", "(", "i_name", ",", "tag", "=", "tag", ",", "identifier", "=", "image", "[", "\"id\"", "]", ",", "pull_policy", "=", "PodmanImagePullPolicy", ".", "NEVER", ")", "images", ".", "append", "(", "d_im", ")", "return", "images" ]
List all available podman images. :return: collection of instances of :class:`conu.PodmanImage`
[ "List", "all", "available", "podman", "images", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/podman/backend.py#L139-L155
train
user-cont/conu
conu/backend/docker/utils.py
inspect_to_metadata
def inspect_to_metadata(metadata_object, inspect_data): """ process data from `docker inspect` and update provided metadata object :param metadata_object: instance of Metadata :param inspect_data: dict, metadata from `docker inspect` or `dockert_client.images()` :return: instance of Metadata """ identifier = graceful_get(inspect_data, 'Id') if identifier: if ":" in identifier: # format of image name from docker inspect: # sha256:8f0e66c924c0c169352de487a3c2463d82da24e9442fc097dddaa5f800df7129 metadata_object.identifier = identifier.split(':')[1] else: # container metadata_object.identifier = identifier # format of Environment Variables from docker inspect: # ['DISTTAG=f26container', 'FGC=f26'] raw_env_vars = graceful_get(inspect_data, "Config", "Env") or [] if raw_env_vars: metadata_object.env_variables = {} for env_variable in raw_env_vars: splits = env_variable.split("=", 1) name = splits[0] value = splits[1] if len(splits) > 1 else None if value is not None: metadata_object.env_variables.update({name: value}) raw_exposed_ports = graceful_get(inspect_data, "Config", "ExposedPorts") if raw_exposed_ports: metadata_object.exposed_ports = list(raw_exposed_ports.keys()) # specific to images raw_repo_tags = graceful_get(inspect_data, 'RepoTags') if raw_repo_tags: metadata_object.name = raw_repo_tags[0] metadata_object.labels = graceful_get(inspect_data, 'Config', 'Labels') metadata_object.command = graceful_get(inspect_data, 'Config', 'Cmd') metadata_object.creation_timestamp = inspect_data.get('Created', None) # specific to images metadata_object.image_names = inspect_data.get('RepoTags', None) # specific to images digests = inspect_data.get("RepoDigests", None) if digests: metadata_object.repo_digests = digests metadata_object.digest = digests[0] return metadata_object
python
def inspect_to_metadata(metadata_object, inspect_data): """ process data from `docker inspect` and update provided metadata object :param metadata_object: instance of Metadata :param inspect_data: dict, metadata from `docker inspect` or `dockert_client.images()` :return: instance of Metadata """ identifier = graceful_get(inspect_data, 'Id') if identifier: if ":" in identifier: # format of image name from docker inspect: # sha256:8f0e66c924c0c169352de487a3c2463d82da24e9442fc097dddaa5f800df7129 metadata_object.identifier = identifier.split(':')[1] else: # container metadata_object.identifier = identifier # format of Environment Variables from docker inspect: # ['DISTTAG=f26container', 'FGC=f26'] raw_env_vars = graceful_get(inspect_data, "Config", "Env") or [] if raw_env_vars: metadata_object.env_variables = {} for env_variable in raw_env_vars: splits = env_variable.split("=", 1) name = splits[0] value = splits[1] if len(splits) > 1 else None if value is not None: metadata_object.env_variables.update({name: value}) raw_exposed_ports = graceful_get(inspect_data, "Config", "ExposedPorts") if raw_exposed_ports: metadata_object.exposed_ports = list(raw_exposed_ports.keys()) # specific to images raw_repo_tags = graceful_get(inspect_data, 'RepoTags') if raw_repo_tags: metadata_object.name = raw_repo_tags[0] metadata_object.labels = graceful_get(inspect_data, 'Config', 'Labels') metadata_object.command = graceful_get(inspect_data, 'Config', 'Cmd') metadata_object.creation_timestamp = inspect_data.get('Created', None) # specific to images metadata_object.image_names = inspect_data.get('RepoTags', None) # specific to images digests = inspect_data.get("RepoDigests", None) if digests: metadata_object.repo_digests = digests metadata_object.digest = digests[0] return metadata_object
[ "def", "inspect_to_metadata", "(", "metadata_object", ",", "inspect_data", ")", ":", "identifier", "=", "graceful_get", "(", "inspect_data", ",", "'Id'", ")", "if", "identifier", ":", "if", "\":\"", "in", "identifier", ":", "# format of image name from docker inspect:", "# sha256:8f0e66c924c0c169352de487a3c2463d82da24e9442fc097dddaa5f800df7129", "metadata_object", ".", "identifier", "=", "identifier", ".", "split", "(", "':'", ")", "[", "1", "]", "else", ":", "# container", "metadata_object", ".", "identifier", "=", "identifier", "# format of Environment Variables from docker inspect:", "# ['DISTTAG=f26container', 'FGC=f26']", "raw_env_vars", "=", "graceful_get", "(", "inspect_data", ",", "\"Config\"", ",", "\"Env\"", ")", "or", "[", "]", "if", "raw_env_vars", ":", "metadata_object", ".", "env_variables", "=", "{", "}", "for", "env_variable", "in", "raw_env_vars", ":", "splits", "=", "env_variable", ".", "split", "(", "\"=\"", ",", "1", ")", "name", "=", "splits", "[", "0", "]", "value", "=", "splits", "[", "1", "]", "if", "len", "(", "splits", ")", ">", "1", "else", "None", "if", "value", "is", "not", "None", ":", "metadata_object", ".", "env_variables", ".", "update", "(", "{", "name", ":", "value", "}", ")", "raw_exposed_ports", "=", "graceful_get", "(", "inspect_data", ",", "\"Config\"", ",", "\"ExposedPorts\"", ")", "if", "raw_exposed_ports", ":", "metadata_object", ".", "exposed_ports", "=", "list", "(", "raw_exposed_ports", ".", "keys", "(", ")", ")", "# specific to images", "raw_repo_tags", "=", "graceful_get", "(", "inspect_data", ",", "'RepoTags'", ")", "if", "raw_repo_tags", ":", "metadata_object", ".", "name", "=", "raw_repo_tags", "[", "0", "]", "metadata_object", ".", "labels", "=", "graceful_get", "(", "inspect_data", ",", "'Config'", ",", "'Labels'", ")", "metadata_object", ".", "command", "=", "graceful_get", "(", "inspect_data", ",", "'Config'", ",", "'Cmd'", ")", "metadata_object", ".", "creation_timestamp", "=", "inspect_data", ".", "get", "(", "'Created'", ",", "None", ")", "# specific to images", "metadata_object", ".", "image_names", "=", "inspect_data", ".", "get", "(", "'RepoTags'", ",", "None", ")", "# specific to images", "digests", "=", "inspect_data", ".", "get", "(", "\"RepoDigests\"", ",", "None", ")", "if", "digests", ":", "metadata_object", ".", "repo_digests", "=", "digests", "metadata_object", ".", "digest", "=", "digests", "[", "0", "]", "return", "metadata_object" ]
process data from `docker inspect` and update provided metadata object :param metadata_object: instance of Metadata :param inspect_data: dict, metadata from `docker inspect` or `docker_client.images()` :return: instance of Metadata
[ "process", "data", "from", "docker", "inspect", "and", "update", "provided", "metadata", "object" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/utils.py#L13-L62
train
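The environment-variable handling in inspect_to_metadata turns docker's ['NAME=value', ...] list into a dict, splitting only on the first '='. A standalone sketch of just that step, reusing the sample values from the code comment:

raw_env_vars = ['DISTTAG=f26container', 'FGC=f26']
env_variables = {}
for env_variable in raw_env_vars:
    splits = env_variable.split("=", 1)  # split only on the first '=' so values may themselves contain '='
    name = splits[0]
    value = splits[1] if len(splits) > 1 else None
    if value is not None:
        env_variables[name] = value
print(env_variables)  # {'DISTTAG': 'f26container', 'FGC': 'f26'}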
user-cont/conu
conu/backend/docker/utils.py
inspect_to_container_metadata
def inspect_to_container_metadata(c_metadata_object, inspect_data, image_instance): """ process data from `docker container inspect` and update provided container metadata object :param c_metadata_object: instance of ContainerMetadata :param inspect_data: dict, metadata from `docker inspect` or `dockert_client.images()` :param image_instance: instance of DockerImage :return: instance of ContainerMetadata """ inspect_to_metadata(c_metadata_object, inspect_data) status = ContainerStatus.get_from_docker( graceful_get(inspect_data, "State", "Status"), graceful_get(inspect_data, "State", "ExitCode"), ) image_id = graceful_get(inspect_data, "Image") if image_id: if ":" in image_id: # format of image name from docker inspect: # sha256:8f0e66c924c0c169352de487a3c2463d82da24e9442fc097dddaa5f800df7129 image_instance.identifier = image_id.split(':')[1] else: # container image_instance.identifier = image_id # format of Port mappings from docker inspect: # {'12345/tcp': [ # {'HostIp': '0.0.0.0', 'HostPort': '123'}, # {'HostIp': '0.0.0.0', 'HostPort': '1234'}]} port_mappings = dict() raw_port_mappings = graceful_get(inspect_data, 'HostConfig', 'PortBindings') or {} for key, value in raw_port_mappings.items(): for item in value: logger.debug("parsing ports: key = %s, item = %s", key, item) li = port_mappings.get(key, []) raw_host_port = item['HostPort'] if raw_host_port == "": int_port = None else: try: int_port = int(raw_host_port) except ValueError as ex: logger.error("could not parse port: %s", ex) continue li.append(int_port) port_mappings.update({key: li}) c_metadata_object.status = status c_metadata_object.port_mappings = port_mappings c_metadata_object.hostname = graceful_get(inspect_data, 'Config', 'Hostname') raw_networks = graceful_get(inspect_data, "NetworkSettings", "Networks").values() if raw_networks: c_metadata_object.ipv4_addresses = [ graceful_get(x, "IPAddress") for x in raw_networks if graceful_get(x, "IPAddress")] c_metadata_object.ipv6_addresses = [ graceful_get(x, "GlobalIPv6Address") for x in raw_networks if graceful_get(x, "GlobalIPv6Address")] c_metadata_object.image = image_instance name = graceful_get(inspect_data, "Name") if name: name = name[1:] if name.startswith("/") else name # remove / at the beginning c_metadata_object.name = name return c_metadata_object
python
def inspect_to_container_metadata(c_metadata_object, inspect_data, image_instance): """ process data from `docker container inspect` and update provided container metadata object :param c_metadata_object: instance of ContainerMetadata :param inspect_data: dict, metadata from `docker inspect` or `dockert_client.images()` :param image_instance: instance of DockerImage :return: instance of ContainerMetadata """ inspect_to_metadata(c_metadata_object, inspect_data) status = ContainerStatus.get_from_docker( graceful_get(inspect_data, "State", "Status"), graceful_get(inspect_data, "State", "ExitCode"), ) image_id = graceful_get(inspect_data, "Image") if image_id: if ":" in image_id: # format of image name from docker inspect: # sha256:8f0e66c924c0c169352de487a3c2463d82da24e9442fc097dddaa5f800df7129 image_instance.identifier = image_id.split(':')[1] else: # container image_instance.identifier = image_id # format of Port mappings from docker inspect: # {'12345/tcp': [ # {'HostIp': '0.0.0.0', 'HostPort': '123'}, # {'HostIp': '0.0.0.0', 'HostPort': '1234'}]} port_mappings = dict() raw_port_mappings = graceful_get(inspect_data, 'HostConfig', 'PortBindings') or {} for key, value in raw_port_mappings.items(): for item in value: logger.debug("parsing ports: key = %s, item = %s", key, item) li = port_mappings.get(key, []) raw_host_port = item['HostPort'] if raw_host_port == "": int_port = None else: try: int_port = int(raw_host_port) except ValueError as ex: logger.error("could not parse port: %s", ex) continue li.append(int_port) port_mappings.update({key: li}) c_metadata_object.status = status c_metadata_object.port_mappings = port_mappings c_metadata_object.hostname = graceful_get(inspect_data, 'Config', 'Hostname') raw_networks = graceful_get(inspect_data, "NetworkSettings", "Networks").values() if raw_networks: c_metadata_object.ipv4_addresses = [ graceful_get(x, "IPAddress") for x in raw_networks if graceful_get(x, "IPAddress")] c_metadata_object.ipv6_addresses = [ graceful_get(x, "GlobalIPv6Address") for x in raw_networks if graceful_get(x, "GlobalIPv6Address")] c_metadata_object.image = image_instance name = graceful_get(inspect_data, "Name") if name: name = name[1:] if name.startswith("/") else name # remove / at the beginning c_metadata_object.name = name return c_metadata_object
[ "def", "inspect_to_container_metadata", "(", "c_metadata_object", ",", "inspect_data", ",", "image_instance", ")", ":", "inspect_to_metadata", "(", "c_metadata_object", ",", "inspect_data", ")", "status", "=", "ContainerStatus", ".", "get_from_docker", "(", "graceful_get", "(", "inspect_data", ",", "\"State\"", ",", "\"Status\"", ")", ",", "graceful_get", "(", "inspect_data", ",", "\"State\"", ",", "\"ExitCode\"", ")", ",", ")", "image_id", "=", "graceful_get", "(", "inspect_data", ",", "\"Image\"", ")", "if", "image_id", ":", "if", "\":\"", "in", "image_id", ":", "# format of image name from docker inspect:", "# sha256:8f0e66c924c0c169352de487a3c2463d82da24e9442fc097dddaa5f800df7129", "image_instance", ".", "identifier", "=", "image_id", ".", "split", "(", "':'", ")", "[", "1", "]", "else", ":", "# container", "image_instance", ".", "identifier", "=", "image_id", "# format of Port mappings from docker inspect:", "# {'12345/tcp': [", "# {'HostIp': '0.0.0.0', 'HostPort': '123'},", "# {'HostIp': '0.0.0.0', 'HostPort': '1234'}]}", "port_mappings", "=", "dict", "(", ")", "raw_port_mappings", "=", "graceful_get", "(", "inspect_data", ",", "'HostConfig'", ",", "'PortBindings'", ")", "or", "{", "}", "for", "key", ",", "value", "in", "raw_port_mappings", ".", "items", "(", ")", ":", "for", "item", "in", "value", ":", "logger", ".", "debug", "(", "\"parsing ports: key = %s, item = %s\"", ",", "key", ",", "item", ")", "li", "=", "port_mappings", ".", "get", "(", "key", ",", "[", "]", ")", "raw_host_port", "=", "item", "[", "'HostPort'", "]", "if", "raw_host_port", "==", "\"\"", ":", "int_port", "=", "None", "else", ":", "try", ":", "int_port", "=", "int", "(", "raw_host_port", ")", "except", "ValueError", "as", "ex", ":", "logger", ".", "error", "(", "\"could not parse port: %s\"", ",", "ex", ")", "continue", "li", ".", "append", "(", "int_port", ")", "port_mappings", ".", "update", "(", "{", "key", ":", "li", "}", ")", "c_metadata_object", ".", "status", "=", "status", "c_metadata_object", ".", "port_mappings", "=", "port_mappings", "c_metadata_object", ".", "hostname", "=", "graceful_get", "(", "inspect_data", ",", "'Config'", ",", "'Hostname'", ")", "raw_networks", "=", "graceful_get", "(", "inspect_data", ",", "\"NetworkSettings\"", ",", "\"Networks\"", ")", ".", "values", "(", ")", "if", "raw_networks", ":", "c_metadata_object", ".", "ipv4_addresses", "=", "[", "graceful_get", "(", "x", ",", "\"IPAddress\"", ")", "for", "x", "in", "raw_networks", "if", "graceful_get", "(", "x", ",", "\"IPAddress\"", ")", "]", "c_metadata_object", ".", "ipv6_addresses", "=", "[", "graceful_get", "(", "x", ",", "\"GlobalIPv6Address\"", ")", "for", "x", "in", "raw_networks", "if", "graceful_get", "(", "x", ",", "\"GlobalIPv6Address\"", ")", "]", "c_metadata_object", ".", "image", "=", "image_instance", "name", "=", "graceful_get", "(", "inspect_data", ",", "\"Name\"", ")", "if", "name", ":", "name", "=", "name", "[", "1", ":", "]", "if", "name", ".", "startswith", "(", "\"/\"", ")", "else", "name", "# remove / at the beginning", "c_metadata_object", ".", "name", "=", "name", "return", "c_metadata_object" ]
process data from `docker container inspect` and update provided container metadata object :param c_metadata_object: instance of ContainerMetadata :param inspect_data: dict, metadata from `docker inspect` or `docker_client.images()` :param image_instance: instance of DockerImage :return: instance of ContainerMetadata
[ "process", "data", "from", "docker", "container", "inspect", "and", "update", "provided", "container", "metadata", "object" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/utils.py#L65-L132
train
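The port-mapping loop in inspect_to_container_metadata converts docker's 'HostConfig.PortBindings' structure into a dict of integer host ports. A standalone sketch using the sample shape from the code comment:

raw_port_mappings = {'12345/tcp': [{'HostIp': '0.0.0.0', 'HostPort': '123'},
                                   {'HostIp': '0.0.0.0', 'HostPort': '1234'}]}
port_mappings = {}
for key, value in raw_port_mappings.items():
    ports = []
    for item in value:
        raw_host_port = item['HostPort']
        ports.append(int(raw_host_port) if raw_host_port else None)  # '' means no fixed host port
    port_mappings[key] = ports
print(port_mappings)  # {'12345/tcp': [123, 1234]}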
user-cont/conu
conu/backend/k8s/backend.py
K8sBackend.list_pods
def list_pods(self, namespace=None): """ List all available pods. :param namespace: str, if not specified list pods for all namespaces :return: collection of instances of :class:`conu.backend.k8s.pod.Pod` """ if namespace: return [Pod(name=p.metadata.name, namespace=namespace, spec=p.spec) for p in self.core_api.list_namespaced_pod(namespace, watch=False).items] return [Pod(name=p.metadata.name, namespace=p.metadata.namespace, spec=p.spec) for p in self.core_api.list_pod_for_all_namespaces(watch=False).items]
python
def list_pods(self, namespace=None): """ List all available pods. :param namespace: str, if not specified list pods for all namespaces :return: collection of instances of :class:`conu.backend.k8s.pod.Pod` """ if namespace: return [Pod(name=p.metadata.name, namespace=namespace, spec=p.spec) for p in self.core_api.list_namespaced_pod(namespace, watch=False).items] return [Pod(name=p.metadata.name, namespace=p.metadata.namespace, spec=p.spec) for p in self.core_api.list_pod_for_all_namespaces(watch=False).items]
[ "def", "list_pods", "(", "self", ",", "namespace", "=", "None", ")", ":", "if", "namespace", ":", "return", "[", "Pod", "(", "name", "=", "p", ".", "metadata", ".", "name", ",", "namespace", "=", "namespace", ",", "spec", "=", "p", ".", "spec", ")", "for", "p", "in", "self", ".", "core_api", ".", "list_namespaced_pod", "(", "namespace", ",", "watch", "=", "False", ")", ".", "items", "]", "return", "[", "Pod", "(", "name", "=", "p", ".", "metadata", ".", "name", ",", "namespace", "=", "p", ".", "metadata", ".", "namespace", ",", "spec", "=", "p", ".", "spec", ")", "for", "p", "in", "self", ".", "core_api", ".", "list_pod_for_all_namespaces", "(", "watch", "=", "False", ")", ".", "items", "]" ]
List all available pods. :param namespace: str, if not specified list pods for all namespaces :return: collection of instances of :class:`conu.backend.k8s.pod.Pod`
[ "List", "all", "available", "pods", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/k8s/backend.py#L70-L83
train
user-cont/conu
conu/backend/k8s/backend.py
K8sBackend.list_services
def list_services(self, namespace=None): """ List all available services. :param namespace: str, if not specified list services for all namespaces :return: collection of instances of :class:`conu.backend.k8s.service.Service` """ if namespace: return [Service(name=s.metadata.name, ports=k8s_ports_to_metadata_ports(s.spec.ports), namespace=s.metadata.namespace, labels=s.metadata.labels, selector=s.spec.selector, spec=s.spec) for s in self.core_api.list_namespaced_service(namespace, watch=False).items] return [Service(name=s.metadata.name, ports=k8s_ports_to_metadata_ports(s.spec.ports), namespace=s.metadata.namespace, labels=s.metadata.labels, selector=s.spec.selector, spec=s.spec) for s in self.core_api.list_service_for_all_namespaces(watch=False).items]
python
def list_services(self, namespace=None): """ List all available services. :param namespace: str, if not specified list services for all namespaces :return: collection of instances of :class:`conu.backend.k8s.service.Service` """ if namespace: return [Service(name=s.metadata.name, ports=k8s_ports_to_metadata_ports(s.spec.ports), namespace=s.metadata.namespace, labels=s.metadata.labels, selector=s.spec.selector, spec=s.spec) for s in self.core_api.list_namespaced_service(namespace, watch=False).items] return [Service(name=s.metadata.name, ports=k8s_ports_to_metadata_ports(s.spec.ports), namespace=s.metadata.namespace, labels=s.metadata.labels, selector=s.spec.selector, spec=s.spec) for s in self.core_api.list_service_for_all_namespaces(watch=False).items]
[ "def", "list_services", "(", "self", ",", "namespace", "=", "None", ")", ":", "if", "namespace", ":", "return", "[", "Service", "(", "name", "=", "s", ".", "metadata", ".", "name", ",", "ports", "=", "k8s_ports_to_metadata_ports", "(", "s", ".", "spec", ".", "ports", ")", ",", "namespace", "=", "s", ".", "metadata", ".", "namespace", ",", "labels", "=", "s", ".", "metadata", ".", "labels", ",", "selector", "=", "s", ".", "spec", ".", "selector", ",", "spec", "=", "s", ".", "spec", ")", "for", "s", "in", "self", ".", "core_api", ".", "list_namespaced_service", "(", "namespace", ",", "watch", "=", "False", ")", ".", "items", "]", "return", "[", "Service", "(", "name", "=", "s", ".", "metadata", ".", "name", ",", "ports", "=", "k8s_ports_to_metadata_ports", "(", "s", ".", "spec", ".", "ports", ")", ",", "namespace", "=", "s", ".", "metadata", ".", "namespace", ",", "labels", "=", "s", ".", "metadata", ".", "labels", ",", "selector", "=", "s", ".", "spec", ".", "selector", ",", "spec", "=", "s", ".", "spec", ")", "for", "s", "in", "self", ".", "core_api", ".", "list_service_for_all_namespaces", "(", "watch", "=", "False", ")", ".", "items", "]" ]
List all available services. :param namespace: str, if not specified list services for all namespaces :return: collection of instances of :class:`conu.backend.k8s.service.Service`
[ "List", "all", "available", "services", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/k8s/backend.py#L85-L104
train
user-cont/conu
conu/backend/k8s/backend.py
K8sBackend.list_deployments
def list_deployments(self, namespace=None): """ List all available deployments. :param namespace: str, if not specified list deployments for all namespaces :return: collection of instances of :class:`conu.backend.k8s.deployment.Deployment` """ if namespace: return [Deployment(name=d.metadata.name, namespace=d.metadata.namespace, labels=d.metadata.labels, selector=d.spec.selector, image_metadata=ImageMetadata( name=d.spec.template.spec.containers[0].name.split("-", 1)[0])) for d in self.apps_api.list_namespaced_deployment(namespace, watch=False).items] return [Deployment(name=d.metadata.name, namespace=d.metadata.namespace, labels=d.metadata.labels, selector=d.spec.selector, image_metadata=ImageMetadata( name=d.spec.template.spec.containers[0].name.split("-", 1)[0])) for d in self.apps_api.list_deployment_for_all_namespaces(watch=False).items]
python
def list_deployments(self, namespace=None): """ List all available deployments. :param namespace: str, if not specified list deployments for all namespaces :return: collection of instances of :class:`conu.backend.k8s.deployment.Deployment` """ if namespace: return [Deployment(name=d.metadata.name, namespace=d.metadata.namespace, labels=d.metadata.labels, selector=d.spec.selector, image_metadata=ImageMetadata( name=d.spec.template.spec.containers[0].name.split("-", 1)[0])) for d in self.apps_api.list_namespaced_deployment(namespace, watch=False).items] return [Deployment(name=d.metadata.name, namespace=d.metadata.namespace, labels=d.metadata.labels, selector=d.spec.selector, image_metadata=ImageMetadata( name=d.spec.template.spec.containers[0].name.split("-", 1)[0])) for d in self.apps_api.list_deployment_for_all_namespaces(watch=False).items]
[ "def", "list_deployments", "(", "self", ",", "namespace", "=", "None", ")", ":", "if", "namespace", ":", "return", "[", "Deployment", "(", "name", "=", "d", ".", "metadata", ".", "name", ",", "namespace", "=", "d", ".", "metadata", ".", "namespace", ",", "labels", "=", "d", ".", "metadata", ".", "labels", ",", "selector", "=", "d", ".", "spec", ".", "selector", ",", "image_metadata", "=", "ImageMetadata", "(", "name", "=", "d", ".", "spec", ".", "template", ".", "spec", ".", "containers", "[", "0", "]", ".", "name", ".", "split", "(", "\"-\"", ",", "1", ")", "[", "0", "]", ")", ")", "for", "d", "in", "self", ".", "apps_api", ".", "list_namespaced_deployment", "(", "namespace", ",", "watch", "=", "False", ")", ".", "items", "]", "return", "[", "Deployment", "(", "name", "=", "d", ".", "metadata", ".", "name", ",", "namespace", "=", "d", ".", "metadata", ".", "namespace", ",", "labels", "=", "d", ".", "metadata", ".", "labels", ",", "selector", "=", "d", ".", "spec", ".", "selector", ",", "image_metadata", "=", "ImageMetadata", "(", "name", "=", "d", ".", "spec", ".", "template", ".", "spec", ".", "containers", "[", "0", "]", ".", "name", ".", "split", "(", "\"-\"", ",", "1", ")", "[", "0", "]", ")", ")", "for", "d", "in", "self", ".", "apps_api", ".", "list_deployment_for_all_namespaces", "(", "watch", "=", "False", ")", ".", "items", "]" ]
List all available deployments. :param namespace: str, if not specified list deployments for all namespaces :return: collection of instances of :class:`conu.backend.k8s.deployment.Deployment`
[ "List", "all", "available", "deployments", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/k8s/backend.py#L106-L127
train
user-cont/conu
conu/utils/http_client.py
get_url
def get_url(path, host, port, method="http"): """ make url from path, host and port :param method: str :param path: str, path within the request, e.g. "/api/version" :param host: str :param port: str or int :return: str """ return urlunsplit( (method, "%s:%s" % (host, port), path, "", "") )
python
def get_url(path, host, port, method="http"): """ make url from path, host and port :param method: str :param path: str, path within the request, e.g. "/api/version" :param host: str :param port: str or int :return: str """ return urlunsplit( (method, "%s:%s" % (host, port), path, "", "") )
[ "def", "get_url", "(", "path", ",", "host", ",", "port", ",", "method", "=", "\"http\"", ")", ":", "return", "urlunsplit", "(", "(", "method", ",", "\"%s:%s\"", "%", "(", "host", ",", "port", ")", ",", "path", ",", "\"\"", ",", "\"\"", ")", ")" ]
make url from path, host and port :param method: str :param path: str, path within the request, e.g. "/api/version" :param host: str :param port: str or int :return: str
[ "make", "url", "from", "path", "host", "and", "port" ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/utils/http_client.py#L20-L32
train
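A usage sketch for get_url; the import path follows the record's module path and the host/port values are illustrative:

from conu.utils.http_client import get_url

print(get_url("/api/version", "127.0.0.1", 8080))        # http://127.0.0.1:8080/api/version
print(get_url("/", "localhost", 443, method="https"))    # https://localhost:443/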
user-cont/conu
conu/backend/docker/backend.py
DockerBackend.list_containers
def list_containers(self): """ List all available docker containers. Container objects returned from this methods will contain a limited amount of metadata in property `short_metadata`. These are just a subset of `.inspect()`, but don't require an API call against dockerd. :return: collection of instances of :class:`conu.DockerContainer` """ result = [] for c in self.d.containers(all=True): name = None names = c.get("Names", None) if names: name = names[0] i = DockerImage(None, identifier=c["ImageID"]) cont = DockerContainer(i, c["Id"], name=name) # TODO: docker_client.containers produces different metadata than inspect inspect_to_container_metadata(cont.metadata, c, i) result.append(cont) return result
python
def list_containers(self): """ List all available docker containers. Container objects returned from this methods will contain a limited amount of metadata in property `short_metadata`. These are just a subset of `.inspect()`, but don't require an API call against dockerd. :return: collection of instances of :class:`conu.DockerContainer` """ result = [] for c in self.d.containers(all=True): name = None names = c.get("Names", None) if names: name = names[0] i = DockerImage(None, identifier=c["ImageID"]) cont = DockerContainer(i, c["Id"], name=name) # TODO: docker_client.containers produces different metadata than inspect inspect_to_container_metadata(cont.metadata, c, i) result.append(cont) return result
[ "def", "list_containers", "(", "self", ")", ":", "result", "=", "[", "]", "for", "c", "in", "self", ".", "d", ".", "containers", "(", "all", "=", "True", ")", ":", "name", "=", "None", "names", "=", "c", ".", "get", "(", "\"Names\"", ",", "None", ")", "if", "names", ":", "name", "=", "names", "[", "0", "]", "i", "=", "DockerImage", "(", "None", ",", "identifier", "=", "c", "[", "\"ImageID\"", "]", ")", "cont", "=", "DockerContainer", "(", "i", ",", "c", "[", "\"Id\"", "]", ",", "name", "=", "name", ")", "# TODO: docker_client.containers produces different metadata than inspect", "inspect_to_container_metadata", "(", "cont", ".", "metadata", ",", "c", ",", "i", ")", "result", ".", "append", "(", "cont", ")", "return", "result" ]
List all available docker containers. Container objects returned from this methods will contain a limited amount of metadata in property `short_metadata`. These are just a subset of `.inspect()`, but don't require an API call against dockerd. :return: collection of instances of :class:`conu.DockerContainer`
[ "List", "all", "available", "docker", "containers", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/backend.py#L85-L106
train
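A short usage sketch for the listing above; the top-level import and the context-manager form follow the usual conu pattern, and a running Docker daemon is assumed:

from conu import DockerBackend

with DockerBackend() as backend:
    for container in backend.list_containers():
        # only the cheap, partial metadata is populated here,
        # so no extra inspect() call is made per container
        print(container.get_id())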
user-cont/conu
conu/backend/docker/backend.py
DockerBackend.list_images
def list_images(self): """ List all available docker images. Image objects returned from this methods will contain a limited amount of metadata in property `short_metadata`. These are just a subset of `.inspect()`, but don't require an API call against dockerd. :return: collection of instances of :class:`conu.DockerImage` """ response = [] for im in self.d.images(): try: i_name, tag = parse_reference(im["RepoTags"][0]) except (IndexError, TypeError): i_name, tag = None, None d_im = DockerImage(i_name, tag=tag, identifier=im["Id"], pull_policy=DockerImagePullPolicy.NEVER) inspect_to_metadata(d_im.metadata, im) response.append(d_im) return response
python
def list_images(self): """ List all available docker images. Image objects returned from this methods will contain a limited amount of metadata in property `short_metadata`. These are just a subset of `.inspect()`, but don't require an API call against dockerd. :return: collection of instances of :class:`conu.DockerImage` """ response = [] for im in self.d.images(): try: i_name, tag = parse_reference(im["RepoTags"][0]) except (IndexError, TypeError): i_name, tag = None, None d_im = DockerImage(i_name, tag=tag, identifier=im["Id"], pull_policy=DockerImagePullPolicy.NEVER) inspect_to_metadata(d_im.metadata, im) response.append(d_im) return response
[ "def", "list_images", "(", "self", ")", ":", "response", "=", "[", "]", "for", "im", "in", "self", ".", "d", ".", "images", "(", ")", ":", "try", ":", "i_name", ",", "tag", "=", "parse_reference", "(", "im", "[", "\"RepoTags\"", "]", "[", "0", "]", ")", "except", "(", "IndexError", ",", "TypeError", ")", ":", "i_name", ",", "tag", "=", "None", ",", "None", "d_im", "=", "DockerImage", "(", "i_name", ",", "tag", "=", "tag", ",", "identifier", "=", "im", "[", "\"Id\"", "]", ",", "pull_policy", "=", "DockerImagePullPolicy", ".", "NEVER", ")", "inspect_to_metadata", "(", "d_im", ".", "metadata", ",", "im", ")", "response", ".", "append", "(", "d_im", ")", "return", "response" ]
List all available docker images. Image objects returned from this methods will contain a limited amount of metadata in property `short_metadata`. These are just a subset of `.inspect()`, but don't require an API call against dockerd. :return: collection of instances of :class:`conu.DockerImage`
[ "List", "all", "available", "docker", "images", "." ]
08caae7bb6bdd265b55bb106c3da6a7946a5a352
https://github.com/user-cont/conu/blob/08caae7bb6bdd265b55bb106c3da6a7946a5a352/conu/backend/docker/backend.py#L108-L129
train
mapbox/mapbox-cli-py
mapboxcli/scripts/mapmatching.py
match
def match(ctx, features, profile, gps_precision): """Mapbox Map Matching API lets you use snap your GPS traces to the OpenStreetMap road and path network. $ mapbox mapmatching trace.geojson An access token is required, see `mapbox --help`. """ access_token = (ctx.obj and ctx.obj.get('access_token')) or None features = list(features) if len(features) != 1: raise click.BadParameter( "Mapmatching requires a single LineString feature") service = mapbox.MapMatcher(access_token=access_token) try: res = service.match( features[0], profile=profile, gps_precision=gps_precision) except mapbox.errors.ValidationError as exc: raise click.BadParameter(str(exc)) if res.status_code == 200: stdout = click.open_file('-', 'w') click.echo(res.text, file=stdout) else: raise MapboxCLIException(res.text.strip())
python
def match(ctx, features, profile, gps_precision): """Mapbox Map Matching API lets you use snap your GPS traces to the OpenStreetMap road and path network. $ mapbox mapmatching trace.geojson An access token is required, see `mapbox --help`. """ access_token = (ctx.obj and ctx.obj.get('access_token')) or None features = list(features) if len(features) != 1: raise click.BadParameter( "Mapmatching requires a single LineString feature") service = mapbox.MapMatcher(access_token=access_token) try: res = service.match( features[0], profile=profile, gps_precision=gps_precision) except mapbox.errors.ValidationError as exc: raise click.BadParameter(str(exc)) if res.status_code == 200: stdout = click.open_file('-', 'w') click.echo(res.text, file=stdout) else: raise MapboxCLIException(res.text.strip())
[ "def", "match", "(", "ctx", ",", "features", ",", "profile", ",", "gps_precision", ")", ":", "access_token", "=", "(", "ctx", ".", "obj", "and", "ctx", ".", "obj", ".", "get", "(", "'access_token'", ")", ")", "or", "None", "features", "=", "list", "(", "features", ")", "if", "len", "(", "features", ")", "!=", "1", ":", "raise", "click", ".", "BadParameter", "(", "\"Mapmatching requires a single LineString feature\"", ")", "service", "=", "mapbox", ".", "MapMatcher", "(", "access_token", "=", "access_token", ")", "try", ":", "res", "=", "service", ".", "match", "(", "features", "[", "0", "]", ",", "profile", "=", "profile", ",", "gps_precision", "=", "gps_precision", ")", "except", "mapbox", ".", "errors", ".", "ValidationError", "as", "exc", ":", "raise", "click", ".", "BadParameter", "(", "str", "(", "exc", ")", ")", "if", "res", ".", "status_code", "==", "200", ":", "stdout", "=", "click", ".", "open_file", "(", "'-'", ",", "'w'", ")", "click", ".", "echo", "(", "res", ".", "text", ",", "file", "=", "stdout", ")", "else", ":", "raise", "MapboxCLIException", "(", "res", ".", "text", ".", "strip", "(", ")", ")" ]
Mapbox Map Matching API lets you use snap your GPS traces to the OpenStreetMap road and path network. $ mapbox mapmatching trace.geojson An access token is required, see `mapbox --help`.
[ "Mapbox", "Map", "Matching", "API", "lets", "you", "use", "snap", "your", "GPS", "traces", "to", "the", "OpenStreetMap", "road", "and", "path", "network", "." ]
b75544a2f83a4fda79d78b5673058e16e64a4f6d
https://github.com/mapbox/mapbox-cli-py/blob/b75544a2f83a4fda79d78b5673058e16e64a4f6d/mapboxcli/scripts/mapmatching.py#L15-L43
train
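Because match is a click command it can be exercised in-process with click's test runner; the token and trace file below are placeholders, and it is assumed that installing the package attaches the mapmatching subcommand to main_group:

from click.testing import CliRunner
from mapboxcli.scripts.cli import main_group

runner = CliRunner()
result = runner.invoke(
    main_group,
    ["--access-token", "MY_TOKEN", "mapmatching", "trace.geojson"],
)
print(result.exit_code)   # 0 on success
print(result.output)      # the matched GeoJSON returned by the API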
mapbox/mapbox-cli-py
mapboxcli/scripts/static.py
staticmap
def staticmap(ctx, mapid, output, features, lat, lon, zoom, size): """ Generate static map images from existing Mapbox map ids. Optionally overlay with geojson features. $ mapbox staticmap --features features.geojson mapbox.satellite out.png $ mapbox staticmap --lon -61.7 --lat 12.1 --zoom 12 mapbox.satellite out2.png An access token is required, see `mapbox --help`. """ access_token = (ctx.obj and ctx.obj.get('access_token')) or None if features: features = list( cligj.normalize_feature_inputs(None, 'features', [features])) service = mapbox.Static(access_token=access_token) try: res = service.image( mapid, lon=lon, lat=lat, z=zoom, width=size[0], height=size[1], features=features, sort_keys=True) except mapbox.errors.ValidationError as exc: raise click.BadParameter(str(exc)) if res.status_code == 200: output.write(res.content) else: raise MapboxCLIException(res.text.strip())
python
def staticmap(ctx, mapid, output, features, lat, lon, zoom, size): """ Generate static map images from existing Mapbox map ids. Optionally overlay with geojson features. $ mapbox staticmap --features features.geojson mapbox.satellite out.png $ mapbox staticmap --lon -61.7 --lat 12.1 --zoom 12 mapbox.satellite out2.png An access token is required, see `mapbox --help`. """ access_token = (ctx.obj and ctx.obj.get('access_token')) or None if features: features = list( cligj.normalize_feature_inputs(None, 'features', [features])) service = mapbox.Static(access_token=access_token) try: res = service.image( mapid, lon=lon, lat=lat, z=zoom, width=size[0], height=size[1], features=features, sort_keys=True) except mapbox.errors.ValidationError as exc: raise click.BadParameter(str(exc)) if res.status_code == 200: output.write(res.content) else: raise MapboxCLIException(res.text.strip())
[ "def", "staticmap", "(", "ctx", ",", "mapid", ",", "output", ",", "features", ",", "lat", ",", "lon", ",", "zoom", ",", "size", ")", ":", "access_token", "=", "(", "ctx", ".", "obj", "and", "ctx", ".", "obj", ".", "get", "(", "'access_token'", ")", ")", "or", "None", "if", "features", ":", "features", "=", "list", "(", "cligj", ".", "normalize_feature_inputs", "(", "None", ",", "'features'", ",", "[", "features", "]", ")", ")", "service", "=", "mapbox", ".", "Static", "(", "access_token", "=", "access_token", ")", "try", ":", "res", "=", "service", ".", "image", "(", "mapid", ",", "lon", "=", "lon", ",", "lat", "=", "lat", ",", "z", "=", "zoom", ",", "width", "=", "size", "[", "0", "]", ",", "height", "=", "size", "[", "1", "]", ",", "features", "=", "features", ",", "sort_keys", "=", "True", ")", "except", "mapbox", ".", "errors", ".", "ValidationError", "as", "exc", ":", "raise", "click", ".", "BadParameter", "(", "str", "(", "exc", ")", ")", "if", "res", ".", "status_code", "==", "200", ":", "output", ".", "write", "(", "res", ".", "content", ")", "else", ":", "raise", "MapboxCLIException", "(", "res", ".", "text", ".", "strip", "(", ")", ")" ]
Generate static map images from existing Mapbox map ids. Optionally overlay with geojson features. $ mapbox staticmap --features features.geojson mapbox.satellite out.png $ mapbox staticmap --lon -61.7 --lat 12.1 --zoom 12 mapbox.satellite out2.png An access token is required, see `mapbox --help`.
[ "Generate", "static", "map", "images", "from", "existing", "Mapbox", "map", "ids", ".", "Optionally", "overlay", "with", "geojson", "features", "." ]
b75544a2f83a4fda79d78b5673058e16e64a4f6d
https://github.com/mapbox/mapbox-cli-py/blob/b75544a2f83a4fda79d78b5673058e16e64a4f6d/mapboxcli/scripts/static.py#L18-L47
train
mapbox/mapbox-cli-py
mapboxcli/scripts/cli.py
main_group
def main_group(ctx, verbose, quiet, access_token, config): """This is the command line interface to Mapbox web services. Mapbox web services require an access token. Your token is shown on the https://www.mapbox.com/studio/account/tokens/ page when you are logged in. The token can be provided on the command line $ mapbox --access-token MY_TOKEN ... as an environment variable named MAPBOX_ACCESS_TOKEN (higher precedence) or MapboxAccessToken (lower precedence). \b $ export MAPBOX_ACCESS_TOKEN=MY_TOKEN $ mapbox ... or in a config file \b ; configuration file mapbox.ini [mapbox] access-token = MY_TOKEN The OS-dependent default config file path is something like \b ~/Library/Application Support/mapbox/mapbox.ini ~/.config/mapbox/mapbox.ini ~/.mapbox/mapbox.ini """ ctx.obj = {} config = config or os.path.join(click.get_app_dir('mapbox'), 'mapbox.ini') cfg = read_config(config) if cfg: ctx.obj['config_file'] = config ctx.obj['cfg'] = cfg ctx.default_map = cfg verbosity = (os.environ.get('MAPBOX_VERBOSE') or ctx.lookup_default('mapbox.verbosity') or 0) if verbose or quiet: verbosity = verbose - quiet verbosity = int(verbosity) configure_logging(verbosity) access_token = (access_token or os.environ.get('MAPBOX_ACCESS_TOKEN') or os.environ.get('MapboxAccessToken') or ctx.lookup_default('mapbox.access-token')) ctx.obj['verbosity'] = verbosity ctx.obj['access_token'] = access_token
python
def main_group(ctx, verbose, quiet, access_token, config): """This is the command line interface to Mapbox web services. Mapbox web services require an access token. Your token is shown on the https://www.mapbox.com/studio/account/tokens/ page when you are logged in. The token can be provided on the command line $ mapbox --access-token MY_TOKEN ... as an environment variable named MAPBOX_ACCESS_TOKEN (higher precedence) or MapboxAccessToken (lower precedence). \b $ export MAPBOX_ACCESS_TOKEN=MY_TOKEN $ mapbox ... or in a config file \b ; configuration file mapbox.ini [mapbox] access-token = MY_TOKEN The OS-dependent default config file path is something like \b ~/Library/Application Support/mapbox/mapbox.ini ~/.config/mapbox/mapbox.ini ~/.mapbox/mapbox.ini """ ctx.obj = {} config = config or os.path.join(click.get_app_dir('mapbox'), 'mapbox.ini') cfg = read_config(config) if cfg: ctx.obj['config_file'] = config ctx.obj['cfg'] = cfg ctx.default_map = cfg verbosity = (os.environ.get('MAPBOX_VERBOSE') or ctx.lookup_default('mapbox.verbosity') or 0) if verbose or quiet: verbosity = verbose - quiet verbosity = int(verbosity) configure_logging(verbosity) access_token = (access_token or os.environ.get('MAPBOX_ACCESS_TOKEN') or os.environ.get('MapboxAccessToken') or ctx.lookup_default('mapbox.access-token')) ctx.obj['verbosity'] = verbosity ctx.obj['access_token'] = access_token
[ "def", "main_group", "(", "ctx", ",", "verbose", ",", "quiet", ",", "access_token", ",", "config", ")", ":", "ctx", ".", "obj", "=", "{", "}", "config", "=", "config", "or", "os", ".", "path", ".", "join", "(", "click", ".", "get_app_dir", "(", "'mapbox'", ")", ",", "'mapbox.ini'", ")", "cfg", "=", "read_config", "(", "config", ")", "if", "cfg", ":", "ctx", ".", "obj", "[", "'config_file'", "]", "=", "config", "ctx", ".", "obj", "[", "'cfg'", "]", "=", "cfg", "ctx", ".", "default_map", "=", "cfg", "verbosity", "=", "(", "os", ".", "environ", ".", "get", "(", "'MAPBOX_VERBOSE'", ")", "or", "ctx", ".", "lookup_default", "(", "'mapbox.verbosity'", ")", "or", "0", ")", "if", "verbose", "or", "quiet", ":", "verbosity", "=", "verbose", "-", "quiet", "verbosity", "=", "int", "(", "verbosity", ")", "configure_logging", "(", "verbosity", ")", "access_token", "=", "(", "access_token", "or", "os", ".", "environ", ".", "get", "(", "'MAPBOX_ACCESS_TOKEN'", ")", "or", "os", ".", "environ", ".", "get", "(", "'MapboxAccessToken'", ")", "or", "ctx", ".", "lookup_default", "(", "'mapbox.access-token'", ")", ")", "ctx", ".", "obj", "[", "'verbosity'", "]", "=", "verbosity", "ctx", ".", "obj", "[", "'access_token'", "]", "=", "access_token" ]
This is the command line interface to Mapbox web services. Mapbox web services require an access token. Your token is shown on the https://www.mapbox.com/studio/account/tokens/ page when you are logged in. The token can be provided on the command line $ mapbox --access-token MY_TOKEN ... as an environment variable named MAPBOX_ACCESS_TOKEN (higher precedence) or MapboxAccessToken (lower precedence). \b $ export MAPBOX_ACCESS_TOKEN=MY_TOKEN $ mapbox ... or in a config file \b ; configuration file mapbox.ini [mapbox] access-token = MY_TOKEN The OS-dependent default config file path is something like \b ~/Library/Application Support/mapbox/mapbox.ini ~/.config/mapbox/mapbox.ini ~/.mapbox/mapbox.ini
[ "This", "is", "the", "command", "line", "interface", "to", "Mapbox", "web", "services", "." ]
b75544a2f83a4fda79d78b5673058e16e64a4f6d
https://github.com/mapbox/mapbox-cli-py/blob/b75544a2f83a4fda79d78b5673058e16e64a4f6d/mapboxcli/scripts/cli.py#L43-L94
train
mapbox/mapbox-cli-py
mapboxcli/scripts/config.py
config
def config(ctx): """Show access token and other configuration settings. The access token and command verbosity level can be set on the command line, as environment variables, and in mapbox.ini config files. """ ctx.default_map = ctx.obj['cfg'] click.echo("CLI:") click.echo("access-token = {0}".format(ctx.obj['access_token'])) click.echo("verbosity = {0}".format(ctx.obj['verbosity'])) click.echo("") click.echo("Environment:") if 'MAPBOX_ACCESS_TOKEN' in os.environ: click.echo("MAPBOX_ACCESS_TOKEN = {0}".format( os.environ['MAPBOX_ACCESS_TOKEN'])) if 'MapboxAccessToken' in os.environ: click.echo("MapboxAccessToken = {0}".format( os.environ['MapboxAccessToken'])) if 'MAPBOX_VERBOSE' in os.environ: click.echo("MAPBOX_VERBOSE = {0}".format( os.environ['MAPBOX_VERBOSE'])) click.echo("") if 'config_file' in ctx.obj: click.echo("Config file {0}:".format(ctx.obj['config_file'])) for key, value in ctx.default_map.items(): click.echo("{0} = {1}".format(key, value)) click.echo("")
python
def config(ctx): """Show access token and other configuration settings. The access token and command verbosity level can be set on the command line, as environment variables, and in mapbox.ini config files. """ ctx.default_map = ctx.obj['cfg'] click.echo("CLI:") click.echo("access-token = {0}".format(ctx.obj['access_token'])) click.echo("verbosity = {0}".format(ctx.obj['verbosity'])) click.echo("") click.echo("Environment:") if 'MAPBOX_ACCESS_TOKEN' in os.environ: click.echo("MAPBOX_ACCESS_TOKEN = {0}".format( os.environ['MAPBOX_ACCESS_TOKEN'])) if 'MapboxAccessToken' in os.environ: click.echo("MapboxAccessToken = {0}".format( os.environ['MapboxAccessToken'])) if 'MAPBOX_VERBOSE' in os.environ: click.echo("MAPBOX_VERBOSE = {0}".format( os.environ['MAPBOX_VERBOSE'])) click.echo("") if 'config_file' in ctx.obj: click.echo("Config file {0}:".format(ctx.obj['config_file'])) for key, value in ctx.default_map.items(): click.echo("{0} = {1}".format(key, value)) click.echo("")
[ "def", "config", "(", "ctx", ")", ":", "ctx", ".", "default_map", "=", "ctx", ".", "obj", "[", "'cfg'", "]", "click", ".", "echo", "(", "\"CLI:\"", ")", "click", ".", "echo", "(", "\"access-token = {0}\"", ".", "format", "(", "ctx", ".", "obj", "[", "'access_token'", "]", ")", ")", "click", ".", "echo", "(", "\"verbosity = {0}\"", ".", "format", "(", "ctx", ".", "obj", "[", "'verbosity'", "]", ")", ")", "click", ".", "echo", "(", "\"\"", ")", "click", ".", "echo", "(", "\"Environment:\"", ")", "if", "'MAPBOX_ACCESS_TOKEN'", "in", "os", ".", "environ", ":", "click", ".", "echo", "(", "\"MAPBOX_ACCESS_TOKEN = {0}\"", ".", "format", "(", "os", ".", "environ", "[", "'MAPBOX_ACCESS_TOKEN'", "]", ")", ")", "if", "'MapboxAccessToken'", "in", "os", ".", "environ", ":", "click", ".", "echo", "(", "\"MapboxAccessToken = {0}\"", ".", "format", "(", "os", ".", "environ", "[", "'MapboxAccessToken'", "]", ")", ")", "if", "'MAPBOX_VERBOSE'", "in", "os", ".", "environ", ":", "click", ".", "echo", "(", "\"MAPBOX_VERBOSE = {0}\"", ".", "format", "(", "os", ".", "environ", "[", "'MAPBOX_VERBOSE'", "]", ")", ")", "click", ".", "echo", "(", "\"\"", ")", "if", "'config_file'", "in", "ctx", ".", "obj", ":", "click", ".", "echo", "(", "\"Config file {0}:\"", ".", "format", "(", "ctx", ".", "obj", "[", "'config_file'", "]", ")", ")", "for", "key", ",", "value", "in", "ctx", ".", "default_map", ".", "items", "(", ")", ":", "click", ".", "echo", "(", "\"{0} = {1}\"", ".", "format", "(", "key", ",", "value", ")", ")", "click", ".", "echo", "(", "\"\"", ")" ]
Show access token and other configuration settings. The access token and command verbosity level can be set on the command line, as environment variables, and in mapbox.ini config files.
[ "Show", "access", "token", "and", "other", "configuration", "settings", "." ]
b75544a2f83a4fda79d78b5673058e16e64a4f6d
https://github.com/mapbox/mapbox-cli-py/blob/b75544a2f83a4fda79d78b5673058e16e64a4f6d/mapboxcli/scripts/config.py#L8-L37
train
mapbox/mapbox-cli-py
mapboxcli/scripts/geocoding.py
echo_headers
def echo_headers(headers, file=None): """Echo headers, sorted.""" for k, v in sorted(headers.items()): click.echo("{0}: {1}".format(k.title(), v), file=file) click.echo(file=file)
python
def echo_headers(headers, file=None): """Echo headers, sorted.""" for k, v in sorted(headers.items()): click.echo("{0}: {1}".format(k.title(), v), file=file) click.echo(file=file)
[ "def", "echo_headers", "(", "headers", ",", "file", "=", "None", ")", ":", "for", "k", ",", "v", "in", "sorted", "(", "headers", ".", "items", "(", ")", ")", ":", "click", ".", "echo", "(", "\"{0}: {1}\"", ".", "format", "(", "k", ".", "title", "(", ")", ",", "v", ")", ",", "file", "=", "file", ")", "click", ".", "echo", "(", "file", "=", "file", ")" ]
Echo headers, sorted.
[ "Echo", "headers", "sorted", "." ]
b75544a2f83a4fda79d78b5673058e16e64a4f6d
https://github.com/mapbox/mapbox-cli-py/blob/b75544a2f83a4fda79d78b5673058e16e64a4f6d/mapboxcli/scripts/geocoding.py#L34-L38
train
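echo_headers is small enough to pin down with a single call: keys come out sorted and title-cased, followed by a blank line from the final click.echo:

from mapboxcli.scripts.geocoding import echo_headers

echo_headers({"content-type": "application/json", "cache-control": "no-cache"})
# Cache-Control: no-cache
# Content-Type: application/json
# (blank line)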
mapbox/mapbox-cli-py
mapboxcli/scripts/datasets.py
datasets
def datasets(ctx): """Read and write GeoJSON from Mapbox-hosted datasets All endpoints require authentication. An access token with appropriate dataset scopes is required, see `mapbox --help`. Note that this API is currently a limited-access beta. """ access_token = (ctx.obj and ctx.obj.get('access_token')) or None service = mapbox.Datasets(access_token=access_token) ctx.obj['service'] = service
python
def datasets(ctx): """Read and write GeoJSON from Mapbox-hosted datasets All endpoints require authentication. An access token with appropriate dataset scopes is required, see `mapbox --help`. Note that this API is currently a limited-access beta. """ access_token = (ctx.obj and ctx.obj.get('access_token')) or None service = mapbox.Datasets(access_token=access_token) ctx.obj['service'] = service
[ "def", "datasets", "(", "ctx", ")", ":", "access_token", "=", "(", "ctx", ".", "obj", "and", "ctx", ".", "obj", ".", "get", "(", "'access_token'", ")", ")", "or", "None", "service", "=", "mapbox", ".", "Datasets", "(", "access_token", "=", "access_token", ")", "ctx", ".", "obj", "[", "'service'", "]", "=", "service" ]
Read and write GeoJSON from Mapbox-hosted datasets All endpoints require authentication. An access token with appropriate dataset scopes is required, see `mapbox --help`. Note that this API is currently a limited-access beta.
[ "Read", "and", "write", "GeoJSON", "from", "Mapbox", "-", "hosted", "datasets" ]
b75544a2f83a4fda79d78b5673058e16e64a4f6d
https://github.com/mapbox/mapbox-cli-py/blob/b75544a2f83a4fda79d78b5673058e16e64a4f6d/mapboxcli/scripts/datasets.py#L13-L24
train
mapbox/mapbox-cli-py
mapboxcli/scripts/datasets.py
create
def create(ctx, name, description): """Create a new dataset. Prints a JSON object containing the attributes of the new dataset. $ mapbox datasets create All endpoints require authentication. An access token with `datasets:write` scope is required, see `mapbox --help`. """ service = ctx.obj.get('service') res = service.create(name, description) if res.status_code == 200: click.echo(res.text) else: raise MapboxCLIException(res.text.strip())
python
def create(ctx, name, description): """Create a new dataset. Prints a JSON object containing the attributes of the new dataset. $ mapbox datasets create All endpoints require authentication. An access token with `datasets:write` scope is required, see `mapbox --help`. """ service = ctx.obj.get('service') res = service.create(name, description) if res.status_code == 200: click.echo(res.text) else: raise MapboxCLIException(res.text.strip())
[ "def", "create", "(", "ctx", ",", "name", ",", "description", ")", ":", "service", "=", "ctx", ".", "obj", ".", "get", "(", "'service'", ")", "res", "=", "service", ".", "create", "(", "name", ",", "description", ")", "if", "res", ".", "status_code", "==", "200", ":", "click", ".", "echo", "(", "res", ".", "text", ")", "else", ":", "raise", "MapboxCLIException", "(", "res", ".", "text", ".", "strip", "(", ")", ")" ]
Create a new dataset. Prints a JSON object containing the attributes of the new dataset. $ mapbox datasets create All endpoints require authentication. An access token with `datasets:write` scope is required, see `mapbox --help`.
[ "Create", "a", "new", "dataset", "." ]
b75544a2f83a4fda79d78b5673058e16e64a4f6d
https://github.com/mapbox/mapbox-cli-py/blob/b75544a2f83a4fda79d78b5673058e16e64a4f6d/mapboxcli/scripts/datasets.py#L56-L74
train
mapbox/mapbox-cli-py
mapboxcli/scripts/datasets.py
read_dataset
def read_dataset(ctx, dataset, output): """Read the attributes of a dataset. Prints a JSON object containing the attributes of a dataset. The attributes: owner (a Mapbox account), id (dataset id), created (Unix timestamp), modified (timestamp), name (string), and description (string). $ mapbox datasets read-dataset dataset-id All endpoints require authentication. An access token with `datasets:read` scope is required, see `mapbox --help`. """ stdout = click.open_file(output, 'w') service = ctx.obj.get('service') res = service.read_dataset(dataset) if res.status_code == 200: click.echo(res.text, file=stdout) else: raise MapboxCLIException(res.text.strip())
python
def read_dataset(ctx, dataset, output): """Read the attributes of a dataset. Prints a JSON object containing the attributes of a dataset. The attributes: owner (a Mapbox account), id (dataset id), created (Unix timestamp), modified (timestamp), name (string), and description (string). $ mapbox datasets read-dataset dataset-id All endpoints require authentication. An access token with `datasets:read` scope is required, see `mapbox --help`. """ stdout = click.open_file(output, 'w') service = ctx.obj.get('service') res = service.read_dataset(dataset) if res.status_code == 200: click.echo(res.text, file=stdout) else: raise MapboxCLIException(res.text.strip())
[ "def", "read_dataset", "(", "ctx", ",", "dataset", ",", "output", ")", ":", "stdout", "=", "click", ".", "open_file", "(", "output", ",", "'w'", ")", "service", "=", "ctx", ".", "obj", ".", "get", "(", "'service'", ")", "res", "=", "service", ".", "read_dataset", "(", "dataset", ")", "if", "res", ".", "status_code", "==", "200", ":", "click", ".", "echo", "(", "res", ".", "text", ",", "file", "=", "stdout", ")", "else", ":", "raise", "MapboxCLIException", "(", "res", ".", "text", ".", "strip", "(", ")", ")" ]
Read the attributes of a dataset. Prints a JSON object containing the attributes of a dataset. The attributes: owner (a Mapbox account), id (dataset id), created (Unix timestamp), modified (timestamp), name (string), and description (string). $ mapbox datasets read-dataset dataset-id All endpoints require authentication. An access token with `datasets:read` scope is required, see `mapbox --help`.
[ "Read", "the", "attributes", "of", "a", "dataset", "." ]
b75544a2f83a4fda79d78b5673058e16e64a4f6d
https://github.com/mapbox/mapbox-cli-py/blob/b75544a2f83a4fda79d78b5673058e16e64a4f6d/mapboxcli/scripts/datasets.py#L82-L103
train
mapbox/mapbox-cli-py
mapboxcli/scripts/datasets.py
list_features
def list_features(ctx, dataset, reverse, start, limit, output): """Get features of a dataset. Prints the features of the dataset as a GeoJSON feature collection. $ mapbox datasets list-features dataset-id All endpoints require authentication. An access token with `datasets:read` scope is required, see `mapbox --help`. """ stdout = click.open_file(output, 'w') service = ctx.obj.get('service') res = service.list_features(dataset, reverse, start, limit) if res.status_code == 200: click.echo(res.text, file=stdout) else: raise MapboxCLIException(res.text.strip())
python
def list_features(ctx, dataset, reverse, start, limit, output): """Get features of a dataset. Prints the features of the dataset as a GeoJSON feature collection. $ mapbox datasets list-features dataset-id All endpoints require authentication. An access token with `datasets:read` scope is required, see `mapbox --help`. """ stdout = click.open_file(output, 'w') service = ctx.obj.get('service') res = service.list_features(dataset, reverse, start, limit) if res.status_code == 200: click.echo(res.text, file=stdout) else: raise MapboxCLIException(res.text.strip())
[ "def", "list_features", "(", "ctx", ",", "dataset", ",", "reverse", ",", "start", ",", "limit", ",", "output", ")", ":", "stdout", "=", "click", ".", "open_file", "(", "output", ",", "'w'", ")", "service", "=", "ctx", ".", "obj", ".", "get", "(", "'service'", ")", "res", "=", "service", ".", "list_features", "(", "dataset", ",", "reverse", ",", "start", ",", "limit", ")", "if", "res", ".", "status_code", "==", "200", ":", "click", ".", "echo", "(", "res", ".", "text", ",", "file", "=", "stdout", ")", "else", ":", "raise", "MapboxCLIException", "(", "res", ".", "text", ".", "strip", "(", ")", ")" ]
Get features of a dataset. Prints the features of the dataset as a GeoJSON feature collection. $ mapbox datasets list-features dataset-id All endpoints require authentication. An access token with `datasets:read` scope is required, see `mapbox --help`.
[ "Get", "features", "of", "a", "dataset", "." ]
b75544a2f83a4fda79d78b5673058e16e64a4f6d
https://github.com/mapbox/mapbox-cli-py/blob/b75544a2f83a4fda79d78b5673058e16e64a4f6d/mapboxcli/scripts/datasets.py#L165-L183
train
mapbox/mapbox-cli-py
mapboxcli/scripts/datasets.py
put_feature
def put_feature(ctx, dataset, fid, feature, input): """Create or update a dataset feature. The semantics of HTTP PUT apply: if the dataset has no feature with the given `fid` a new feature will be created. Returns a GeoJSON representation of the new or updated feature. $ mapbox datasets put-feature dataset-id feature-id 'geojson-feature' All endpoints require authentication. An access token with `datasets:write` scope is required, see `mapbox --help`. """ if feature is None: stdin = click.open_file(input, 'r') feature = stdin.read() feature = json.loads(feature) service = ctx.obj.get('service') res = service.update_feature(dataset, fid, feature) if res.status_code == 200: click.echo(res.text) else: raise MapboxCLIException(res.text.strip())
python
def put_feature(ctx, dataset, fid, feature, input): """Create or update a dataset feature. The semantics of HTTP PUT apply: if the dataset has no feature with the given `fid` a new feature will be created. Returns a GeoJSON representation of the new or updated feature. $ mapbox datasets put-feature dataset-id feature-id 'geojson-feature' All endpoints require authentication. An access token with `datasets:write` scope is required, see `mapbox --help`. """ if feature is None: stdin = click.open_file(input, 'r') feature = stdin.read() feature = json.loads(feature) service = ctx.obj.get('service') res = service.update_feature(dataset, fid, feature) if res.status_code == 200: click.echo(res.text) else: raise MapboxCLIException(res.text.strip())
[ "def", "put_feature", "(", "ctx", ",", "dataset", ",", "fid", ",", "feature", ",", "input", ")", ":", "if", "feature", "is", "None", ":", "stdin", "=", "click", ".", "open_file", "(", "input", ",", "'r'", ")", "feature", "=", "stdin", ".", "read", "(", ")", "feature", "=", "json", ".", "loads", "(", "feature", ")", "service", "=", "ctx", ".", "obj", ".", "get", "(", "'service'", ")", "res", "=", "service", ".", "update_feature", "(", "dataset", ",", "fid", ",", "feature", ")", "if", "res", ".", "status_code", "==", "200", ":", "click", ".", "echo", "(", "res", ".", "text", ")", "else", ":", "raise", "MapboxCLIException", "(", "res", ".", "text", ".", "strip", "(", ")", ")" ]
Create or update a dataset feature. The semantics of HTTP PUT apply: if the dataset has no feature with the given `fid` a new feature will be created. Returns a GeoJSON representation of the new or updated feature. $ mapbox datasets put-feature dataset-id feature-id 'geojson-feature' All endpoints require authentication. An access token with `datasets:write` scope is required, see `mapbox --help`.
[ "Create", "or", "update", "a", "dataset", "feature", "." ]
b75544a2f83a4fda79d78b5673058e16e64a4f6d
https://github.com/mapbox/mapbox-cli-py/blob/b75544a2f83a4fda79d78b5673058e16e64a4f6d/mapboxcli/scripts/datasets.py#L221-L246
train
mapbox/mapbox-cli-py
mapboxcli/scripts/datasets.py
delete_feature
def delete_feature(ctx, dataset, fid): """Delete a feature. $ mapbox datasets delete-feature dataset-id feature-id All endpoints require authentication. An access token with `datasets:write` scope is required, see `mapbox --help`. """ service = ctx.obj.get('service') res = service.delete_feature(dataset, fid) if res.status_code != 204: raise MapboxCLIException(res.text.strip())
python
def delete_feature(ctx, dataset, fid): """Delete a feature. $ mapbox datasets delete-feature dataset-id feature-id All endpoints require authentication. An access token with `datasets:write` scope is required, see `mapbox --help`. """ service = ctx.obj.get('service') res = service.delete_feature(dataset, fid) if res.status_code != 204: raise MapboxCLIException(res.text.strip())
[ "def", "delete_feature", "(", "ctx", ",", "dataset", ",", "fid", ")", ":", "service", "=", "ctx", ".", "obj", ".", "get", "(", "'service'", ")", "res", "=", "service", ".", "delete_feature", "(", "dataset", ",", "fid", ")", "if", "res", ".", "status_code", "!=", "204", ":", "raise", "MapboxCLIException", "(", "res", ".", "text", ".", "strip", "(", ")", ")" ]
Delete a feature. $ mapbox datasets delete-feature dataset-id feature-id All endpoints require authentication. An access token with `datasets:write` scope is required, see `mapbox --help`.
[ "Delete", "a", "feature", "." ]
b75544a2f83a4fda79d78b5673058e16e64a4f6d
https://github.com/mapbox/mapbox-cli-py/blob/b75544a2f83a4fda79d78b5673058e16e64a4f6d/mapboxcli/scripts/datasets.py#L254-L267
train
mapbox/mapbox-cli-py
mapboxcli/scripts/datasets.py
create_tileset
def create_tileset(ctx, dataset, tileset, name): """Create a vector tileset from a dataset. $ mapbox datasets create-tileset dataset-id username.data Note that the tileset must start with your username and the dataset must be one that you own. To view processing status, visit https://www.mapbox.com/data/. You may not generate another tilesets from the same dataset until the first processing job has completed. All endpoints require authentication. An access token with `uploads:write` scope is required, see `mapbox --help`. """ access_token = (ctx.obj and ctx.obj.get('access_token')) or None service = mapbox.Uploader(access_token=access_token) uri = "mapbox://datasets/{username}/{dataset}".format( username=tileset.split('.')[0], dataset=dataset) res = service.create(uri, tileset, name) if res.status_code == 201: click.echo(res.text) else: raise MapboxCLIException(res.text.strip())
python
def create_tileset(ctx, dataset, tileset, name): """Create a vector tileset from a dataset. $ mapbox datasets create-tileset dataset-id username.data Note that the tileset must start with your username and the dataset must be one that you own. To view processing status, visit https://www.mapbox.com/data/. You may not generate another tilesets from the same dataset until the first processing job has completed. All endpoints require authentication. An access token with `uploads:write` scope is required, see `mapbox --help`. """ access_token = (ctx.obj and ctx.obj.get('access_token')) or None service = mapbox.Uploader(access_token=access_token) uri = "mapbox://datasets/{username}/{dataset}".format( username=tileset.split('.')[0], dataset=dataset) res = service.create(uri, tileset, name) if res.status_code == 201: click.echo(res.text) else: raise MapboxCLIException(res.text.strip())
[ "def", "create_tileset", "(", "ctx", ",", "dataset", ",", "tileset", ",", "name", ")", ":", "access_token", "=", "(", "ctx", ".", "obj", "and", "ctx", ".", "obj", ".", "get", "(", "'access_token'", ")", ")", "or", "None", "service", "=", "mapbox", ".", "Uploader", "(", "access_token", "=", "access_token", ")", "uri", "=", "\"mapbox://datasets/{username}/{dataset}\"", ".", "format", "(", "username", "=", "tileset", ".", "split", "(", "'.'", ")", "[", "0", "]", ",", "dataset", "=", "dataset", ")", "res", "=", "service", ".", "create", "(", "uri", ",", "tileset", ",", "name", ")", "if", "res", ".", "status_code", "==", "201", ":", "click", ".", "echo", "(", "res", ".", "text", ")", "else", ":", "raise", "MapboxCLIException", "(", "res", ".", "text", ".", "strip", "(", ")", ")" ]
Create a vector tileset from a dataset. $ mapbox datasets create-tileset dataset-id username.data Note that the tileset must start with your username and the dataset must be one that you own. To view processing status, visit https://www.mapbox.com/data/. You may not generate another tilesets from the same dataset until the first processing job has completed. All endpoints require authentication. An access token with `uploads:write` scope is required, see `mapbox --help`.
[ "Create", "a", "vector", "tileset", "from", "a", "dataset", "." ]
b75544a2f83a4fda79d78b5673058e16e64a4f6d
https://github.com/mapbox/mapbox-cli-py/blob/b75544a2f83a4fda79d78b5673058e16e64a4f6d/mapboxcli/scripts/datasets.py#L276-L301
train
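The only string handling in create_tileset is building the dataset URI; reproducing that line on its own makes the username/tileset relationship explicit (the values are placeholders):

tileset = "username.data"   # must start with your Mapbox username
dataset = "abc123"          # the dataset id

uri = "mapbox://datasets/{username}/{dataset}".format(
    username=tileset.split(".")[0],
    dataset=dataset,
)
print(uri)  # mapbox://datasets/username/abc123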
mapbox/mapbox-cli-py
mapboxcli/scripts/directions.py
directions
def directions(ctx, features, profile, alternatives, geometries, overview, steps, continue_straight, waypoint_snapping, annotations, language, output): """The Mapbox Directions API will show you how to get where you're going. mapbox directions "[0, 0]" "[1, 1]" An access token is required. See "mapbox --help". """ access_token = (ctx.obj and ctx.obj.get("access_token")) or None service = mapbox.Directions(access_token=access_token) # The Directions SDK expects False to be # a bool, not a str. if overview == "False": overview = False # When using waypoint snapping, the # Directions SDK expects features to be # a list, not a generator. if waypoint_snapping is not None: features = list(features) if annotations: annotations = annotations.split(",") stdout = click.open_file(output, "w") try: res = service.directions( features, profile=profile, alternatives=alternatives, geometries=geometries, overview=overview, steps=steps, continue_straight=continue_straight, waypoint_snapping=waypoint_snapping, annotations=annotations, language=language ) except mapbox.errors.ValidationError as exc: raise click.BadParameter(str(exc)) if res.status_code == 200: if geometries == "geojson": click.echo(json.dumps(res.geojson()), file=stdout) else: click.echo(res.text, file=stdout) else: raise MapboxCLIException(res.text.strip())
python
def directions(ctx, features, profile, alternatives, geometries, overview, steps, continue_straight, waypoint_snapping, annotations, language, output): """The Mapbox Directions API will show you how to get where you're going. mapbox directions "[0, 0]" "[1, 1]" An access token is required. See "mapbox --help". """ access_token = (ctx.obj and ctx.obj.get("access_token")) or None service = mapbox.Directions(access_token=access_token) # The Directions SDK expects False to be # a bool, not a str. if overview == "False": overview = False # When using waypoint snapping, the # Directions SDK expects features to be # a list, not a generator. if waypoint_snapping is not None: features = list(features) if annotations: annotations = annotations.split(",") stdout = click.open_file(output, "w") try: res = service.directions( features, profile=profile, alternatives=alternatives, geometries=geometries, overview=overview, steps=steps, continue_straight=continue_straight, waypoint_snapping=waypoint_snapping, annotations=annotations, language=language ) except mapbox.errors.ValidationError as exc: raise click.BadParameter(str(exc)) if res.status_code == 200: if geometries == "geojson": click.echo(json.dumps(res.geojson()), file=stdout) else: click.echo(res.text, file=stdout) else: raise MapboxCLIException(res.text.strip())
[ "def", "directions", "(", "ctx", ",", "features", ",", "profile", ",", "alternatives", ",", "geometries", ",", "overview", ",", "steps", ",", "continue_straight", ",", "waypoint_snapping", ",", "annotations", ",", "language", ",", "output", ")", ":", "access_token", "=", "(", "ctx", ".", "obj", "and", "ctx", ".", "obj", ".", "get", "(", "\"access_token\"", ")", ")", "or", "None", "service", "=", "mapbox", ".", "Directions", "(", "access_token", "=", "access_token", ")", "# The Directions SDK expects False to be", "# a bool, not a str.", "if", "overview", "==", "\"False\"", ":", "overview", "=", "False", "# When using waypoint snapping, the ", "# Directions SDK expects features to be ", "# a list, not a generator.", "if", "waypoint_snapping", "is", "not", "None", ":", "features", "=", "list", "(", "features", ")", "if", "annotations", ":", "annotations", "=", "annotations", ".", "split", "(", "\",\"", ")", "stdout", "=", "click", ".", "open_file", "(", "output", ",", "\"w\"", ")", "try", ":", "res", "=", "service", ".", "directions", "(", "features", ",", "profile", "=", "profile", ",", "alternatives", "=", "alternatives", ",", "geometries", "=", "geometries", ",", "overview", "=", "overview", ",", "steps", "=", "steps", ",", "continue_straight", "=", "continue_straight", ",", "waypoint_snapping", "=", "waypoint_snapping", ",", "annotations", "=", "annotations", ",", "language", "=", "language", ")", "except", "mapbox", ".", "errors", ".", "ValidationError", "as", "exc", ":", "raise", "click", ".", "BadParameter", "(", "str", "(", "exc", ")", ")", "if", "res", ".", "status_code", "==", "200", ":", "if", "geometries", "==", "\"geojson\"", ":", "click", ".", "echo", "(", "json", ".", "dumps", "(", "res", ".", "geojson", "(", ")", ")", ",", "file", "=", "stdout", ")", "else", ":", "click", ".", "echo", "(", "res", ".", "text", ",", "file", "=", "stdout", ")", "else", ":", "raise", "MapboxCLIException", "(", "res", ".", "text", ".", "strip", "(", ")", ")" ]
The Mapbox Directions API will show you how to get where you're going. mapbox directions "[0, 0]" "[1, 1]" An access token is required. See "mapbox --help".
[ "The", "Mapbox", "Directions", "API", "will", "show", "you", "how", "to", "get", "where", "you", "re", "going", "." ]
b75544a2f83a4fda79d78b5673058e16e64a4f6d
https://github.com/mapbox/mapbox-cli-py/blob/b75544a2f83a4fda79d78b5673058e16e64a4f6d/mapboxcli/scripts/directions.py#L163-L218
train
mapbox/mapbox-cli-py
mapboxcli/scripts/uploads.py
upload
def upload(ctx, tileset, datasource, name, patch): """Upload data to Mapbox accounts. Uploaded data lands at https://www.mapbox.com/data/ and can be used in new or existing projects. All endpoints require authentication. You can specify the tileset id and input file $ mapbox upload username.data mydata.geojson Or specify just the tileset id and take an input file on stdin $ cat mydata.geojson | mapbox upload username.data The --name option defines the title as it appears in Studio and defaults to the last part of the tileset id, e.g. "data" Note that the tileset must start with your username. An access token with upload scope is required, see `mapbox --help`. Your account must be flagged in order to use the patch mode feature. """ access_token = (ctx.obj and ctx.obj.get('access_token')) or None service = mapbox.Uploader(access_token=access_token) if name is None: name = tileset.split(".")[-1] if datasource.startswith('https://'): # Skip staging. Note this this only works for specific buckets. res = service.create(datasource, tileset, name=name, patch=patch) else: sourcefile = click.File('rb')(datasource) if hasattr(sourcefile, 'name'): filelen = ( 1 if sourcefile.name == '<stdin>' else os.stat(sourcefile.name).st_size) else: filelen = (len(sourcefile.getbuffer()) if hasattr(sourcefile, 'getbuffer') else 1) with click.progressbar(length=filelen, label='Uploading data source', fill_char="#", empty_char='-', file=sys.stderr) as bar: def callback(num_bytes): """Update the progress bar""" bar.update(num_bytes) res = service.upload(sourcefile, tileset, name, patch=patch, callback=callback) if res.status_code == 201: click.echo(res.text) else: raise MapboxCLIException(res.text.strip())
python
def upload(ctx, tileset, datasource, name, patch): """Upload data to Mapbox accounts. Uploaded data lands at https://www.mapbox.com/data/ and can be used in new or existing projects. All endpoints require authentication. You can specify the tileset id and input file $ mapbox upload username.data mydata.geojson Or specify just the tileset id and take an input file on stdin $ cat mydata.geojson | mapbox upload username.data The --name option defines the title as it appears in Studio and defaults to the last part of the tileset id, e.g. "data" Note that the tileset must start with your username. An access token with upload scope is required, see `mapbox --help`. Your account must be flagged in order to use the patch mode feature. """ access_token = (ctx.obj and ctx.obj.get('access_token')) or None service = mapbox.Uploader(access_token=access_token) if name is None: name = tileset.split(".")[-1] if datasource.startswith('https://'): # Skip staging. Note this this only works for specific buckets. res = service.create(datasource, tileset, name=name, patch=patch) else: sourcefile = click.File('rb')(datasource) if hasattr(sourcefile, 'name'): filelen = ( 1 if sourcefile.name == '<stdin>' else os.stat(sourcefile.name).st_size) else: filelen = (len(sourcefile.getbuffer()) if hasattr(sourcefile, 'getbuffer') else 1) with click.progressbar(length=filelen, label='Uploading data source', fill_char="#", empty_char='-', file=sys.stderr) as bar: def callback(num_bytes): """Update the progress bar""" bar.update(num_bytes) res = service.upload(sourcefile, tileset, name, patch=patch, callback=callback) if res.status_code == 201: click.echo(res.text) else: raise MapboxCLIException(res.text.strip())
[ "def", "upload", "(", "ctx", ",", "tileset", ",", "datasource", ",", "name", ",", "patch", ")", ":", "access_token", "=", "(", "ctx", ".", "obj", "and", "ctx", ".", "obj", ".", "get", "(", "'access_token'", ")", ")", "or", "None", "service", "=", "mapbox", ".", "Uploader", "(", "access_token", "=", "access_token", ")", "if", "name", "is", "None", ":", "name", "=", "tileset", ".", "split", "(", "\".\"", ")", "[", "-", "1", "]", "if", "datasource", ".", "startswith", "(", "'https://'", ")", ":", "# Skip staging. Note this this only works for specific buckets.", "res", "=", "service", ".", "create", "(", "datasource", ",", "tileset", ",", "name", "=", "name", ",", "patch", "=", "patch", ")", "else", ":", "sourcefile", "=", "click", ".", "File", "(", "'rb'", ")", "(", "datasource", ")", "if", "hasattr", "(", "sourcefile", ",", "'name'", ")", ":", "filelen", "=", "(", "1", "if", "sourcefile", ".", "name", "==", "'<stdin>'", "else", "os", ".", "stat", "(", "sourcefile", ".", "name", ")", ".", "st_size", ")", "else", ":", "filelen", "=", "(", "len", "(", "sourcefile", ".", "getbuffer", "(", ")", ")", "if", "hasattr", "(", "sourcefile", ",", "'getbuffer'", ")", "else", "1", ")", "with", "click", ".", "progressbar", "(", "length", "=", "filelen", ",", "label", "=", "'Uploading data source'", ",", "fill_char", "=", "\"#\"", ",", "empty_char", "=", "'-'", ",", "file", "=", "sys", ".", "stderr", ")", "as", "bar", ":", "def", "callback", "(", "num_bytes", ")", ":", "\"\"\"Update the progress bar\"\"\"", "bar", ".", "update", "(", "num_bytes", ")", "res", "=", "service", ".", "upload", "(", "sourcefile", ",", "tileset", ",", "name", ",", "patch", "=", "patch", ",", "callback", "=", "callback", ")", "if", "res", ".", "status_code", "==", "201", ":", "click", ".", "echo", "(", "res", ".", "text", ")", "else", ":", "raise", "MapboxCLIException", "(", "res", ".", "text", ".", "strip", "(", ")", ")" ]
Upload data to Mapbox accounts. Uploaded data lands at https://www.mapbox.com/data/ and can be used in new or existing projects. All endpoints require authentication. You can specify the tileset id and input file $ mapbox upload username.data mydata.geojson Or specify just the tileset id and take an input file on stdin $ cat mydata.geojson | mapbox upload username.data The --name option defines the title as it appears in Studio and defaults to the last part of the tileset id, e.g. "data" Note that the tileset must start with your username. An access token with upload scope is required, see `mapbox --help`. Your account must be flagged in order to use the patch mode feature.
[ "Upload", "data", "to", "Mapbox", "accounts", "." ]
b75544a2f83a4fda79d78b5673058e16e64a4f6d
https://github.com/mapbox/mapbox-cli-py/blob/b75544a2f83a4fda79d78b5673058e16e64a4f6d/mapboxcli/scripts/uploads.py#L17-L76
train
aaren/notedown
notedown/contentsmanager.py
NotedownContentsManager._save_notebook
def _save_notebook(self, os_path, nb): """Save a notebook to an os_path.""" with self.atomic_writing(os_path, encoding='utf-8') as f: if ftdetect(os_path) == 'notebook': nbformat.write(nb, f, version=nbformat.NO_CONVERT) elif ftdetect(os_path) == 'markdown': nbjson = nbformat.writes(nb, version=nbformat.NO_CONVERT) markdown = convert(nbjson, informat='notebook', outformat='markdown', strip_outputs=self.strip_outputs) f.write(markdown)
python
def _save_notebook(self, os_path, nb): """Save a notebook to an os_path.""" with self.atomic_writing(os_path, encoding='utf-8') as f: if ftdetect(os_path) == 'notebook': nbformat.write(nb, f, version=nbformat.NO_CONVERT) elif ftdetect(os_path) == 'markdown': nbjson = nbformat.writes(nb, version=nbformat.NO_CONVERT) markdown = convert(nbjson, informat='notebook', outformat='markdown', strip_outputs=self.strip_outputs) f.write(markdown)
[ "def", "_save_notebook", "(", "self", ",", "os_path", ",", "nb", ")", ":", "with", "self", ".", "atomic_writing", "(", "os_path", ",", "encoding", "=", "'utf-8'", ")", "as", "f", ":", "if", "ftdetect", "(", "os_path", ")", "==", "'notebook'", ":", "nbformat", ".", "write", "(", "nb", ",", "f", ",", "version", "=", "nbformat", ".", "NO_CONVERT", ")", "elif", "ftdetect", "(", "os_path", ")", "==", "'markdown'", ":", "nbjson", "=", "nbformat", ".", "writes", "(", "nb", ",", "version", "=", "nbformat", ".", "NO_CONVERT", ")", "markdown", "=", "convert", "(", "nbjson", ",", "informat", "=", "'notebook'", ",", "outformat", "=", "'markdown'", ",", "strip_outputs", "=", "self", ".", "strip_outputs", ")", "f", ".", "write", "(", "markdown", ")" ]
Save a notebook to an os_path.
[ "Save", "a", "notebook", "to", "an", "os_path", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/contentsmanager.py#L52-L63
train
aaren/notedown
notedown/main.py
ftdetect
def ftdetect(filename): """Determine if filename is markdown or notebook, based on the file extension. """ _, extension = os.path.splitext(filename) md_exts = ['.md', '.markdown', '.mkd', '.mdown', '.mkdn', '.Rmd'] nb_exts = ['.ipynb'] if extension in md_exts: return 'markdown' elif extension in nb_exts: return 'notebook' else: return None
python
def ftdetect(filename): """Determine if filename is markdown or notebook, based on the file extension. """ _, extension = os.path.splitext(filename) md_exts = ['.md', '.markdown', '.mkd', '.mdown', '.mkdn', '.Rmd'] nb_exts = ['.ipynb'] if extension in md_exts: return 'markdown' elif extension in nb_exts: return 'notebook' else: return None
[ "def", "ftdetect", "(", "filename", ")", ":", "_", ",", "extension", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "md_exts", "=", "[", "'.md'", ",", "'.markdown'", ",", "'.mkd'", ",", "'.mdown'", ",", "'.mkdn'", ",", "'.Rmd'", "]", "nb_exts", "=", "[", "'.ipynb'", "]", "if", "extension", "in", "md_exts", ":", "return", "'markdown'", "elif", "extension", "in", "nb_exts", ":", "return", "'notebook'", "else", ":", "return", "None" ]
Determine if filename is markdown or notebook, based on the file extension.
[ "Determine", "if", "filename", "is", "markdown", "or", "notebook", "based", "on", "the", "file", "extension", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/main.py#L107-L119
train
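ftdetect dispatches purely on the file extension, so a few sample calls capture its whole behaviour:

from notedown.main import ftdetect

ftdetect("notes.md")       # 'markdown'
ftdetect("analysis.Rmd")   # 'markdown'
ftdetect("report.ipynb")   # 'notebook'
ftdetect("script.py")      # None (unknown extension)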
aaren/notedown
notedown/notedown.py
strip
def strip(notebook): """Remove outputs from a notebook.""" for cell in notebook.cells: if cell.cell_type == 'code': cell.outputs = [] cell.execution_count = None
python
def strip(notebook): """Remove outputs from a notebook.""" for cell in notebook.cells: if cell.cell_type == 'code': cell.outputs = [] cell.execution_count = None
[ "def", "strip", "(", "notebook", ")", ":", "for", "cell", "in", "notebook", ".", "cells", ":", "if", "cell", ".", "cell_type", "==", "'code'", ":", "cell", ".", "outputs", "=", "[", "]", "cell", ".", "execution_count", "=", "None" ]
Remove outputs from a notebook.
[ "Remove", "outputs", "from", "a", "notebook", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L38-L43
train
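strip mutates the notebook in place, which is easy to check against a throwaway nbformat notebook; the new_output call below is a sketch of the nbformat v4 helper and may need tweaking for other output types:

import nbformat.v4 as v4
from notedown.notedown import strip

nb = v4.new_notebook()
cell = v4.new_code_cell("1 + 1")
cell.execution_count = 1
cell.outputs = [v4.new_output("execute_result",
                              data={"text/plain": "2"},
                              execution_count=1)]
nb.cells.append(cell)

strip(nb)
assert cell.outputs == [] and cell.execution_count is None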
aaren/notedown
notedown/notedown.py
get_caption_comments
def get_caption_comments(content): """Retrieve an id and a caption from a code cell. If the code cell content begins with a commented block that looks like ## fig:id # multi-line or single-line # caption then the 'fig:id' and the caption will be returned. The '#' are stripped. """ if not content.startswith('## fig:'): return None, None content = content.splitlines() id = content[0].strip('## ') caption = [] for line in content[1:]: if not line.startswith('# ') or line.startswith('##'): break else: caption.append(line.lstrip('# ').rstrip()) # add " around the caption. TODO: consider doing this upstream # in pandoc-attributes caption = '"' + ' '.join(caption) + '"' return id, caption
python
def get_caption_comments(content): """Retrieve an id and a caption from a code cell. If the code cell content begins with a commented block that looks like ## fig:id # multi-line or single-line # caption then the 'fig:id' and the caption will be returned. The '#' are stripped. """ if not content.startswith('## fig:'): return None, None content = content.splitlines() id = content[0].strip('## ') caption = [] for line in content[1:]: if not line.startswith('# ') or line.startswith('##'): break else: caption.append(line.lstrip('# ').rstrip()) # add " around the caption. TODO: consider doing this upstream # in pandoc-attributes caption = '"' + ' '.join(caption) + '"' return id, caption
[ "def", "get_caption_comments", "(", "content", ")", ":", "if", "not", "content", ".", "startswith", "(", "'## fig:'", ")", ":", "return", "None", ",", "None", "content", "=", "content", ".", "splitlines", "(", ")", "id", "=", "content", "[", "0", "]", ".", "strip", "(", "'## '", ")", "caption", "=", "[", "]", "for", "line", "in", "content", "[", "1", ":", "]", ":", "if", "not", "line", ".", "startswith", "(", "'# '", ")", "or", "line", ".", "startswith", "(", "'##'", ")", ":", "break", "else", ":", "caption", ".", "append", "(", "line", ".", "lstrip", "(", "'# '", ")", ".", "rstrip", "(", ")", ")", "# add \" around the caption. TODO: consider doing this upstream", "# in pandoc-attributes", "caption", "=", "'\"'", "+", "' '", ".", "join", "(", "caption", ")", "+", "'\"'", "return", "id", ",", "caption" ]
Retrieve an id and a caption from a code cell. If the code cell content begins with a commented block that looks like ## fig:id # multi-line or single-line # caption then the 'fig:id' and the caption will be returned. The '#' are stripped.
[ "Retrieve", "an", "id", "and", "a", "caption", "from", "a", "code", "cell", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L648-L679
train
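A concrete input/output pair for get_caption_comments, following the comment convention its docstring describes:

from notedown.notedown import get_caption_comments

content = "\n".join([
    "## fig:scatter",
    "# A scatter plot of the",
    "# raw data.",
    "plt.scatter(x, y)",
])

get_caption_comments(content)
# ('fig:scatter', '"A scatter plot of the raw data."')

get_caption_comments("plt.plot(x)")
# (None, None) -- no '## fig:' header, so nothing is extracted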
aaren/notedown
notedown/notedown.py
MarkdownReader.new_code_block
def new_code_block(self, **kwargs): """Create a new code block.""" proto = {'content': '', 'type': self.code, 'IO': '', 'attributes': ''} proto.update(**kwargs) return proto
python
def new_code_block(self, **kwargs): """Create a new code block.""" proto = {'content': '', 'type': self.code, 'IO': '', 'attributes': ''} proto.update(**kwargs) return proto
[ "def", "new_code_block", "(", "self", ",", "*", "*", "kwargs", ")", ":", "proto", "=", "{", "'content'", ":", "''", ",", "'type'", ":", "self", ".", "code", ",", "'IO'", ":", "''", ",", "'attributes'", ":", "''", "}", "proto", ".", "update", "(", "*", "*", "kwargs", ")", "return", "proto" ]
Create a new code block.
[ "Create", "a", "new", "code", "block", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L147-L154
train
aaren/notedown
notedown/notedown.py
MarkdownReader.new_text_block
def new_text_block(self, **kwargs): """Create a new text block.""" proto = {'content': '', 'type': self.markdown} proto.update(**kwargs) return proto
python
def new_text_block(self, **kwargs): """Create a new text block.""" proto = {'content': '', 'type': self.markdown} proto.update(**kwargs) return proto
[ "def", "new_text_block", "(", "self", ",", "*", "*", "kwargs", ")", ":", "proto", "=", "{", "'content'", ":", "''", ",", "'type'", ":", "self", ".", "markdown", "}", "proto", ".", "update", "(", "*", "*", "kwargs", ")", "return", "proto" ]
Create a new text block.
[ "Create", "a", "new", "text", "block", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L156-L160
train
aaren/notedown
notedown/notedown.py
MarkdownReader.pre_process_code_block
def pre_process_code_block(block):
    """Preprocess the content of a code block, modifying the code
    block in place.

    Just dedents indented code.
    """
    if 'indent' in block and block['indent']:
        indent = r'^' + block['indent']
        block['content'] = re.sub(indent, '', block['icontent'],
                                  flags=re.MULTILINE)
python
def pre_process_code_block(block):
    """Preprocess the content of a code block, modifying the code
    block in place.

    Just dedents indented code.
    """
    if 'indent' in block and block['indent']:
        indent = r'^' + block['indent']
        block['content'] = re.sub(indent, '', block['icontent'],
                                  flags=re.MULTILINE)
[ "def", "pre_process_code_block", "(", "block", ")", ":", "if", "'indent'", "in", "block", "and", "block", "[", "'indent'", "]", ":", "indent", "=", "r'^'", "+", "block", "[", "'indent'", "]", "block", "[", "'content'", "]", "=", "re", ".", "sub", "(", "indent", ",", "''", ",", "block", "[", "'icontent'", "]", ",", "flags", "=", "re", ".", "MULTILINE", ")" ]
Preprocess the content of a code block, modifying the code block in place. Just dedents indented code.
[ "Preprocess", "the", "content", "of", "a", "code", "block", "modifying", "the", "code", "block", "in", "place", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L169-L178
train
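The dedent step above is just a multiline regex substitution over the captured indent. A standalone sketch of the same idea, with a made-up block dictionary, is:

import re

# A code block captured with a four-space indent; 'icontent' holds the
# indented text and 'content' receives the dedented version.
block = {'indent': '    ',
         'icontent': '    x = 1\n    print(x)'}

indent = r'^' + block['indent']
block['content'] = re.sub(indent, '', block['icontent'], flags=re.MULTILINE)
print(block['content'])  # x = 1 and print(x), one per line, no indent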
aaren/notedown
notedown/notedown.py
MarkdownReader.process_code_block
def process_code_block(self, block):
    """Parse block attributes"""
    if block['type'] != self.code:
        return block

    attr = PandocAttributes(block['attributes'], 'markdown')

    if self.match == 'all':
        pass

    elif self.match == 'fenced' and block.get('indent'):
        return self.new_text_block(content=('\n' +
                                            block['icontent'] +
                                            '\n'))
    elif self.match == 'strict' and 'input' not in attr.classes:
        return self.new_text_block(content=block['raw'])

    elif self.match not in list(attr.classes) + ['fenced', 'strict']:
        return self.new_text_block(content=block['raw'])

    # set input / output status of cell
    if 'output' in attr.classes and 'json' in attr.classes:
        block['IO'] = 'output'
    elif 'input' in attr.classes:
        block['IO'] = 'input'
        attr.classes.remove('input')
    else:
        block['IO'] = 'input'

    if self.caption_comments:
        # override attributes id and caption with those set in
        # comments, if they exist
        id, caption = get_caption_comments(block['content'])
        if id:
            attr.id = id
        if caption:
            attr['caption'] = caption

    try:
        # determine the language as the first class that
        # is in the block attributes and also in the list
        # of languages
        language = set(attr.classes).intersection(languages).pop()
        attr.classes.remove(language)
    except KeyError:
        language = None

    block['language'] = language
    block['attributes'] = attr

    # ensure one identifier for python code
    if language in ('python', 'py', '', None):
        block['language'] = self.python
    # add alternate language execution magic
    elif language != self.python and self.magic:
        block['content'] = CodeMagician.magic(language) + block['content']
        block['language'] = language

    return self.new_code_block(**block)
python
def process_code_block(self, block):
    """Parse block attributes"""
    if block['type'] != self.code:
        return block

    attr = PandocAttributes(block['attributes'], 'markdown')

    if self.match == 'all':
        pass

    elif self.match == 'fenced' and block.get('indent'):
        return self.new_text_block(content=('\n' +
                                            block['icontent'] +
                                            '\n'))
    elif self.match == 'strict' and 'input' not in attr.classes:
        return self.new_text_block(content=block['raw'])

    elif self.match not in list(attr.classes) + ['fenced', 'strict']:
        return self.new_text_block(content=block['raw'])

    # set input / output status of cell
    if 'output' in attr.classes and 'json' in attr.classes:
        block['IO'] = 'output'
    elif 'input' in attr.classes:
        block['IO'] = 'input'
        attr.classes.remove('input')
    else:
        block['IO'] = 'input'

    if self.caption_comments:
        # override attributes id and caption with those set in
        # comments, if they exist
        id, caption = get_caption_comments(block['content'])
        if id:
            attr.id = id
        if caption:
            attr['caption'] = caption

    try:
        # determine the language as the first class that
        # is in the block attributes and also in the list
        # of languages
        language = set(attr.classes).intersection(languages).pop()
        attr.classes.remove(language)
    except KeyError:
        language = None

    block['language'] = language
    block['attributes'] = attr

    # ensure one identifier for python code
    if language in ('python', 'py', '', None):
        block['language'] = self.python
    # add alternate language execution magic
    elif language != self.python and self.magic:
        block['content'] = CodeMagician.magic(language) + block['content']
        block['language'] = language

    return self.new_code_block(**block)
[ "def", "process_code_block", "(", "self", ",", "block", ")", ":", "if", "block", "[", "'type'", "]", "!=", "self", ".", "code", ":", "return", "block", "attr", "=", "PandocAttributes", "(", "block", "[", "'attributes'", "]", ",", "'markdown'", ")", "if", "self", ".", "match", "==", "'all'", ":", "pass", "elif", "self", ".", "match", "==", "'fenced'", "and", "block", ".", "get", "(", "'indent'", ")", ":", "return", "self", ".", "new_text_block", "(", "content", "=", "(", "'\\n'", "+", "block", "[", "'icontent'", "]", "+", "'\\n'", ")", ")", "elif", "self", ".", "match", "==", "'strict'", "and", "'input'", "not", "in", "attr", ".", "classes", ":", "return", "self", ".", "new_text_block", "(", "content", "=", "block", "[", "'raw'", "]", ")", "elif", "self", ".", "match", "not", "in", "list", "(", "attr", ".", "classes", ")", "+", "[", "'fenced'", ",", "'strict'", "]", ":", "return", "self", ".", "new_text_block", "(", "content", "=", "block", "[", "'raw'", "]", ")", "# set input / output status of cell", "if", "'output'", "in", "attr", ".", "classes", "and", "'json'", "in", "attr", ".", "classes", ":", "block", "[", "'IO'", "]", "=", "'output'", "elif", "'input'", "in", "attr", ".", "classes", ":", "block", "[", "'IO'", "]", "=", "'input'", "attr", ".", "classes", ".", "remove", "(", "'input'", ")", "else", ":", "block", "[", "'IO'", "]", "=", "'input'", "if", "self", ".", "caption_comments", ":", "# override attributes id and caption with those set in", "# comments, if they exist", "id", ",", "caption", "=", "get_caption_comments", "(", "block", "[", "'content'", "]", ")", "if", "id", ":", "attr", ".", "id", "=", "id", "if", "caption", ":", "attr", "[", "'caption'", "]", "=", "caption", "try", ":", "# determine the language as the first class that", "# is in the block attributes and also in the list", "# of languages", "language", "=", "set", "(", "attr", ".", "classes", ")", ".", "intersection", "(", "languages", ")", ".", "pop", "(", ")", "attr", ".", "classes", ".", "remove", "(", "language", ")", "except", "KeyError", ":", "language", "=", "None", "block", "[", "'language'", "]", "=", "language", "block", "[", "'attributes'", "]", "=", "attr", "# ensure one identifier for python code", "if", "language", "in", "(", "'python'", ",", "'py'", ",", "''", ",", "None", ")", ":", "block", "[", "'language'", "]", "=", "self", ".", "python", "# add alternate language execution magic", "elif", "language", "!=", "self", ".", "python", "and", "self", ".", "magic", ":", "block", "[", "'content'", "]", "=", "CodeMagician", ".", "magic", "(", "language", ")", "+", "block", "[", "'content'", "]", "block", "[", "'language'", "]", "=", "language", "return", "self", ".", "new_code_block", "(", "*", "*", "block", ")" ]
Parse block attributes
[ "Parse", "block", "attributes" ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L189-L248
train
aaren/notedown
notedown/notedown.py
MarkdownReader.parse_blocks
def parse_blocks(self, text):
    """Extract the code and non-code blocks from given markdown text.

    Returns a list of block dictionaries.

    Each dictionary has at least the keys 'type' and 'content',
    containing the type of the block ('markdown', 'code') and
    the contents of the block.

    Additional keys may be parsed as well.

    We should switch to an external markdown library if this
    gets much more complicated!
    """
    code_matches = [m for m in self.code_pattern.finditer(text)]

    # determine where the limits of the non code bits are
    # based on the code block edges
    text_starts = [0] + [m.end() for m in code_matches]
    text_stops = [m.start() for m in code_matches] + [len(text)]
    text_limits = list(zip(text_starts, text_stops))

    # list of the groups from the code blocks
    code_blocks = [self.new_code_block(**m.groupdict())
                   for m in code_matches]

    text_blocks = [self.new_text_block(content=text[i:j])
                   for i, j in text_limits]

    # remove indents
    list(map(self.pre_process_code_block, code_blocks))
    # remove blank line at start and end of markdown
    list(map(self.pre_process_text_block, text_blocks))

    # create a list of the right length
    all_blocks = list(range(len(text_blocks) + len(code_blocks)))

    # NOTE: the behaviour here is a bit fragile in that we
    # assume that cells must alternate between code and
    # markdown. This isn't the case, as we could have
    # consecutive code cells, and we get around this by
    # stripping out empty cells. i.e. two consecutive code cells
    # have an empty markdown cell between them which is stripped
    # out because it is empty.

    # cells must alternate in order
    all_blocks[::2] = text_blocks
    all_blocks[1::2] = code_blocks

    # remove possible empty text cells
    all_blocks = [cell for cell in all_blocks if cell['content']]

    return all_blocks
python
def parse_blocks(self, text):
    """Extract the code and non-code blocks from given markdown text.

    Returns a list of block dictionaries.

    Each dictionary has at least the keys 'type' and 'content',
    containing the type of the block ('markdown', 'code') and
    the contents of the block.

    Additional keys may be parsed as well.

    We should switch to an external markdown library if this
    gets much more complicated!
    """
    code_matches = [m for m in self.code_pattern.finditer(text)]

    # determine where the limits of the non code bits are
    # based on the code block edges
    text_starts = [0] + [m.end() for m in code_matches]
    text_stops = [m.start() for m in code_matches] + [len(text)]
    text_limits = list(zip(text_starts, text_stops))

    # list of the groups from the code blocks
    code_blocks = [self.new_code_block(**m.groupdict())
                   for m in code_matches]

    text_blocks = [self.new_text_block(content=text[i:j])
                   for i, j in text_limits]

    # remove indents
    list(map(self.pre_process_code_block, code_blocks))
    # remove blank line at start and end of markdown
    list(map(self.pre_process_text_block, text_blocks))

    # create a list of the right length
    all_blocks = list(range(len(text_blocks) + len(code_blocks)))

    # NOTE: the behaviour here is a bit fragile in that we
    # assume that cells must alternate between code and
    # markdown. This isn't the case, as we could have
    # consecutive code cells, and we get around this by
    # stripping out empty cells. i.e. two consecutive code cells
    # have an empty markdown cell between them which is stripped
    # out because it is empty.

    # cells must alternate in order
    all_blocks[::2] = text_blocks
    all_blocks[1::2] = code_blocks

    # remove possible empty text cells
    all_blocks = [cell for cell in all_blocks if cell['content']]

    return all_blocks
[ "def", "parse_blocks", "(", "self", ",", "text", ")", ":", "code_matches", "=", "[", "m", "for", "m", "in", "self", ".", "code_pattern", ".", "finditer", "(", "text", ")", "]", "# determine where the limits of the non code bits are", "# based on the code block edges", "text_starts", "=", "[", "0", "]", "+", "[", "m", ".", "end", "(", ")", "for", "m", "in", "code_matches", "]", "text_stops", "=", "[", "m", ".", "start", "(", ")", "for", "m", "in", "code_matches", "]", "+", "[", "len", "(", "text", ")", "]", "text_limits", "=", "list", "(", "zip", "(", "text_starts", ",", "text_stops", ")", ")", "# list of the groups from the code blocks", "code_blocks", "=", "[", "self", ".", "new_code_block", "(", "*", "*", "m", ".", "groupdict", "(", ")", ")", "for", "m", "in", "code_matches", "]", "text_blocks", "=", "[", "self", ".", "new_text_block", "(", "content", "=", "text", "[", "i", ":", "j", "]", ")", "for", "i", ",", "j", "in", "text_limits", "]", "# remove indents", "list", "(", "map", "(", "self", ".", "pre_process_code_block", ",", "code_blocks", ")", ")", "# remove blank line at start and end of markdown", "list", "(", "map", "(", "self", ".", "pre_process_text_block", ",", "text_blocks", ")", ")", "# create a list of the right length", "all_blocks", "=", "list", "(", "range", "(", "len", "(", "text_blocks", ")", "+", "len", "(", "code_blocks", ")", ")", ")", "# NOTE: the behaviour here is a bit fragile in that we", "# assume that cells must alternate between code and", "# markdown. This isn't the case, as we could have", "# consecutive code cells, and we get around this by", "# stripping out empty cells. i.e. two consecutive code cells", "# have an empty markdown cell between them which is stripped", "# out because it is empty.", "# cells must alternate in order", "all_blocks", "[", ":", ":", "2", "]", "=", "text_blocks", "all_blocks", "[", "1", ":", ":", "2", "]", "=", "code_blocks", "# remove possible empty text cells", "all_blocks", "=", "[", "cell", "for", "cell", "in", "all_blocks", "if", "cell", "[", "'content'", "]", "]", "return", "all_blocks" ]
Extract the code and non-code blocks from given markdown text. Returns a list of block dictionaries. Each dictionary has at least the keys 'type' and 'content', containing the type of the block ('markdown', 'code') and the contents of the block. Additional keys may be parsed as well. We should switch to an external markdown library if this gets much more complicated!
[ "Extract", "the", "code", "and", "non", "-", "code", "blocks", "from", "given", "markdown", "text", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L250-L302
train
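The interleaving trick in parse_blocks — writing text and code blocks into alternating slots with extended-slice assignment and then dropping empty cells — can be seen in isolation in the standalone sketch below; the block dictionaries are invented.

# Standalone sketch of the interleaving used above.
text_blocks = [{'content': 'intro'}, {'content': ''}, {'content': 'outro'}]
code_blocks = [{'content': 'a = 1'}, {'content': 'b = 2'}]

all_blocks = list(range(len(text_blocks) + len(code_blocks)))
all_blocks[::2] = text_blocks   # even slots: markdown
all_blocks[1::2] = code_blocks  # odd slots: code

# the empty markdown placeholder between consecutive code cells drops out
all_blocks = [cell for cell in all_blocks if cell['content']]
print([c['content'] for c in all_blocks])
# ['intro', 'a = 1', 'b = 2', 'outro']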
aaren/notedown
notedown/notedown.py
MarkdownReader.create_code_cell
def create_code_cell(block):
    """Create a notebook code cell from a block."""
    code_cell = nbbase.new_code_cell(source=block['content'])

    attr = block['attributes']
    if not attr.is_empty:
        code_cell.metadata \
            = nbbase.NotebookNode({'attributes': attr.to_dict()})
        execution_count = attr.kvs.get('n')
        if not execution_count:
            code_cell.execution_count = None
        else:
            code_cell.execution_count = int(execution_count)

    return code_cell
python
def create_code_cell(block):
    """Create a notebook code cell from a block."""
    code_cell = nbbase.new_code_cell(source=block['content'])

    attr = block['attributes']
    if not attr.is_empty:
        code_cell.metadata \
            = nbbase.NotebookNode({'attributes': attr.to_dict()})
        execution_count = attr.kvs.get('n')
        if not execution_count:
            code_cell.execution_count = None
        else:
            code_cell.execution_count = int(execution_count)

    return code_cell
[ "def", "create_code_cell", "(", "block", ")", ":", "code_cell", "=", "nbbase", ".", "new_code_cell", "(", "source", "=", "block", "[", "'content'", "]", ")", "attr", "=", "block", "[", "'attributes'", "]", "if", "not", "attr", ".", "is_empty", ":", "code_cell", ".", "metadata", "=", "nbbase", ".", "NotebookNode", "(", "{", "'attributes'", ":", "attr", ".", "to_dict", "(", ")", "}", ")", "execution_count", "=", "attr", ".", "kvs", ".", "get", "(", "'n'", ")", "if", "not", "execution_count", ":", "code_cell", ".", "execution_count", "=", "None", "else", ":", "code_cell", ".", "execution_count", "=", "int", "(", "execution_count", ")", "return", "code_cell" ]
Create a notebook code cell from a block.
[ "Create", "a", "notebook", "code", "cell", "from", "a", "block", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L305-L319
train
aaren/notedown
notedown/notedown.py
MarkdownReader.create_markdown_cell
def create_markdown_cell(block):
    """Create a markdown cell from a block."""
    kwargs = {'cell_type': block['type'],
              'source': block['content']}
    markdown_cell = nbbase.new_markdown_cell(**kwargs)
    return markdown_cell
python
def create_markdown_cell(block):
    """Create a markdown cell from a block."""
    kwargs = {'cell_type': block['type'],
              'source': block['content']}
    markdown_cell = nbbase.new_markdown_cell(**kwargs)
    return markdown_cell
[ "def", "create_markdown_cell", "(", "block", ")", ":", "kwargs", "=", "{", "'cell_type'", ":", "block", "[", "'type'", "]", ",", "'source'", ":", "block", "[", "'content'", "]", "}", "markdown_cell", "=", "nbbase", ".", "new_markdown_cell", "(", "*", "*", "kwargs", ")", "return", "markdown_cell" ]
Create a markdown cell from a block.
[ "Create", "a", "markdown", "cell", "from", "a", "block", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L322-L327
train
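Both cell constructors above delegate to nbbase, which by all appearances is the nbformat v4 API (that mapping is an assumption, not stated in the records). A minimal sketch of the underlying calls:

# Sketch of the nbformat v4 calls the two constructors above rely on.
from nbformat.v4 import new_markdown_cell, new_code_cell, new_notebook

cells = [
    new_markdown_cell(source='# A title'),
    new_code_cell(source='print("hello")'),
]
nb = new_notebook(cells=cells)
print(nb.cells[0].cell_type, nb.cells[1].cell_type)  # markdown code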
aaren/notedown
notedown/notedown.py
MarkdownReader.create_cells
def create_cells(self, blocks):
    """Turn the list of blocks into a list of notebook cells."""
    cells = []
    for block in blocks:
        if (block['type'] == self.code) and (block['IO'] == 'input'):
            code_cell = self.create_code_cell(block)
            cells.append(code_cell)

        elif (block['type'] == self.code
              and block['IO'] == 'output'
              and cells[-1].cell_type == 'code'):
            cells[-1].outputs = self.create_outputs(block)

        elif block['type'] == self.markdown:
            markdown_cell = self.create_markdown_cell(block)
            cells.append(markdown_cell)

        else:
            raise NotImplementedError("{} is not supported as a cell"
                                      "type".format(block['type']))

    return cells
python
def create_cells(self, blocks):
    """Turn the list of blocks into a list of notebook cells."""
    cells = []
    for block in blocks:
        if (block['type'] == self.code) and (block['IO'] == 'input'):
            code_cell = self.create_code_cell(block)
            cells.append(code_cell)

        elif (block['type'] == self.code
              and block['IO'] == 'output'
              and cells[-1].cell_type == 'code'):
            cells[-1].outputs = self.create_outputs(block)

        elif block['type'] == self.markdown:
            markdown_cell = self.create_markdown_cell(block)
            cells.append(markdown_cell)

        else:
            raise NotImplementedError("{} is not supported as a cell"
                                      "type".format(block['type']))

    return cells
[ "def", "create_cells", "(", "self", ",", "blocks", ")", ":", "cells", "=", "[", "]", "for", "block", "in", "blocks", ":", "if", "(", "block", "[", "'type'", "]", "==", "self", ".", "code", ")", "and", "(", "block", "[", "'IO'", "]", "==", "'input'", ")", ":", "code_cell", "=", "self", ".", "create_code_cell", "(", "block", ")", "cells", ".", "append", "(", "code_cell", ")", "elif", "(", "block", "[", "'type'", "]", "==", "self", ".", "code", "and", "block", "[", "'IO'", "]", "==", "'output'", "and", "cells", "[", "-", "1", "]", ".", "cell_type", "==", "'code'", ")", ":", "cells", "[", "-", "1", "]", ".", "outputs", "=", "self", ".", "create_outputs", "(", "block", ")", "elif", "block", "[", "'type'", "]", "==", "self", ".", "markdown", ":", "markdown_cell", "=", "self", ".", "create_markdown_cell", "(", "block", ")", "cells", ".", "append", "(", "markdown_cell", ")", "else", ":", "raise", "NotImplementedError", "(", "\"{} is not supported as a cell\"", "\"type\"", ".", "format", "(", "block", "[", "'type'", "]", ")", ")", "return", "cells" ]
Turn the list of blocks into a list of notebook cells.
[ "Turn", "the", "list", "of", "blocks", "into", "a", "list", "of", "notebook", "cells", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L337-L358
train
aaren/notedown
notedown/notedown.py
MarkdownReader.to_notebook
def to_notebook(self, s, **kwargs):
    """Convert the markdown string s to an IPython notebook.

    Returns a notebook.
    """
    all_blocks = self.parse_blocks(s)

    if self.pre_code_block['content']:
        # TODO: if first block is markdown, place after?
        all_blocks.insert(0, self.pre_code_block)

    blocks = [self.process_code_block(block) for block in all_blocks]

    cells = self.create_cells(blocks)

    nb = nbbase.new_notebook(cells=cells)

    return nb
python
def to_notebook(self, s, **kwargs):
    """Convert the markdown string s to an IPython notebook.

    Returns a notebook.
    """
    all_blocks = self.parse_blocks(s)

    if self.pre_code_block['content']:
        # TODO: if first block is markdown, place after?
        all_blocks.insert(0, self.pre_code_block)

    blocks = [self.process_code_block(block) for block in all_blocks]

    cells = self.create_cells(blocks)

    nb = nbbase.new_notebook(cells=cells)

    return nb
[ "def", "to_notebook", "(", "self", ",", "s", ",", "*", "*", "kwargs", ")", ":", "all_blocks", "=", "self", ".", "parse_blocks", "(", "s", ")", "if", "self", ".", "pre_code_block", "[", "'content'", "]", ":", "# TODO: if first block is markdown, place after?", "all_blocks", ".", "insert", "(", "0", ",", "self", ".", "pre_code_block", ")", "blocks", "=", "[", "self", ".", "process_code_block", "(", "block", ")", "for", "block", "in", "all_blocks", "]", "cells", "=", "self", ".", "create_cells", "(", "blocks", ")", "nb", "=", "nbbase", ".", "new_notebook", "(", "cells", "=", "cells", ")", "return", "nb" ]
Convert the markdown string s to an IPython notebook. Returns a notebook.
[ "Convert", "the", "markdown", "string", "s", "to", "an", "IPython", "notebook", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L360-L376
train
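Putting the reader methods together, a round trip from markdown text to a notebook object might look like the sketch below. It assumes MarkdownReader is importable from the top-level notedown package and can be constructed with its default arguments; the expected cell counts are illustrative, not guaranteed.

# Hedged end-to-end sketch of MarkdownReader.to_notebook.
from notedown import MarkdownReader  # assumed top-level import

markdown = "Some prose.\n\n```python\nprint('hello')\n```\n"

reader = MarkdownReader()
nb = reader.to_notebook(markdown)
print(len(nb.cells))           # expected: 2 (one markdown cell, one code cell)
print(nb.cells[-1].cell_type)  # expected: code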
aaren/notedown
notedown/notedown.py
MarkdownWriter.write_resources
def write_resources(self, resources):
    """Write the output data in resources returned by exporter
    to files.
    """
    for filename, data in list(resources.get('outputs', {}).items()):
        # Determine where to write the file to
        dest = os.path.join(self.output_dir, filename)
        path = os.path.dirname(dest)
        if path and not os.path.isdir(path):
            os.makedirs(path)

        # Write file
        with open(dest, 'wb') as f:
            f.write(data)
python
def write_resources(self, resources):
    """Write the output data in resources returned by exporter
    to files.
    """
    for filename, data in list(resources.get('outputs', {}).items()):
        # Determine where to write the file to
        dest = os.path.join(self.output_dir, filename)
        path = os.path.dirname(dest)
        if path and not os.path.isdir(path):
            os.makedirs(path)

        # Write file
        with open(dest, 'wb') as f:
            f.write(data)
[ "def", "write_resources", "(", "self", ",", "resources", ")", ":", "for", "filename", ",", "data", "in", "list", "(", "resources", ".", "get", "(", "'outputs'", ",", "{", "}", ")", ".", "items", "(", ")", ")", ":", "# Determine where to write the file to", "dest", "=", "os", ".", "path", ".", "join", "(", "self", ".", "output_dir", ",", "filename", ")", "path", "=", "os", ".", "path", ".", "dirname", "(", "dest", ")", "if", "path", "and", "not", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "os", ".", "makedirs", "(", "path", ")", "# Write file", "with", "open", "(", "dest", ",", "'wb'", ")", "as", "f", ":", "f", ".", "write", "(", "data", ")" ]
Write the output data in resources returned by exporter to files.
[ "Write", "the", "output", "data", "in", "resources", "returned", "by", "exporter", "to", "files", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L445-L458
train
aaren/notedown
notedown/notedown.py
MarkdownWriter.string2json
def string2json(self, string):
    """Convert json into its string representation.
    Used for writing outputs to markdown."""
    kwargs = {
        'cls': BytesEncoder,  # use the IPython bytes encoder
        'indent': 1,
        'sort_keys': True,
        'separators': (',', ': '),
    }
    return cast_unicode(json.dumps(string, **kwargs), 'utf-8')
python
def string2json(self, string):
    """Convert json into its string representation.
    Used for writing outputs to markdown."""
    kwargs = {
        'cls': BytesEncoder,  # use the IPython bytes encoder
        'indent': 1,
        'sort_keys': True,
        'separators': (',', ': '),
    }
    return cast_unicode(json.dumps(string, **kwargs), 'utf-8')
[ "def", "string2json", "(", "self", ",", "string", ")", ":", "kwargs", "=", "{", "'cls'", ":", "BytesEncoder", ",", "# use the IPython bytes encoder", "'indent'", ":", "1", ",", "'sort_keys'", ":", "True", ",", "'separators'", ":", "(", "','", ",", "': '", ")", ",", "}", "return", "cast_unicode", "(", "json", ".", "dumps", "(", "string", ",", "*", "*", "kwargs", ")", ",", "'utf-8'", ")" ]
Convert json into its string representation. Used for writing outputs to markdown.
[ "Convert", "json", "into", "its", "string", "representation", ".", "Used", "for", "writing", "outputs", "to", "markdown", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L461-L470
train
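The serializer above pins down json.dumps settings so that outputs written back to markdown are stable across runs. The same settings can be tried standalone, minus the custom BytesEncoder (which handles raw bytes in notebook outputs); the sample output dict is invented.

import json

output = {'output_type': 'stream', 'name': 'stdout', 'text': 'hello\n'}
print(json.dumps(output, indent=1, sort_keys=True, separators=(',', ': ')))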
aaren/notedown
notedown/notedown.py
MarkdownWriter.create_attributes
def create_attributes(self, cell, cell_type=None):
    """Turn the attribute dict into an attribute string
    for the code block.
    """
    if self.strip_outputs or not hasattr(cell, 'execution_count'):
        return 'python'

    attrs = cell.metadata.get('attributes')
    attr = PandocAttributes(attrs, 'dict')

    if 'python' in attr.classes:
        attr.classes.remove('python')
    if 'input' in attr.classes:
        attr.classes.remove('input')

    if cell_type == 'figure':
        attr.kvs.pop('caption', '')
        attr.classes.append('figure')
        attr.classes.append('output')
        return attr.to_html()

    elif cell_type == 'input':
        # ensure python goes first so that github highlights it
        attr.classes.insert(0, 'python')
        attr.classes.insert(1, 'input')
        if cell.execution_count:
            attr.kvs['n'] = cell.execution_count
        return attr.to_markdown(format='{classes} {id} {kvs}')

    else:
        return attr.to_markdown()
python
def create_attributes(self, cell, cell_type=None):
    """Turn the attribute dict into an attribute string
    for the code block.
    """
    if self.strip_outputs or not hasattr(cell, 'execution_count'):
        return 'python'

    attrs = cell.metadata.get('attributes')
    attr = PandocAttributes(attrs, 'dict')

    if 'python' in attr.classes:
        attr.classes.remove('python')
    if 'input' in attr.classes:
        attr.classes.remove('input')

    if cell_type == 'figure':
        attr.kvs.pop('caption', '')
        attr.classes.append('figure')
        attr.classes.append('output')
        return attr.to_html()

    elif cell_type == 'input':
        # ensure python goes first so that github highlights it
        attr.classes.insert(0, 'python')
        attr.classes.insert(1, 'input')
        if cell.execution_count:
            attr.kvs['n'] = cell.execution_count
        return attr.to_markdown(format='{classes} {id} {kvs}')

    else:
        return attr.to_markdown()
[ "def", "create_attributes", "(", "self", ",", "cell", ",", "cell_type", "=", "None", ")", ":", "if", "self", ".", "strip_outputs", "or", "not", "hasattr", "(", "cell", ",", "'execution_count'", ")", ":", "return", "'python'", "attrs", "=", "cell", ".", "metadata", ".", "get", "(", "'attributes'", ")", "attr", "=", "PandocAttributes", "(", "attrs", ",", "'dict'", ")", "if", "'python'", "in", "attr", ".", "classes", ":", "attr", ".", "classes", ".", "remove", "(", "'python'", ")", "if", "'input'", "in", "attr", ".", "classes", ":", "attr", ".", "classes", ".", "remove", "(", "'input'", ")", "if", "cell_type", "==", "'figure'", ":", "attr", ".", "kvs", ".", "pop", "(", "'caption'", ",", "''", ")", "attr", ".", "classes", ".", "append", "(", "'figure'", ")", "attr", ".", "classes", ".", "append", "(", "'output'", ")", "return", "attr", ".", "to_html", "(", ")", "elif", "cell_type", "==", "'input'", ":", "# ensure python goes first so that github highlights it", "attr", ".", "classes", ".", "insert", "(", "0", ",", "'python'", ")", "attr", ".", "classes", ".", "insert", "(", "1", ",", "'input'", ")", "if", "cell", ".", "execution_count", ":", "attr", ".", "kvs", "[", "'n'", "]", "=", "cell", ".", "execution_count", "return", "attr", ".", "to_markdown", "(", "format", "=", "'{classes} {id} {kvs}'", ")", "else", ":", "return", "attr", ".", "to_markdown", "(", ")" ]
Turn the attribute dict into an attribute string for the code block.
[ "Turn", "the", "attribute", "dict", "into", "an", "attribute", "string", "for", "the", "code", "block", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L493-L523
train
aaren/notedown
notedown/notedown.py
MarkdownWriter.dequote
def dequote(s): """Remove excess quotes from a string.""" if len(s) < 2: return s elif (s[0] == s[-1]) and s.startswith(('"', "'")): return s[1: -1] else: return s
python
def dequote(s): """Remove excess quotes from a string.""" if len(s) < 2: return s elif (s[0] == s[-1]) and s.startswith(('"', "'")): return s[1: -1] else: return s
[ "def", "dequote", "(", "s", ")", ":", "if", "len", "(", "s", ")", "<", "2", ":", "return", "s", "elif", "(", "s", "[", "0", "]", "==", "s", "[", "-", "1", "]", ")", "and", "s", ".", "startswith", "(", "(", "'\"'", ",", "\"'\"", ")", ")", ":", "return", "s", "[", "1", ":", "-", "1", "]", "else", ":", "return", "s" ]
Remove excess quotes from a string.
[ "Remove", "excess", "quotes", "from", "a", "string", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L526-L533
train
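A quick behaviour check for dequote; this assumes the method is reachable as MarkdownWriter.dequote (it takes no self, so under Python 3 the class attribute can be called directly), and the sample strings are invented.

from notedown.notedown import MarkdownWriter

dequote = MarkdownWriter.dequote
assert dequote('"hello"') == 'hello'                 # matching double quotes removed
assert dequote("'hi'") == 'hi'                       # matching single quotes removed
assert dequote('"mismatched\'') == '"mismatched\''   # mismatched quotes left alone
assert dequote('x') == 'x'                           # too short to trim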
aaren/notedown
notedown/notedown.py
MarkdownWriter.data2uri
def data2uri(data, data_type):
    """Convert base64 data into a data uri with the given data_type."""
    MIME_MAP = {
        'image/jpeg': 'jpeg',
        'image/png': 'png',
        'text/plain': 'text',
        'text/html': 'html',
        'text/latex': 'latex',
        'application/javascript': 'html',
        'image/svg+xml': 'svg',
    }
    inverse_map = {v: k for k, v in list(MIME_MAP.items())}
    mime_type = inverse_map[data_type]
    uri = r"data:{mime};base64,{data}"
    return uri.format(mime=mime_type,
                      data=data[mime_type].replace('\n', ''))
python
def data2uri(data, data_type):
    """Convert base64 data into a data uri with the given data_type."""
    MIME_MAP = {
        'image/jpeg': 'jpeg',
        'image/png': 'png',
        'text/plain': 'text',
        'text/html': 'html',
        'text/latex': 'latex',
        'application/javascript': 'html',
        'image/svg+xml': 'svg',
    }
    inverse_map = {v: k for k, v in list(MIME_MAP.items())}
    mime_type = inverse_map[data_type]
    uri = r"data:{mime};base64,{data}"
    return uri.format(mime=mime_type,
                      data=data[mime_type].replace('\n', ''))
[ "def", "data2uri", "(", "data", ",", "data_type", ")", ":", "MIME_MAP", "=", "{", "'image/jpeg'", ":", "'jpeg'", ",", "'image/png'", ":", "'png'", ",", "'text/plain'", ":", "'text'", ",", "'text/html'", ":", "'html'", ",", "'text/latex'", ":", "'latex'", ",", "'application/javascript'", ":", "'html'", ",", "'image/svg+xml'", ":", "'svg'", ",", "}", "inverse_map", "=", "{", "v", ":", "k", "for", "k", ",", "v", "in", "list", "(", "MIME_MAP", ".", "items", "(", ")", ")", "}", "mime_type", "=", "inverse_map", "[", "data_type", "]", "uri", "=", "r\"data:{mime};base64,{data}\"", "return", "uri", ".", "format", "(", "mime", "=", "mime_type", ",", "data", "=", "data", "[", "mime_type", "]", ".", "replace", "(", "'\\n'", ",", "''", ")", ")" ]
Convert base64 data into a data uri with the given data_type.
[ "Convert", "base64", "data", "into", "a", "data", "uri", "with", "the", "given", "data_type", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L536-L551
train
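The data-URI construction above boils down to base64 text plus a MIME prefix, with embedded newlines stripped. A standalone sketch with a placeholder payload:

import base64

payload = b'\x89PNG\r\n\x1a\n'  # placeholder bytes, not a real image
b64 = base64.b64encode(payload).decode('ascii')
uri = "data:{mime};base64,{data}".format(mime='image/png',
                                         data=b64.replace('\n', ''))
print(uri)  # data:image/png;base64,iVBORw0KGgo=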
aaren/notedown
notedown/notedown.py
CodeMagician.magic
def magic(self, alias):
    """Returns the appropriate IPython code magic when
    called with an alias for a language.
    """
    if alias in self.aliases:
        return self.aliases[alias]
    else:
        return "%%{}\n".format(alias)
python
def magic(self, alias):
    """Returns the appropriate IPython code magic when
    called with an alias for a language.
    """
    if alias in self.aliases:
        return self.aliases[alias]
    else:
        return "%%{}\n".format(alias)
[ "def", "magic", "(", "self", ",", "alias", ")", ":", "if", "alias", "in", "self", ".", "aliases", ":", "return", "self", ".", "aliases", "[", "alias", "]", "else", ":", "return", "\"%%{}\\n\"", ".", "format", "(", "alias", ")" ]
Returns the appropriate IPython code magic when called with an alias for a language.
[ "Returns", "the", "appropriate", "IPython", "code", "magic", "when", "called", "with", "an", "alias", "for", "a", "language", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L565-L572
train
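The fallback branch above turns any alias without a special mapping into an IPython cell magic line that gets prepended to the cell source. A standalone sketch; the aliases mapping here is invented, the real one lives on CodeMagician.

ALIASES = {'r': '%%R\n'}  # invented example mapping

def magic_line(alias):
    return ALIASES.get(alias, "%%{}\n".format(alias))

print(repr(magic_line('bash') + 'echo hi'))  # '%%bash\necho hi'
print(repr(magic_line('r') + 'x <- 1'))      # '%%R\nx <- 1'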
aaren/notedown
notedown/notedown.py
Knitr.knit
def knit(self, input_file, opts_chunk='eval=FALSE'):
    """Use Knitr to convert the r-markdown input_file into
    markdown, returning a file object.
    """
    # use temporary files at both ends to allow stdin / stdout
    tmp_in = tempfile.NamedTemporaryFile(mode='w+')
    tmp_out = tempfile.NamedTemporaryFile(mode='w+')

    tmp_in.file.write(input_file.read())
    tmp_in.file.flush()
    tmp_in.file.seek(0)

    self._knit(tmp_in.name, tmp_out.name, opts_chunk)
    tmp_out.file.flush()
    return tmp_out
python
def knit(self, input_file, opts_chunk='eval=FALSE'):
    """Use Knitr to convert the r-markdown input_file into
    markdown, returning a file object.
    """
    # use temporary files at both ends to allow stdin / stdout
    tmp_in = tempfile.NamedTemporaryFile(mode='w+')
    tmp_out = tempfile.NamedTemporaryFile(mode='w+')

    tmp_in.file.write(input_file.read())
    tmp_in.file.flush()
    tmp_in.file.seek(0)

    self._knit(tmp_in.name, tmp_out.name, opts_chunk)
    tmp_out.file.flush()
    return tmp_out
[ "def", "knit", "(", "self", ",", "input_file", ",", "opts_chunk", "=", "'eval=FALSE'", ")", ":", "# use temporary files at both ends to allow stdin / stdout", "tmp_in", "=", "tempfile", ".", "NamedTemporaryFile", "(", "mode", "=", "'w+'", ")", "tmp_out", "=", "tempfile", ".", "NamedTemporaryFile", "(", "mode", "=", "'w+'", ")", "tmp_in", ".", "file", ".", "write", "(", "input_file", ".", "read", "(", ")", ")", "tmp_in", ".", "file", ".", "flush", "(", ")", "tmp_in", ".", "file", ".", "seek", "(", "0", ")", "self", ".", "_knit", "(", "tmp_in", ".", "name", ",", "tmp_out", ".", "name", ",", "opts_chunk", ")", "tmp_out", ".", "file", ".", "flush", "(", ")", "return", "tmp_out" ]
Use Knitr to convert the r-markdown input_file into markdown, returning a file object.
[ "Use", "Knitr", "to", "convert", "the", "r", "-", "markdown", "input_file", "into", "markdown", "returning", "a", "file", "object", "." ]
1e920c7e4ecbe47420c12eed3d5bcae735121222
https://github.com/aaren/notedown/blob/1e920c7e4ecbe47420c12eed3d5bcae735121222/notedown/notedown.py#L602-L616
train
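The temp-file round trip above lets an external tool that only understands file paths sit between stdin and stdout. The same pattern in isolation, with a plain Python function standing in for the knitr call (POSIX-only, since the named temporary files are reopened while still open):

import tempfile

def fake_tool(in_path, out_path):
    # stand-in for an external command that reads and writes files
    with open(in_path) as src, open(out_path, 'w') as dst:
        dst.write(src.read().upper())

tmp_in = tempfile.NamedTemporaryFile(mode='w+')
tmp_out = tempfile.NamedTemporaryFile(mode='w+')

tmp_in.write('some r-markdown text')
tmp_in.flush()
tmp_in.seek(0)

fake_tool(tmp_in.name, tmp_out.name)
tmp_out.seek(0)
print(tmp_out.read())  # SOME R-MARKDOWN TEXT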
cyface/django-termsandconditions
termsandconditions/middleware.py
is_path_protected
def is_path_protected(path):
    """
    returns True if given path is to be protected, otherwise False

    The path is not to be protected when it appears on:
    TERMS_EXCLUDE_URL_PREFIX_LIST, TERMS_EXCLUDE_URL_LIST,
    TERMS_EXCLUDE_URL_CONTAINS_LIST or as ACCEPT_TERMS_PATH
    """
    protected = True

    for exclude_path in TERMS_EXCLUDE_URL_PREFIX_LIST:
        if path.startswith(exclude_path):
            protected = False

    for contains_path in TERMS_EXCLUDE_URL_CONTAINS_LIST:
        if contains_path in path:
            protected = False

    if path in TERMS_EXCLUDE_URL_LIST:
        protected = False

    if path.startswith(ACCEPT_TERMS_PATH):
        protected = False

    return protected
python
def is_path_protected(path):
    """
    returns True if given path is to be protected, otherwise False

    The path is not to be protected when it appears on:
    TERMS_EXCLUDE_URL_PREFIX_LIST, TERMS_EXCLUDE_URL_LIST,
    TERMS_EXCLUDE_URL_CONTAINS_LIST or as ACCEPT_TERMS_PATH
    """
    protected = True

    for exclude_path in TERMS_EXCLUDE_URL_PREFIX_LIST:
        if path.startswith(exclude_path):
            protected = False

    for contains_path in TERMS_EXCLUDE_URL_CONTAINS_LIST:
        if contains_path in path:
            protected = False

    if path in TERMS_EXCLUDE_URL_LIST:
        protected = False

    if path.startswith(ACCEPT_TERMS_PATH):
        protected = False

    return protected
[ "def", "is_path_protected", "(", "path", ")", ":", "protected", "=", "True", "for", "exclude_path", "in", "TERMS_EXCLUDE_URL_PREFIX_LIST", ":", "if", "path", ".", "startswith", "(", "exclude_path", ")", ":", "protected", "=", "False", "for", "contains_path", "in", "TERMS_EXCLUDE_URL_CONTAINS_LIST", ":", "if", "contains_path", "in", "path", ":", "protected", "=", "False", "if", "path", "in", "TERMS_EXCLUDE_URL_LIST", ":", "protected", "=", "False", "if", "path", ".", "startswith", "(", "ACCEPT_TERMS_PATH", ")", ":", "protected", "=", "False", "return", "protected" ]
returns True if given path is to be protected, otherwise False The path is not to be protected when it appears on: TERMS_EXCLUDE_URL_PREFIX_LIST, TERMS_EXCLUDE_URL_LIST, TERMS_EXCLUDE_URL_CONTAINS_LIST or as ACCEPT_TERMS_PATH
[ "returns", "True", "if", "given", "path", "is", "to", "be", "protected", "otherwise", "False" ]
e18f06d0bad1e047f99222d1153f6e2b3bd5224f
https://github.com/cyface/django-termsandconditions/blob/e18f06d0bad1e047f99222d1153f6e2b3bd5224f/termsandconditions/middleware.py#L49-L74
train
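A condensed, standalone re-implementation of the exclusion rules above can make the matching behaviour easy to test; the settings values here are invented (the real lists come from the Django settings module), and early returns replace the protected flag without changing the outcome.

TERMS_EXCLUDE_URL_PREFIX_LIST = {'/admin/', '/static/'}   # invented examples
TERMS_EXCLUDE_URL_CONTAINS_LIST = {'/healthcheck/'}
TERMS_EXCLUDE_URL_LIST = {'/', '/termsrequired/'}
ACCEPT_TERMS_PATH = '/terms/accept/'

def is_path_protected(path):
    if any(path.startswith(p) for p in TERMS_EXCLUDE_URL_PREFIX_LIST):
        return False
    if any(c in path for c in TERMS_EXCLUDE_URL_CONTAINS_LIST):
        return False
    if path in TERMS_EXCLUDE_URL_LIST:
        return False
    if path.startswith(ACCEPT_TERMS_PATH):
        return False
    return True

print(is_path_protected('/dashboard/'))       # True
print(is_path_protected('/admin/login/'))     # False
print(is_path_protected('/terms/accept/1/'))  # False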
cyface/django-termsandconditions
termsandconditions/middleware.py
TermsAndConditionsRedirectMiddleware.process_request
def process_request(self, request):
    """Process each request to app to ensure terms have been accepted"""

    LOGGER.debug('termsandconditions.middleware')

    current_path = request.META['PATH_INFO']

    if DJANGO_VERSION <= (2, 0, 0):
        user_authenticated = request.user.is_authenticated()
    else:
        user_authenticated = request.user.is_authenticated

    if user_authenticated and is_path_protected(current_path):
        for term in TermsAndConditions.get_active_terms_not_agreed_to(request.user):
            # Check for querystring and include it if there is one
            qs = request.META['QUERY_STRING']
            current_path += '?' + qs if qs else ''
            return redirect_to_terms_accept(current_path, term.slug)

    return None
python
def process_request(self, request):
    """Process each request to app to ensure terms have been accepted"""

    LOGGER.debug('termsandconditions.middleware')

    current_path = request.META['PATH_INFO']

    if DJANGO_VERSION <= (2, 0, 0):
        user_authenticated = request.user.is_authenticated()
    else:
        user_authenticated = request.user.is_authenticated

    if user_authenticated and is_path_protected(current_path):
        for term in TermsAndConditions.get_active_terms_not_agreed_to(request.user):
            # Check for querystring and include it if there is one
            qs = request.META['QUERY_STRING']
            current_path += '?' + qs if qs else ''
            return redirect_to_terms_accept(current_path, term.slug)

    return None
[ "def", "process_request", "(", "self", ",", "request", ")", ":", "LOGGER", ".", "debug", "(", "'termsandconditions.middleware'", ")", "current_path", "=", "request", ".", "META", "[", "'PATH_INFO'", "]", "if", "DJANGO_VERSION", "<=", "(", "2", ",", "0", ",", "0", ")", ":", "user_authenticated", "=", "request", ".", "user", ".", "is_authenticated", "(", ")", "else", ":", "user_authenticated", "=", "request", ".", "user", ".", "is_authenticated", "if", "user_authenticated", "and", "is_path_protected", "(", "current_path", ")", ":", "for", "term", "in", "TermsAndConditions", ".", "get_active_terms_not_agreed_to", "(", "request", ".", "user", ")", ":", "# Check for querystring and include it if there is one", "qs", "=", "request", ".", "META", "[", "'QUERY_STRING'", "]", "current_path", "+=", "'?'", "+", "qs", "if", "qs", "else", "''", "return", "redirect_to_terms_accept", "(", "current_path", ",", "term", ".", "slug", ")", "return", "None" ]
Process each request to app to ensure terms have been accepted
[ "Process", "each", "request", "to", "app", "to", "ensure", "terms", "have", "been", "accepted" ]
e18f06d0bad1e047f99222d1153f6e2b3bd5224f
https://github.com/cyface/django-termsandconditions/blob/e18f06d0bad1e047f99222d1153f6e2b3bd5224f/termsandconditions/middleware.py#L27-L46
train
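For completeness, a hypothetical Django settings fragment wiring the middleware above into a project. The dotted path is inferred from the file path and class name in the record, and the entry is placed after AuthenticationMiddleware because process_request inspects request.user; check the project's own documentation before relying on this.

# settings.py fragment (illustrative, not an official example)
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'termsandconditions.middleware.TermsAndConditionsRedirectMiddleware',
]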