Dataset columns:

| Column | Type |
|---|---|
| repo | string (7–55 chars) |
| path | string (4–127 chars) |
| func_name | string (1–88 chars) |
| original_string | string (75–19.8k chars) |
| language | string (1 class) |
| code | string (75–19.8k chars) |
| code_tokens | sequence |
| docstring | string (3–17.3k chars) |
| docstring_tokens | sequence |
| sha | string (40 chars) |
| url | string (87–242 chars) |
| partition | string (1 class) |

Cymmetria/honeycomb | honeycomb/utils/plugin_utils.py | install_from_zip | python | train
sha: 33ea91b5cf675000e4e85dd02efe580ea6e95c86
url: https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/plugin_utils.py#L187-L201
docstring: Install plugin from zipfile.

```python
def install_from_zip(pkgpath, install_path, register_func, delete_after_install=False):
    """Install plugin from zipfile."""
    logger.debug("%s is a file, attempting to load zip", pkgpath)
    pkgtempdir = tempfile.mkdtemp(prefix="honeycomb_")
    try:
        with zipfile.ZipFile(pkgpath) as pkgzip:
            pkgzip.extractall(pkgtempdir)
    except zipfile.BadZipfile as exc:
        logger.debug(str(exc))
        raise click.ClickException(str(exc))
    if delete_after_install:
        logger.debug("deleting %s", pkgpath)
        os.remove(pkgpath)
    logger.debug("installing from unzipped folder %s", pkgtempdir)
    return install_dir(pkgtempdir, install_path, register_func, delete_after_install=True)
```

Cymmetria/honeycomb | honeycomb/utils/plugin_utils.py | install_from_repo | python | train
sha: 33ea91b5cf675000e4e85dd02efe580ea6e95c86
url: https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/plugin_utils.py#L204-L233
docstring: Install plugin from online repo.

```python
def install_from_repo(pkgname, plugin_type, install_path, register_func):
    """Install plugin from online repo."""
    rsession = requests.Session()
    rsession.mount("https://", HTTPAdapter(max_retries=3))
    logger.debug("trying to install %s from online repo", pkgname)
    pkgurl = "{}/{}s/{}.zip".format(defs.GITHUB_RAW, plugin_type, pkgname)
    try:
        logger.debug("Requesting HTTP HEAD: %s", pkgurl)
        r = rsession.head(pkgurl)
        r.raise_for_status()
        total_size = int(r.headers.get("content-length", 0))
        pkgsize = _sizeof_fmt(total_size)
        with click.progressbar(length=total_size, label="Downloading {} {} ({}).."
                               .format(plugin_type, pkgname, pkgsize)) as bar:
            r = rsession.get(pkgurl, stream=True)
            with tempfile.NamedTemporaryFile(delete=False) as f:
                downloaded_bytes = 0
                for chunk in r.iter_content(chunk_size=1):  # TODO: Consider increasing to reduce cycles
                    if chunk:
                        f.write(chunk)
                        downloaded_bytes += len(chunk)
                        bar.update(downloaded_bytes)
        return install_from_zip(f.name, install_path, register_func, delete_after_install=True)
    except requests.exceptions.HTTPError as exc:
        logger.debug(str(exc))
        raise exceptions.PluginNotFoundInOnlineRepo(pkgname)
    except requests.exceptions.ConnectionError as exc:
        logger.debug(str(exc))
        raise exceptions.PluginRepoConnectionError()
```
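
The inline TODO flags that `chunk_size=1` makes `iter_content` yield one byte per write cycle. As a point of comparison only (not code from this repository), a streaming download with a larger chunk size looks like the sketch below; the 64 KiB size and the helper name are assumptions.

```python
import tempfile

import requests


def download_to_tempfile(url, chunk_size=64 * 1024):
    """Stream url into a temp file with fewer, larger write cycles (illustrative sketch)."""
    r = requests.get(url, stream=True)
    r.raise_for_status()
    with tempfile.NamedTemporaryFile(delete=False) as f:
        for chunk in r.iter_content(chunk_size=chunk_size):
            if chunk:  # skip keep-alive chunks
                f.write(chunk)
    return f.name  # delete=False keeps the file around after the with-block
```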

Cymmetria/honeycomb | honeycomb/utils/plugin_utils.py | uninstall_plugin | python | train
sha: 33ea91b5cf675000e4e85dd02efe580ea6e95c86
url: https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/plugin_utils.py#L246-L264
docstring: Uninstall a plugin.

```python
def uninstall_plugin(pkgpath, force):
    """Uninstall a plugin.

    :param pkgpath: Path to package to uninstall (delete)
    :param force: Force uninstall without asking
    """
    pkgname = os.path.basename(pkgpath)
    if os.path.exists(pkgpath):
        if not force:
            click.confirm("[?] Are you sure you want to delete `{}` from honeycomb?".format(pkgname),
                          abort=True)
        try:
            shutil.rmtree(pkgpath)
            logger.debug("successfully uninstalled {}".format(pkgname))
            click.secho("[*] Uninstalled {}".format(pkgname))
        except OSError as exc:
            logger.exception(str(exc))
    else:
        click.secho("[-] doh! I cannot seem to find `{}`, are you sure it's installed?".format(pkgname))
```

Cymmetria/honeycomb | honeycomb/utils/plugin_utils.py | list_remote_plugins | python | train
sha: 33ea91b5cf675000e4e85dd02efe580ea6e95c86
url: https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/plugin_utils.py#L267-L281
docstring: List remote plugins from online repo.

```python
def list_remote_plugins(installed_plugins, plugin_type):
    """List remote plugins from online repo."""
    click.secho("\n[*] Additional plugins from online repository:")
    try:
        rsession = requests.Session()
        rsession.mount("https://", HTTPAdapter(max_retries=3))
        r = rsession.get("{0}/{1}s/{1}s.txt".format(defs.GITHUB_RAW, plugin_type))
        logger.debug("fetching %ss from remote repo", plugin_type)
        plugins = [_ for _ in r.text.splitlines() if _ not in installed_plugins]
        click.secho(" ".join(plugins))
    except requests.exceptions.ConnectionError as exc:
        logger.debug(str(exc), exc_info=True)
        raise click.ClickException("Unable to fetch {} information from online repository".format(plugin_type))
```

Cymmetria/honeycomb | honeycomb/utils/plugin_utils.py | list_local_plugins | python | train
sha: 33ea91b5cf675000e4e85dd02efe580ea6e95c86
url: https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/plugin_utils.py#L284-L296
docstring: List local plugins with details.

```python
def list_local_plugins(plugin_type, plugins_path, plugin_details):
    """List local plugins with details."""
    installed_plugins = list()
    for plugin in next(os.walk(plugins_path))[1]:
        s = plugin_details(plugin)
        installed_plugins.append(plugin)
        click.secho(s)
    if not installed_plugins:
        click.secho("[*] You do not have any {0}s installed, "
                    "try installing one with `honeycomb {0} install`".format(plugin_type))
    return installed_plugins
```
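
For readers unfamiliar with the `next(os.walk(...))[1]` idiom used above: the first item yielded by `os.walk` describes the top-level directory, so index 1 is the list of its immediate sub-directories, one per installed plugin. A small self-contained check, with a placeholder path:

```python
import os

plugins_path = "."  # placeholder; Honeycomb would pass its services/integrations folder
top, dirnames, filenames = next(os.walk(plugins_path))
print(dirnames)  # immediate sub-directories only, no recursion
```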

Cymmetria/honeycomb | honeycomb/utils/plugin_utils.py | parse_plugin_args | python | train
sha: 33ea91b5cf675000e4e85dd02efe580ea6e95c86
url: https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/plugin_utils.py#L299-L327
docstring: Parse command line arguments based on the plugin's parameters config.

```python
def parse_plugin_args(command_args, config_args):
    """Parse command line arguments based on the plugin's parameters config.

    :param command_args: Command line arguments as provided by the user in `key=value` format.
    :param config_args: Plugin parameters parsed from config.json.
    :returns: Validated dictionary of parameters that will be passed to plugin class
    """
    parsed_args = dict()
    for arg in command_args:
        kv = arg.split("=")
        if len(kv) != 2:
            raise click.UsageError("Invalid parameter '{}', must be in key=value format".format(arg))
        parsed_args[kv[0]] = config_utils.get_truetype(kv[1])
    for arg in config_args:
        value = arg[defs.VALUE]
        value_type = arg[defs.TYPE]
        if value in parsed_args:
            # will raise if invalid
            config_utils.validate_field_matches_type(value, parsed_args[value], value_type,
                                                     arg.get(defs.ITEMS), arg.get(defs.MIN), arg.get(defs.MAX))
        elif defs.DEFAULT in arg:  # Has a default field
            # return default values for unset parameters
            parsed_args[value] = arg[defs.DEFAULT]
        elif arg[defs.REQUIRED]:  # requires field is true
            """parameter was not supplied by user, but it's required and has no default value"""
            raise exceptions.RequiredFieldMissing(value)
    return parsed_args
```
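
A rough sketch of the inputs and output this function deals with, using plain string keys in place of the `defs` constants; the key names and parameter specs below are assumptions made only for illustration.

```python
# CLI arguments as typed by the user, and parameter specs loaded from config.json.
command_args = ["port=8888", "threads=4"]
config_args = [
    {"value": "port", "type": "integer", "default": 80, "required": False},
    {"value": "threads", "type": "integer", "default": 1, "required": False},
    {"value": "banner", "type": "string", "default": "nginx", "required": False},
]

# parse_plugin_args first splits each "key=value" pair and coerces the value,
# then validates supplied parameters and fills defaults for the rest, ending up
# with roughly {"port": 8888, "threads": 4, "banner": "nginx"}.
parsed = {kv.split("=")[0]: kv.split("=")[1] for kv in command_args}
print(parsed)  # {'port': '8888', 'threads': '4'} before type coercion and defaults
```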

Cymmetria/honeycomb | honeycomb/utils/plugin_utils.py | get_select_items | python | train
sha: 33ea91b5cf675000e4e85dd02efe580ea6e95c86
url: https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/plugin_utils.py#L330-L339
docstring: Return list of possible select items.

```python
def get_select_items(items):
    """Return list of possible select items."""
    option_items = list()
    for item in items:
        if isinstance(item, dict) and defs.VALUE in item and defs.LABEL in item:
            option_items.append(item[defs.VALUE])
        else:
            raise exceptions.ParametersFieldError(item, "a dictionary with {} and {}"
                                                  .format(defs.LABEL, defs.VALUE))
    return option_items
```
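
For context, a select-type parameter carries an `items` list of label/value dictionaries; a minimal illustration with plain string keys, which are again an assumption about what the `defs` constants resolve to:

```python
items = [
    {"label": "HTTP", "value": "http"},
    {"label": "HTTPS", "value": "https"},
]
# get_select_items(items) would return ["http", "https"]; an entry missing either key
# (or not a dict at all) would raise ParametersFieldError instead.
values = [item["value"] for item in items]
print(values)
```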

Cymmetria/honeycomb | honeycomb/utils/plugin_utils.py | print_plugin_args | python | train
sha: 33ea91b5cf675000e4e85dd02efe580ea6e95c86
url: https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/plugin_utils.py#L354-L367
docstring: Print plugin parameters table.

```python
def print_plugin_args(plugin_path):
    """Print plugin parameters table."""
    args = config_utils.get_config_parameters(plugin_path)
    args_format = "{:20} {:10} {:^15} {:^10} {:25}"
    title = args_format.format(defs.NAME.upper(), defs.TYPE.upper(), defs.DEFAULT.upper(),
                               defs.REQUIRED.upper(), defs.DESCRIPTION.upper())
    click.secho(title)
    click.secho("-" * len(title))
    for arg in args:
        help_text = " ({})".format(arg[defs.HELP_TEXT]) if defs.HELP_TEXT in arg else ""
        options = _parse_select_options(arg)
        description = arg[defs.LABEL] + options + help_text
        click.secho(args_format.format(arg[defs.VALUE], arg[defs.TYPE], str(arg.get(defs.DEFAULT, None)),
                                       str(arg.get(defs.REQUIRED, False)), description))
```

Cymmetria/honeycomb | honeycomb/integrationmanager/tasks.py | configure_integration | python | train
sha: 33ea91b5cf675000e4e85dd02efe580ea6e95c86
url: https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/integrationmanager/tasks.py#L39-L58
docstring: Configure and enable an integration.

```python
def configure_integration(path):
    """Configure and enable an integration."""
    integration = register_integration(path)
    integration_args = {}
    try:
        with open(os.path.join(path, ARGS_JSON)) as f:
            integration_args = json.loads(f.read())
    except Exception as exc:
        logger.debug(str(exc), exc_info=True)
        raise click.ClickException("Cannot load {} integration args, please configure it first."
                                   .format(os.path.basename(path)))
    click.secho("[*] Adding integration {}".format(integration.name))
    logger.debug("Adding integration %s", integration.name,
                 extra={"integration": integration.name, "args": integration_args})
    configured_integration = ConfiguredIntegration(name=integration.name, integration=integration, path=path)
    configured_integration.data = integration_args
    configured_integration.integration.module = get_integration_module(path).IntegrationActionsClass(integration_args)
    configured_integrations.append(configured_integration)
```
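
For context, the function expects a JSON file of previously configured arguments inside the integration directory (the `ARGS_JSON` constant). The sketch below shows how such a file might be produced; the filename "args.json", the directory, and the field names are assumptions used only for illustration.

```python
import json
import os

integration_path = "/tmp/my_integration"  # placeholder integration directory
os.makedirs(integration_path, exist_ok=True)
args = {"api_key": "REDACTED", "endpoint": "https://siem.example.com/api"}  # invented fields
with open(os.path.join(integration_path, "args.json"), "w") as f:  # assuming ARGS_JSON == "args.json"
    json.dump(args, f)
```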

Cymmetria/honeycomb | honeycomb/integrationmanager/tasks.py | send_alert_to_subscribed_integrations | python | train
sha: 33ea91b5cf675000e4e85dd02efe580ea6e95c86
url: https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/integrationmanager/tasks.py#L61-L66
docstring: Send Alert to relevant integrations.

```python
def send_alert_to_subscribed_integrations(alert):
    """Send Alert to relevant integrations."""
    valid_configured_integrations = get_valid_configured_integrations(alert)
    for configured_integration in valid_configured_integrations:
        threading.Thread(target=create_integration_alert_and_call_send, args=(alert, configured_integration)).start()
```

Cymmetria/honeycomb | honeycomb/integrationmanager/tasks.py | get_valid_configured_integrations | python | train
sha: 33ea91b5cf675000e4e85dd02efe580ea6e95c86
url: https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/integrationmanager/tasks.py#L74-L89
docstring: Return a list of integrations for alert filtered by alert_type.

```python
def get_valid_configured_integrations(alert):
    """Return a list of integrations for alert filtered by alert_type.

    :returns: A list of relevant integrations
    """
    if not configured_integrations:
        return []
    # Collect all integrations that are configured for specific alert_type
    # or have no specific supported_event_types (i.e., all alert types)
    valid_configured_integrations = [
        _ for _ in configured_integrations if _.integration.integration_type == IntegrationTypes.EVENT_OUTPUT.name and
        (not _.integration.supported_event_types or alert.alert_type in _.integration.supported_event_types)
    ]
    return valid_configured_integrations
```

Cymmetria/honeycomb | honeycomb/integrationmanager/tasks.py | create_integration_alert_and_call_send | python | train
sha: 33ea91b5cf675000e4e85dd02efe580ea6e95c86
url: https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/integrationmanager/tasks.py#L92-L101
docstring: Create an IntegrationAlert object and send it to Integration.

```python
def create_integration_alert_and_call_send(alert, configured_integration):
    """Create an IntegrationAlert object and send it to Integration."""
    integration_alert = IntegrationAlert(
        alert=alert,
        configured_integration=configured_integration,
        status=IntegrationAlertStatuses.PENDING.name,
        retries=configured_integration.integration.max_send_retries
    )
    send_alert_to_configured_integration(integration_alert)
```

Cymmetria/honeycomb | honeycomb/integrationmanager/tasks.py | send_alert_to_configured_integration | python | train
sha: 33ea91b5cf675000e4e85dd02efe580ea6e95c86
url: https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/integrationmanager/tasks.py#L104-L167
docstring: Send IntegrationAlert to configured integration.

```python
def send_alert_to_configured_integration(integration_alert):
    """Send IntegrationAlert to configured integration."""
    try:
        alert = integration_alert.alert
        configured_integration = integration_alert.configured_integration
        integration = configured_integration.integration
        integration_actions_instance = configured_integration.integration.module
        alert_fields = dict()
        if integration.required_fields:
            if not all([hasattr(alert, _) for _ in integration.required_fields]):
                logger.debug("Alert does not have all required_fields (%s) for integration %s, skipping",
                             integration.required_fields,
                             integration.name)
                return
        exclude_fields = ["alert_type", "service_type"]
        alert_fields = {}
        for field in alert.__slots__:
            if hasattr(alert, field) and field not in exclude_fields:
                alert_fields[field] = getattr(alert, field)
        logger.debug("Sending alert %s to %s", alert_fields, integration.name)
        output_data, output_file_content = integration_actions_instance.send_event(alert_fields)
        if integration.polling_enabled:
            integration_alert.status = IntegrationAlertStatuses.POLLING.name
            polling_integration_alerts.append(integration_alert)
        else:
            integration_alert.status = IntegrationAlertStatuses.DONE.name
        integration_alert.send_time = get_current_datetime_utc()
        integration_alert.output_data = json.dumps(output_data)
        # TODO: do something with successfully handled alerts? They are all written to debug log file
    except exceptions.IntegrationMissingRequiredFieldError as exc:
        logger.exception("Send response formatting for integration alert %s failed. Missing required fields",
                         integration_alert,
                         exc.message)
        integration_alert.status = IntegrationAlertStatuses.ERROR_MISSING_SEND_FIELDS.name
    except exceptions.IntegrationOutputFormatError:
        logger.exception("Send response formatting for integration alert %s failed", integration_alert)
        integration_alert.status = IntegrationAlertStatuses.ERROR_SENDING_FORMATTING.name
    except exceptions.IntegrationSendEventError as exc:
        integration_send_retries = integration_alert.retries if integration_alert.retries <= MAX_SEND_RETRIES \
            else MAX_SEND_RETRIES  # making sure we do not exceed celery max retries
        send_retries_left = integration_send_retries - 1
        integration_alert.retries = send_retries_left
        logger.error("Sending integration alert %s failed. Message: %s. Retries left: %s",
                     integration_alert,
                     exc.message,
                     send_retries_left)
        if send_retries_left == 0:
            integration_alert.status = IntegrationAlertStatuses.ERROR_SENDING.name
        if send_retries_left > 0:
            sleep(SEND_ALERT_DATA_INTERVAL)
            send_alert_to_configured_integration(integration_alert)
```

Cymmetria/honeycomb | honeycomb/integrationmanager/tasks.py | poll_integration_alert_data | python | train
sha: 33ea91b5cf675000e4e85dd02efe580ea6e95c86
url: https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/integrationmanager/tasks.py#L191-L223
docstring: Poll for updates on waiting IntegrationAlerts.

```python
def poll_integration_alert_data(integration_alert):
    """Poll for updates on waiting IntegrationAlerts."""
    logger.info("Polling information for integration alert %s", integration_alert)
    try:
        configured_integration = integration_alert.configured_integration
        integration_actions_instance = configured_integration.integration.module
        output_data, output_file_content = integration_actions_instance.poll_for_updates(
            json.loads(integration_alert.output_data)
        )
        integration_alert.status = IntegrationAlertStatuses.DONE.name
        integration_alert.output_data = json.dumps(output_data)
        polling_integration_alerts.remove(integration_alert)
    except exceptions.IntegrationNoMethodImplementationError:
        logger.error("No poll_for_updates function found for integration alert %s", integration_alert)
        integration_alert.status = IntegrationAlertStatuses.ERROR_POLLING.name
    except exceptions.IntegrationPollEventError:
        # This does not always indicate an error, this is also raised when need to try again later
        logger.debug("Polling for integration alert %s failed", integration_alert)
    except exceptions.IntegrationOutputFormatError:
        logger.error("Integration alert %s formatting error", integration_alert)
        integration_alert.status = IntegrationAlertStatuses.ERROR_POLLING_FORMATTING.name
    except Exception:
        logger.exception("Error polling integration alert %s", integration_alert)
        integration_alert.status = IntegrationAlertStatuses.ERROR_POLLING.name
```

Cymmetria/honeycomb | honeycomb/utils/wait.py | wait_until | python | train
sha: 33ea91b5cf675000e4e85dd02efe580ea6e95c86
url: https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/wait.py#L19-L55
docstring: Run a command in a loop until desired result or timeout occurs.

```python
def wait_until(func,
               check_return_value=True,
               total_timeout=60,
               interval=0.5,
               exc_list=None,
               error_message="",
               *args,
               **kwargs):
    """Run a command in a loop until desired result or timeout occurs.

    :param func: Function to call and wait for
    :param bool check_return_value: Examine return value
    :param int total_timeout: Wait timeout,
    :param float interval: Sleep interval between retries
    :param list exc_list: Acceptable exception list
    :param str error_message: Default error messages
    :param args: args to pass to func
    :param kwargs: lwargs to pass to fun
    """
    start_function = time.time()
    while time.time() - start_function < total_timeout:
        try:
            logger.debug("executing {} with args {} {}".format(func, args, kwargs))
            return_value = func(*args, **kwargs)
            if not check_return_value or (check_return_value and return_value):
                return return_value
        except Exception as exc:
            if exc_list and any([isinstance(exc, x) for x in exc_list]):
                pass
            else:
                raise
        time.sleep(interval)
    raise TimeoutException(error_message)
```
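
A minimal usage sketch; the file being waited on is an invented example, not something this snippet shows Honeycomb doing:

```python
import os

# Poll os.path.exists("/tmp/service.pid") every 0.5s for up to 10s; extra keyword
# arguments are forwarded to the polled function. Raises TimeoutException on failure.
wait_until(os.path.exists,
           total_timeout=10,
           interval=0.5,
           error_message="service did not write its pid file",
           path="/tmp/service.pid")
```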

Cymmetria/honeycomb | honeycomb/utils/wait.py | search_json_log | python | train
sha: 33ea91b5cf675000e4e85dd02efe580ea6e95c86
url: https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/wait.py#L58-L74
docstring: Search json log file for a key=value pair.

```python
def search_json_log(filepath, key, value):
    """Search json log file for a key=value pair.

    :param filepath: Valid path to a json file
    :param key: key to match
    :param value: value to match
    :returns: First matching line in json log file, parsed by :py:func:`json.loads`
    """
    try:
        with open(filepath, "r") as fh:
            for line in fh.readlines():
                log = json.loads(line)
                if key in log and log[key] == value:
                    return log
    except IOError:
        pass
    return False
```
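
A short usage sketch; the log path and field names are illustrative only:

```python
# Look for the first line logged by a service named "simple_http".
entry = search_json_log("/tmp/honeycomb_debug.json", "service", "simple_http")
if entry:
    print(entry.get("message"))
else:
    print("no matching line found (or the file is not readable yet)")
```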

Cymmetria/honeycomb | honeycomb/commands/__init__.py | MyGroup.list_commands | python | train
sha: 33ea91b5cf675000e4e85dd02efe580ea6e95c86
url: https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/commands/__init__.py#L27-L34
docstring: List commands from folder.

```python
def list_commands(self, ctx):
    """List commands from folder."""
    rv = []
    files = [_ for _ in next(os.walk(self.folder))[2] if not _.startswith("_") and _.endswith(".py")]
    for filename in files:
        rv.append(filename[:-3])
    rv.sort()
    return rv
```
Cymmetria/honeycomb | honeycomb/commands/__init__.py | MyGroup.get_command | def get_command(self, ctx, name):
"""Fetch command from folder."""
plugin = os.path.basename(self.folder)
try:
command = importlib.import_module("honeycomb.commands.{}.{}".format(plugin, name))
except ImportError:
raise click.UsageError("No such command {} {}\n\n{}".format(plugin, name, self.get_help(ctx)))
return getattr(command, name) | python | def get_command(self, ctx, name):
"""Fetch command from folder."""
plugin = os.path.basename(self.folder)
try:
command = importlib.import_module("honeycomb.commands.{}.{}".format(plugin, name))
except ImportError:
raise click.UsageError("No such command {} {}\n\n{}".format(plugin, name, self.get_help(ctx)))
return getattr(command, name) | [
"def",
"get_command",
"(",
"self",
",",
"ctx",
",",
"name",
")",
":",
"plugin",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"self",
".",
"folder",
")",
"try",
":",
"command",
"=",
"importlib",
".",
"import_module",
"(",
"\"honeycomb.commands.{}.{}\"",
".",
"format",
"(",
"plugin",
",",
"name",
")",
")",
"except",
"ImportError",
":",
"raise",
"click",
".",
"UsageError",
"(",
"\"No such command {} {}\\n\\n{}\"",
".",
"format",
"(",
"plugin",
",",
"name",
",",
"self",
".",
"get_help",
"(",
"ctx",
")",
")",
")",
"return",
"getattr",
"(",
"command",
",",
"name",
")"
] | Fetch command from folder. | [
"Fetch",
"command",
"from",
"folder",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/commands/__init__.py#L36-L43 | train |
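Taken together, MyGroup.list_commands and MyGroup.get_command let Click discover subcommands from the .py files in a folder. The sketch below shows the same pattern in isolation; the package name "mypkg" and folder layout are assumptions, not Honeycomb's actual module tree.

```python
import importlib
import os

import click


class FolderGroup(click.Group):
    """Click group whose subcommands are the .py files found in a folder."""

    def __init__(self, folder, **kwargs):
        super(FolderGroup, self).__init__(**kwargs)
        self.folder = folder

    def list_commands(self, ctx):
        files = next(os.walk(self.folder))[2]
        return sorted(f[:-3] for f in files
                      if f.endswith(".py") and not f.startswith("_"))

    def get_command(self, ctx, name):
        # Each module (e.g. mypkg.commands.run) must expose a callable named
        # after the file, mirroring the get_command implementation above.
        module = importlib.import_module("mypkg.commands.{}".format(name))
        return getattr(module, name)
```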
Cymmetria/honeycomb | honeycomb/cli.py | cli | def cli(ctx, home, iamroot, config, verbose):
"""Honeycomb is a honeypot framework."""
_mkhome(home)
setup_logging(home, verbose)
logger.debug("Honeycomb v%s", __version__, extra={"version": __version__})
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
try:
is_admin = os.getuid() == 0
except AttributeError:
is_admin = ctypes.windll.shell32.IsUserAnAdmin()
if is_admin:
if not iamroot:
raise click.ClickException("Honeycomb should not run as a privileged user, if you are just "
"trying to bind to a low port try running `setcap 'cap_net_bind_service=+ep' "
"$(which honeycomb)` instead. If you insist, use --iamroot")
logger.warn("running as root!")
ctx.obj["HOME"] = home
logger.debug("ctx: {}".format(ctx.obj))
if config:
return process_config(ctx, config) | python | def cli(ctx, home, iamroot, config, verbose):
"""Honeycomb is a honeypot framework."""
_mkhome(home)
setup_logging(home, verbose)
logger.debug("Honeycomb v%s", __version__, extra={"version": __version__})
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
try:
is_admin = os.getuid() == 0
except AttributeError:
is_admin = ctypes.windll.shell32.IsUserAnAdmin()
if is_admin:
if not iamroot:
raise click.ClickException("Honeycomb should not run as a privileged user, if you are just "
"trying to bind to a low port try running `setcap 'cap_net_bind_service=+ep' "
"$(which honeycomb)` instead. If you insist, use --iamroot")
logger.warn("running as root!")
ctx.obj["HOME"] = home
logger.debug("ctx: {}".format(ctx.obj))
if config:
return process_config(ctx, config) | [
"def",
"cli",
"(",
"ctx",
",",
"home",
",",
"iamroot",
",",
"config",
",",
"verbose",
")",
":",
"_mkhome",
"(",
"home",
")",
"setup_logging",
"(",
"home",
",",
"verbose",
")",
"logger",
".",
"debug",
"(",
"\"Honeycomb v%s\"",
",",
"__version__",
",",
"extra",
"=",
"{",
"\"version\"",
":",
"__version__",
"}",
")",
"logger",
".",
"debug",
"(",
"\"running command %s (%s)\"",
",",
"ctx",
".",
"command",
".",
"name",
",",
"ctx",
".",
"params",
",",
"extra",
"=",
"{",
"\"command\"",
":",
"ctx",
".",
"command",
".",
"name",
",",
"\"params\"",
":",
"ctx",
".",
"params",
"}",
")",
"try",
":",
"is_admin",
"=",
"os",
".",
"getuid",
"(",
")",
"==",
"0",
"except",
"AttributeError",
":",
"is_admin",
"=",
"ctypes",
".",
"windll",
".",
"shell32",
".",
"IsUserAnAdmin",
"(",
")",
"if",
"is_admin",
":",
"if",
"not",
"iamroot",
":",
"raise",
"click",
".",
"ClickException",
"(",
"\"Honeycomb should not run as a privileged user, if you are just \"",
"\"trying to bind to a low port try running `setcap 'cap_net_bind_service=+ep' \"",
"\"$(which honeycomb)` instead. If you insist, use --iamroot\"",
")",
"logger",
".",
"warn",
"(",
"\"running as root!\"",
")",
"ctx",
".",
"obj",
"[",
"\"HOME\"",
"]",
"=",
"home",
"logger",
".",
"debug",
"(",
"\"ctx: {}\"",
".",
"format",
"(",
"ctx",
".",
"obj",
")",
")",
"if",
"config",
":",
"return",
"process_config",
"(",
"ctx",
",",
"config",
")"
] | Honeycomb is a honeypot framework. | [
"Honeycomb",
"is",
"a",
"honeypot",
"framework",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/cli.py#L40-L66 | train |
Cymmetria/honeycomb | honeycomb/cli.py | setup_logging | def setup_logging(home, verbose):
"""Configure logging for honeycomb."""
logging.setLoggerClass(MyLogger)
logging.config.dictConfig({
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"console": {
"format": "%(levelname)-8s [%(asctime)s %(name)s] %(filename)s:%(lineno)s %(funcName)s: %(message)s",
},
"json": {
"()": jsonlogger.JsonFormatter,
"format": "%(levelname)s %(asctime)s %(name)s %(filename)s %(lineno)s %(funcName)s %(message)s",
},
},
"handlers": {
"default": {
"level": "DEBUG" if verbose else "INFO",
"class": "logging.StreamHandler",
"formatter": "console",
},
"file": {
"level": "DEBUG",
"class": "logging.handlers.WatchedFileHandler",
"filename": os.path.join(home, DEBUG_LOG_FILE),
"formatter": "json",
},
},
"loggers": {
"": {
"handlers": ["default", "file"],
"level": "DEBUG",
"propagate": True,
},
}
}) | python | def setup_logging(home, verbose):
"""Configure logging for honeycomb."""
logging.setLoggerClass(MyLogger)
logging.config.dictConfig({
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"console": {
"format": "%(levelname)-8s [%(asctime)s %(name)s] %(filename)s:%(lineno)s %(funcName)s: %(message)s",
},
"json": {
"()": jsonlogger.JsonFormatter,
"format": "%(levelname)s %(asctime)s %(name)s %(filename)s %(lineno)s %(funcName)s %(message)s",
},
},
"handlers": {
"default": {
"level": "DEBUG" if verbose else "INFO",
"class": "logging.StreamHandler",
"formatter": "console",
},
"file": {
"level": "DEBUG",
"class": "logging.handlers.WatchedFileHandler",
"filename": os.path.join(home, DEBUG_LOG_FILE),
"formatter": "json",
},
},
"loggers": {
"": {
"handlers": ["default", "file"],
"level": "DEBUG",
"propagate": True,
},
}
}) | [
"def",
"setup_logging",
"(",
"home",
",",
"verbose",
")",
":",
"logging",
".",
"setLoggerClass",
"(",
"MyLogger",
")",
"logging",
".",
"config",
".",
"dictConfig",
"(",
"{",
"\"version\"",
":",
"1",
",",
"\"disable_existing_loggers\"",
":",
"False",
",",
"\"formatters\"",
":",
"{",
"\"console\"",
":",
"{",
"\"format\"",
":",
"\"%(levelname)-8s [%(asctime)s %(name)s] %(filename)s:%(lineno)s %(funcName)s: %(message)s\"",
",",
"}",
",",
"\"json\"",
":",
"{",
"\"()\"",
":",
"jsonlogger",
".",
"JsonFormatter",
",",
"\"format\"",
":",
"\"%(levelname)s %(asctime)s %(name)s %(filename)s %(lineno)s %(funcName)s %(message)s\"",
",",
"}",
",",
"}",
",",
"\"handlers\"",
":",
"{",
"\"default\"",
":",
"{",
"\"level\"",
":",
"\"DEBUG\"",
"if",
"verbose",
"else",
"\"INFO\"",
",",
"\"class\"",
":",
"\"logging.StreamHandler\"",
",",
"\"formatter\"",
":",
"\"console\"",
",",
"}",
",",
"\"file\"",
":",
"{",
"\"level\"",
":",
"\"DEBUG\"",
",",
"\"class\"",
":",
"\"logging.handlers.WatchedFileHandler\"",
",",
"\"filename\"",
":",
"os",
".",
"path",
".",
"join",
"(",
"home",
",",
"DEBUG_LOG_FILE",
")",
",",
"\"formatter\"",
":",
"\"json\"",
",",
"}",
",",
"}",
",",
"\"loggers\"",
":",
"{",
"\"\"",
":",
"{",
"\"handlers\"",
":",
"[",
"\"default\"",
",",
"\"file\"",
"]",
",",
"\"level\"",
":",
"\"DEBUG\"",
",",
"\"propagate\"",
":",
"True",
",",
"}",
",",
"}",
"}",
")"
] | Configure logging for honeycomb. | [
"Configure",
"logging",
"for",
"honeycomb",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/cli.py#L91-L126 | train |
Cymmetria/honeycomb | honeycomb/cli.py | MyLogger.makeRecord | def makeRecord(self, name, level, fn, lno, msg, args, exc_info, func=None, extra=None, sinfo=None):
"""Override default logger to allow overriding of internal attributes."""
# See below commented section for a simple example of what the docstring refers to
if six.PY2:
rv = logging.LogRecord(name, level, fn, lno, msg, args, exc_info, func)
else:
rv = logging.LogRecord(name, level, fn, lno, msg, args, exc_info, func, sinfo)
if extra is None:
extra = dict()
extra.update({"pid": os.getpid(), "uid": os.getuid(), "gid": os.getgid(), "ppid": os.getppid()})
for key in extra:
# if (key in ["message", "asctime"]) or (key in rv.__dict__):
# raise KeyError("Attempt to overwrite %r in LogRecord" % key)
rv.__dict__[key] = extra[key]
return rv | python | def makeRecord(self, name, level, fn, lno, msg, args, exc_info, func=None, extra=None, sinfo=None):
"""Override default logger to allow overriding of internal attributes."""
# See below commented section for a simple example of what the docstring refers to
if six.PY2:
rv = logging.LogRecord(name, level, fn, lno, msg, args, exc_info, func)
else:
rv = logging.LogRecord(name, level, fn, lno, msg, args, exc_info, func, sinfo)
if extra is None:
extra = dict()
extra.update({"pid": os.getpid(), "uid": os.getuid(), "gid": os.getgid(), "ppid": os.getppid()})
for key in extra:
# if (key in ["message", "asctime"]) or (key in rv.__dict__):
# raise KeyError("Attempt to overwrite %r in LogRecord" % key)
rv.__dict__[key] = extra[key]
return rv | [
"def",
"makeRecord",
"(",
"self",
",",
"name",
",",
"level",
",",
"fn",
",",
"lno",
",",
"msg",
",",
"args",
",",
"exc_info",
",",
"func",
"=",
"None",
",",
"extra",
"=",
"None",
",",
"sinfo",
"=",
"None",
")",
":",
"# See below commented section for a simple example of what the docstring refers to",
"if",
"six",
".",
"PY2",
":",
"rv",
"=",
"logging",
".",
"LogRecord",
"(",
"name",
",",
"level",
",",
"fn",
",",
"lno",
",",
"msg",
",",
"args",
",",
"exc_info",
",",
"func",
")",
"else",
":",
"rv",
"=",
"logging",
".",
"LogRecord",
"(",
"name",
",",
"level",
",",
"fn",
",",
"lno",
",",
"msg",
",",
"args",
",",
"exc_info",
",",
"func",
",",
"sinfo",
")",
"if",
"extra",
"is",
"None",
":",
"extra",
"=",
"dict",
"(",
")",
"extra",
".",
"update",
"(",
"{",
"\"pid\"",
":",
"os",
".",
"getpid",
"(",
")",
",",
"\"uid\"",
":",
"os",
".",
"getuid",
"(",
")",
",",
"\"gid\"",
":",
"os",
".",
"getgid",
"(",
")",
",",
"\"ppid\"",
":",
"os",
".",
"getppid",
"(",
")",
"}",
")",
"for",
"key",
"in",
"extra",
":",
"# if (key in [\"message\", \"asctime\"]) or (key in rv.__dict__):",
"# raise KeyError(\"Attempt to overwrite %r in LogRecord\" % key)",
"rv",
".",
"__dict__",
"[",
"key",
"]",
"=",
"extra",
"[",
"key",
"]",
"return",
"rv"
] | Override default logger to allow overriding of internal attributes. | [
"Override",
"default",
"logger",
"to",
"allow",
"overriding",
"of",
"internal",
"attributes",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/cli.py#L72-L88 | train |
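The makeRecord override above is what lets call sites attach arbitrary "extra" fields (including names the stock Logger would refuse to overwrite), while every record is also stamped with pid/uid/gid/ppid for the JSON log. A short illustration of the call-site side, mirroring the logging calls used throughout these commands:

```python
import logging

logger = logging.getLogger("honeycomb.example")

# With MyLogger installed via logging.setLoggerClass, these extra keys become
# fields on the record and therefore appear in the JSON formatter output;
# pid/uid/gid/ppid are added automatically by makeRecord.
logger.debug("running command %s (%s)", "run", {"daemon": True},
             extra={"command": "run", "params": {"daemon": True}})
```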
Cymmetria/honeycomb | honeycomb/commands/service/stop.py | stop | def stop(ctx, service, editable):
"""Stop a running service daemon."""
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
home = ctx.obj["HOME"]
service_path = plugin_utils.get_plugin_path(home, SERVICES, service, editable)
logger.debug("loading {}".format(service))
service = register_service(service_path)
try:
with open(os.path.join(service_path, ARGS_JSON)) as f:
service_args = json.loads(f.read())
except IOError as exc:
logger.debug(str(exc), exc_info=True)
raise click.ClickException("Cannot load service args, are you sure server is running?")
# get our service class instance
service_module = get_service_module(service_path)
service_obj = service_module.service_class(alert_types=service.alert_types, service_args=service_args)
# prepare runner
runner = myRunner(service_obj,
pidfile=service_path + ".pid",
stdout=open(os.path.join(service_path, "stdout.log"), "ab"),
stderr=open(os.path.join(service_path, "stderr.log"), "ab"))
click.secho("[*] Stopping {}".format(service.name))
try:
runner._stop()
except daemon.runner.DaemonRunnerStopFailureError as exc:
logger.debug(str(exc), exc_info=True)
raise click.ClickException("Unable to stop service, are you sure it is running?") | python | def stop(ctx, service, editable):
"""Stop a running service daemon."""
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
home = ctx.obj["HOME"]
service_path = plugin_utils.get_plugin_path(home, SERVICES, service, editable)
logger.debug("loading {}".format(service))
service = register_service(service_path)
try:
with open(os.path.join(service_path, ARGS_JSON)) as f:
service_args = json.loads(f.read())
except IOError as exc:
logger.debug(str(exc), exc_info=True)
raise click.ClickException("Cannot load service args, are you sure server is running?")
# get our service class instance
service_module = get_service_module(service_path)
service_obj = service_module.service_class(alert_types=service.alert_types, service_args=service_args)
# prepare runner
runner = myRunner(service_obj,
pidfile=service_path + ".pid",
stdout=open(os.path.join(service_path, "stdout.log"), "ab"),
stderr=open(os.path.join(service_path, "stderr.log"), "ab"))
click.secho("[*] Stopping {}".format(service.name))
try:
runner._stop()
except daemon.runner.DaemonRunnerStopFailureError as exc:
logger.debug(str(exc), exc_info=True)
raise click.ClickException("Unable to stop service, are you sure it is running?") | [
"def",
"stop",
"(",
"ctx",
",",
"service",
",",
"editable",
")",
":",
"logger",
".",
"debug",
"(",
"\"running command %s (%s)\"",
",",
"ctx",
".",
"command",
".",
"name",
",",
"ctx",
".",
"params",
",",
"extra",
"=",
"{",
"\"command\"",
":",
"ctx",
".",
"command",
".",
"name",
",",
"\"params\"",
":",
"ctx",
".",
"params",
"}",
")",
"home",
"=",
"ctx",
".",
"obj",
"[",
"\"HOME\"",
"]",
"service_path",
"=",
"plugin_utils",
".",
"get_plugin_path",
"(",
"home",
",",
"SERVICES",
",",
"service",
",",
"editable",
")",
"logger",
".",
"debug",
"(",
"\"loading {}\"",
".",
"format",
"(",
"service",
")",
")",
"service",
"=",
"register_service",
"(",
"service_path",
")",
"try",
":",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"service_path",
",",
"ARGS_JSON",
")",
")",
"as",
"f",
":",
"service_args",
"=",
"json",
".",
"loads",
"(",
"f",
".",
"read",
"(",
")",
")",
"except",
"IOError",
"as",
"exc",
":",
"logger",
".",
"debug",
"(",
"str",
"(",
"exc",
")",
",",
"exc_info",
"=",
"True",
")",
"raise",
"click",
".",
"ClickException",
"(",
"\"Cannot load service args, are you sure server is running?\"",
")",
"# get our service class instance",
"service_module",
"=",
"get_service_module",
"(",
"service_path",
")",
"service_obj",
"=",
"service_module",
".",
"service_class",
"(",
"alert_types",
"=",
"service",
".",
"alert_types",
",",
"service_args",
"=",
"service_args",
")",
"# prepare runner",
"runner",
"=",
"myRunner",
"(",
"service_obj",
",",
"pidfile",
"=",
"service_path",
"+",
"\".pid\"",
",",
"stdout",
"=",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"service_path",
",",
"\"stdout.log\"",
")",
",",
"\"ab\"",
")",
",",
"stderr",
"=",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"service_path",
",",
"\"stderr.log\"",
")",
",",
"\"ab\"",
")",
")",
"click",
".",
"secho",
"(",
"\"[*] Stopping {}\"",
".",
"format",
"(",
"service",
".",
"name",
")",
")",
"try",
":",
"runner",
".",
"_stop",
"(",
")",
"except",
"daemon",
".",
"runner",
".",
"DaemonRunnerStopFailureError",
"as",
"exc",
":",
"logger",
".",
"debug",
"(",
"str",
"(",
"exc",
")",
",",
"exc_info",
"=",
"True",
")",
"raise",
"click",
".",
"ClickException",
"(",
"\"Unable to stop service, are you sure it is running?\"",
")"
] | Stop a running service daemon. | [
"Stop",
"a",
"running",
"service",
"daemon",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/commands/service/stop.py#L24-L57 | train |
Cymmetria/honeycomb | honeycomb/commands/service/logs.py | logs | def logs(ctx, services, num, follow):
"""Show logs of daemonized service."""
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
home = ctx.obj["HOME"]
services_path = os.path.join(home, SERVICES)
tail_threads = []
for service in services:
logpath = os.path.join(services_path, service, LOGS_DIR, STDOUTLOG)
if os.path.exists(logpath):
logger.debug("tailing %s", logpath)
# TODO: Print log lines from multiple services sorted by timestamp
t = threading.Thread(target=Tailer, kwargs={"name": service,
"nlines": num,
"filepath": logpath,
"follow": follow})
t.daemon = True
t.start()
tail_threads.append(t)
if tail_threads:
while tail_threads[0].isAlive():
tail_threads[0].join(0.1) | python | def logs(ctx, services, num, follow):
"""Show logs of daemonized service."""
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
home = ctx.obj["HOME"]
services_path = os.path.join(home, SERVICES)
tail_threads = []
for service in services:
logpath = os.path.join(services_path, service, LOGS_DIR, STDOUTLOG)
if os.path.exists(logpath):
logger.debug("tailing %s", logpath)
# TODO: Print log lines from multiple services sorted by timestamp
t = threading.Thread(target=Tailer, kwargs={"name": service,
"nlines": num,
"filepath": logpath,
"follow": follow})
t.daemon = True
t.start()
tail_threads.append(t)
if tail_threads:
while tail_threads[0].isAlive():
tail_threads[0].join(0.1) | [
"def",
"logs",
"(",
"ctx",
",",
"services",
",",
"num",
",",
"follow",
")",
":",
"logger",
".",
"debug",
"(",
"\"running command %s (%s)\"",
",",
"ctx",
".",
"command",
".",
"name",
",",
"ctx",
".",
"params",
",",
"extra",
"=",
"{",
"\"command\"",
":",
"ctx",
".",
"command",
".",
"name",
",",
"\"params\"",
":",
"ctx",
".",
"params",
"}",
")",
"home",
"=",
"ctx",
".",
"obj",
"[",
"\"HOME\"",
"]",
"services_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"home",
",",
"SERVICES",
")",
"tail_threads",
"=",
"[",
"]",
"for",
"service",
"in",
"services",
":",
"logpath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"services_path",
",",
"service",
",",
"LOGS_DIR",
",",
"STDOUTLOG",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"logpath",
")",
":",
"logger",
".",
"debug",
"(",
"\"tailing %s\"",
",",
"logpath",
")",
"# TODO: Print log lines from multiple services sorted by timestamp",
"t",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"Tailer",
",",
"kwargs",
"=",
"{",
"\"name\"",
":",
"service",
",",
"\"nlines\"",
":",
"num",
",",
"\"filepath\"",
":",
"logpath",
",",
"\"follow\"",
":",
"follow",
"}",
")",
"t",
".",
"daemon",
"=",
"True",
"t",
".",
"start",
"(",
")",
"tail_threads",
".",
"append",
"(",
"t",
")",
"if",
"tail_threads",
":",
"while",
"tail_threads",
"[",
"0",
"]",
".",
"isAlive",
"(",
")",
":",
"tail_threads",
"[",
"0",
"]",
".",
"join",
"(",
"0.1",
")"
] | Show logs of daemonized service. | [
"Show",
"logs",
"of",
"daemonized",
"service",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/commands/service/logs.py#L22-L46 | train |
Cymmetria/honeycomb | honeycomb/integrationmanager/registration.py | get_integration_module | def get_integration_module(integration_path):
"""Add custom paths to sys and import integration module.
:param integration_path: Path to integration folder
"""
# add custom paths so imports would work
paths = [
os.path.join(__file__, "..", ".."), # to import integrationmanager
os.path.join(integration_path, ".."), # to import integration itself
os.path.join(integration_path, DEPS_DIR), # to import integration deps
]
for path in paths:
path = os.path.realpath(path)
logger.debug("adding %s to path", path)
sys.path.insert(0, path)
# get our integration class instance
integration_name = os.path.basename(integration_path)
logger.debug("importing %s", ".".join([integration_name, INTEGRATION]))
return importlib.import_module(".".join([integration_name, INTEGRATION])) | python | def get_integration_module(integration_path):
"""Add custom paths to sys and import integration module.
:param integration_path: Path to integration folder
"""
# add custom paths so imports would work
paths = [
os.path.join(__file__, "..", ".."), # to import integrationmanager
os.path.join(integration_path, ".."), # to import integration itself
os.path.join(integration_path, DEPS_DIR), # to import integration deps
]
for path in paths:
path = os.path.realpath(path)
logger.debug("adding %s to path", path)
sys.path.insert(0, path)
# get our integration class instance
integration_name = os.path.basename(integration_path)
logger.debug("importing %s", ".".join([integration_name, INTEGRATION]))
return importlib.import_module(".".join([integration_name, INTEGRATION])) | [
"def",
"get_integration_module",
"(",
"integration_path",
")",
":",
"# add custom paths so imports would work",
"paths",
"=",
"[",
"os",
".",
"path",
".",
"join",
"(",
"__file__",
",",
"\"..\"",
",",
"\"..\"",
")",
",",
"# to import integrationmanager",
"os",
".",
"path",
".",
"join",
"(",
"integration_path",
",",
"\"..\"",
")",
",",
"# to import integration itself",
"os",
".",
"path",
".",
"join",
"(",
"integration_path",
",",
"DEPS_DIR",
")",
",",
"# to import integration deps",
"]",
"for",
"path",
"in",
"paths",
":",
"path",
"=",
"os",
".",
"path",
".",
"realpath",
"(",
"path",
")",
"logger",
".",
"debug",
"(",
"\"adding %s to path\"",
",",
"path",
")",
"sys",
".",
"path",
".",
"insert",
"(",
"0",
",",
"path",
")",
"# get our integration class instance",
"integration_name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"integration_path",
")",
"logger",
".",
"debug",
"(",
"\"importing %s\"",
",",
"\".\"",
".",
"join",
"(",
"[",
"integration_name",
",",
"INTEGRATION",
"]",
")",
")",
"return",
"importlib",
".",
"import_module",
"(",
"\".\"",
".",
"join",
"(",
"[",
"integration_name",
",",
"INTEGRATION",
"]",
")",
")"
] | Add custom paths to sys and import integration module.
:param integration_path: Path to integration folder | [
"Add",
"custom",
"paths",
"to",
"sys",
"and",
"import",
"integration",
"module",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/integrationmanager/registration.py#L25-L45 | train |
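A usage sketch for get_integration_module. The home directory and the integration name are hypothetical, and the "integrations" subfolder is assumed to correspond to the INTEGRATIONS constant used by the CLI commands in this file.

```python
import os

from honeycomb.integrationmanager.registration import get_integration_module

home = os.path.expanduser("~/.honeycomb")                       # hypothetical home
integration_path = os.path.join(home, "integrations", "slack")  # hypothetical name

# Adds the integrationmanager folder, the integration's parent folder and its
# bundled dependencies to sys.path, then imports <integration_name>.integration.
integration_module = get_integration_module(integration_path)
```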
Cymmetria/honeycomb | honeycomb/integrationmanager/registration.py | register_integration | def register_integration(package_folder):
"""Register a honeycomb integration.
:param package_folder: Path to folder with integration to load
:returns: Validated integration object
:rtype: :func:`honeycomb.utils.defs.Integration`
"""
logger.debug("registering integration %s", package_folder)
package_folder = os.path.realpath(package_folder)
if not os.path.exists(package_folder):
raise IntegrationNotFound(os.path.basename(package_folder))
json_config_path = os.path.join(package_folder, CONFIG_FILE_NAME)
if not os.path.exists(json_config_path):
raise ConfigFileNotFound(json_config_path)
with open(json_config_path, "r") as f:
config_json = json.load(f)
# Validate integration and alert config
validate_config(config_json, defs.INTEGRATION_VALIDATE_CONFIG_FIELDS)
validate_config_parameters(config_json,
defs.INTEGRATION_PARAMETERS_ALLOWED_KEYS,
defs.INTEGRATION_PARAMETERS_ALLOWED_TYPES)
integration_type = _create_integration_object(config_json)
return integration_type | python | def register_integration(package_folder):
"""Register a honeycomb integration.
:param package_folder: Path to folder with integration to load
:returns: Validated integration object
:rtype: :func:`honeycomb.utils.defs.Integration`
"""
logger.debug("registering integration %s", package_folder)
package_folder = os.path.realpath(package_folder)
if not os.path.exists(package_folder):
raise IntegrationNotFound(os.path.basename(package_folder))
json_config_path = os.path.join(package_folder, CONFIG_FILE_NAME)
if not os.path.exists(json_config_path):
raise ConfigFileNotFound(json_config_path)
with open(json_config_path, "r") as f:
config_json = json.load(f)
# Validate integration and alert config
validate_config(config_json, defs.INTEGRATION_VALIDATE_CONFIG_FIELDS)
validate_config_parameters(config_json,
defs.INTEGRATION_PARAMETERS_ALLOWED_KEYS,
defs.INTEGRATION_PARAMETERS_ALLOWED_TYPES)
integration_type = _create_integration_object(config_json)
return integration_type | [
"def",
"register_integration",
"(",
"package_folder",
")",
":",
"logger",
".",
"debug",
"(",
"\"registering integration %s\"",
",",
"package_folder",
")",
"package_folder",
"=",
"os",
".",
"path",
".",
"realpath",
"(",
"package_folder",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"package_folder",
")",
":",
"raise",
"IntegrationNotFound",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"package_folder",
")",
")",
"json_config_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"package_folder",
",",
"CONFIG_FILE_NAME",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"json_config_path",
")",
":",
"raise",
"ConfigFileNotFound",
"(",
"json_config_path",
")",
"with",
"open",
"(",
"json_config_path",
",",
"\"r\"",
")",
"as",
"f",
":",
"config_json",
"=",
"json",
".",
"load",
"(",
"f",
")",
"# Validate integration and alert config",
"validate_config",
"(",
"config_json",
",",
"defs",
".",
"INTEGRATION_VALIDATE_CONFIG_FIELDS",
")",
"validate_config_parameters",
"(",
"config_json",
",",
"defs",
".",
"INTEGRATION_PARAMETERS_ALLOWED_KEYS",
",",
"defs",
".",
"INTEGRATION_PARAMETERS_ALLOWED_TYPES",
")",
"integration_type",
"=",
"_create_integration_object",
"(",
"config_json",
")",
"return",
"integration_type"
] | Register a honeycomb integration.
:param package_folder: Path to folder with integration to load
:returns: Validated integration object
:rtype: :func:`honeycomb.utils.defs.Integration` | [
"Register",
"a",
"honeycomb",
"integration",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/integrationmanager/registration.py#L48-L75 | train |
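A companion sketch for register_integration. The folder is hypothetical, while the attributes read from the returned object (name, description, supported_event_types) are the ones used by the integration list command later in this file.

```python
import os

from honeycomb.integrationmanager.registration import register_integration

home = os.path.expanduser("~/.honeycomb")
integration_path = os.path.join(home, "integrations", "slack")  # hypothetical name

# Raises IntegrationNotFound / ConfigFileNotFound when the folder or its
# config.json is missing; otherwise returns a validated integration object.
integration = register_integration(integration_path)
print(integration.name, integration.description, integration.supported_event_types)
```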
Cymmetria/honeycomb | honeycomb/commands/integration/list.py | list | def list(ctx, remote):
"""List integrations."""
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
click.secho("[*] Installed integrations:")
home = ctx.obj["HOME"]
integrations_path = os.path.join(home, INTEGRATIONS)
plugin_type = "integration"
def get_integration_details(integration_name):
logger.debug("loading {}".format(integration_name))
integration = register_integration(os.path.join(integrations_path, integration_name))
supported_event_types = integration.supported_event_types
if not supported_event_types:
supported_event_types = "All"
return "{:s} ({:s}) [Supported event types: {}]".format(integration.name, integration.description,
supported_event_types)
installed_integrations = list_local_plugins(plugin_type, integrations_path, get_integration_details)
if remote:
list_remote_plugins(installed_integrations, plugin_type)
else:
click.secho("\n[*] Try running `honeycomb integrations list -r` "
"to see integrations available from our repository") | python | def list(ctx, remote):
"""List integrations."""
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
click.secho("[*] Installed integrations:")
home = ctx.obj["HOME"]
integrations_path = os.path.join(home, INTEGRATIONS)
plugin_type = "integration"
def get_integration_details(integration_name):
logger.debug("loading {}".format(integration_name))
integration = register_integration(os.path.join(integrations_path, integration_name))
supported_event_types = integration.supported_event_types
if not supported_event_types:
supported_event_types = "All"
return "{:s} ({:s}) [Supported event types: {}]".format(integration.name, integration.description,
supported_event_types)
installed_integrations = list_local_plugins(plugin_type, integrations_path, get_integration_details)
if remote:
list_remote_plugins(installed_integrations, plugin_type)
else:
click.secho("\n[*] Try running `honeycomb integrations list -r` "
"to see integrations available from our repository") | [
"def",
"list",
"(",
"ctx",
",",
"remote",
")",
":",
"logger",
".",
"debug",
"(",
"\"running command %s (%s)\"",
",",
"ctx",
".",
"command",
".",
"name",
",",
"ctx",
".",
"params",
",",
"extra",
"=",
"{",
"\"command\"",
":",
"ctx",
".",
"command",
".",
"name",
",",
"\"params\"",
":",
"ctx",
".",
"params",
"}",
")",
"click",
".",
"secho",
"(",
"\"[*] Installed integrations:\"",
")",
"home",
"=",
"ctx",
".",
"obj",
"[",
"\"HOME\"",
"]",
"integrations_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"home",
",",
"INTEGRATIONS",
")",
"plugin_type",
"=",
"\"integration\"",
"def",
"get_integration_details",
"(",
"integration_name",
")",
":",
"logger",
".",
"debug",
"(",
"\"loading {}\"",
".",
"format",
"(",
"integration_name",
")",
")",
"integration",
"=",
"register_integration",
"(",
"os",
".",
"path",
".",
"join",
"(",
"integrations_path",
",",
"integration_name",
")",
")",
"supported_event_types",
"=",
"integration",
".",
"supported_event_types",
"if",
"not",
"supported_event_types",
":",
"supported_event_types",
"=",
"\"All\"",
"return",
"\"{:s} ({:s}) [Supported event types: {}]\"",
".",
"format",
"(",
"integration",
".",
"name",
",",
"integration",
".",
"description",
",",
"supported_event_types",
")",
"installed_integrations",
"=",
"list_local_plugins",
"(",
"plugin_type",
",",
"integrations_path",
",",
"get_integration_details",
")",
"if",
"remote",
":",
"list_remote_plugins",
"(",
"installed_integrations",
",",
"plugin_type",
")",
"else",
":",
"click",
".",
"secho",
"(",
"\"\\n[*] Try running `honeycomb integrations list -r` \"",
"\"to see integrations available from our repository\"",
")"
] | List integrations. | [
"List",
"integrations",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/commands/integration/list.py#L20-L46 | train |
Cymmetria/honeycomb | honeycomb/commands/service/run.py | run | def run(ctx, service, args, show_args, daemon, editable, integration):
"""Load and run a specific service."""
home = ctx.obj["HOME"]
service_path = plugin_utils.get_plugin_path(home, SERVICES, service, editable)
service_log_path = os.path.join(service_path, LOGS_DIR)
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
logger.debug("loading {} ({})".format(service, service_path))
service = register_service(service_path)
if show_args:
return plugin_utils.print_plugin_args(service_path)
# get our service class instance
service_module = get_service_module(service_path)
service_args = plugin_utils.parse_plugin_args(args, config_utils.get_config_parameters(service_path))
service_obj = service_module.service_class(alert_types=service.alert_types, service_args=service_args)
if not os.path.exists(service_log_path):
os.mkdir(service_log_path)
# prepare runner
if daemon:
runner = myRunner(service_obj,
pidfile=service_path + ".pid",
stdout=open(os.path.join(service_log_path, STDOUTLOG), "ab"),
stderr=open(os.path.join(service_log_path, STDERRLOG), "ab"))
files_preserve = []
for handler in logging.getLogger().handlers:
if hasattr(handler, "stream"):
if hasattr(handler.stream, "fileno"):
files_preserve.append(handler.stream.fileno())
if hasattr(handler, "socket"):
files_preserve.append(handler.socket.fileno())
runner.daemon_context.files_preserve = files_preserve
runner.daemon_context.signal_map.update({
signal.SIGTERM: service_obj._on_server_shutdown,
signal.SIGINT: service_obj._on_server_shutdown,
})
logger.debug("daemon_context", extra={"daemon_context": vars(runner.daemon_context)})
for integration_name in integration:
integration_path = plugin_utils.get_plugin_path(home, INTEGRATIONS, integration_name, editable)
configure_integration(integration_path)
click.secho("[+] Launching {} {}".format(service.name, "in daemon mode" if daemon else ""))
try:
# save service_args for external reference (see test)
with open(os.path.join(service_path, ARGS_JSON), "w") as f:
f.write(json.dumps(service_args))
runner._start() if daemon else service_obj.run()
except KeyboardInterrupt:
service_obj._on_server_shutdown()
click.secho("[*] {} has stopped".format(service.name)) | python | def run(ctx, service, args, show_args, daemon, editable, integration):
"""Load and run a specific service."""
home = ctx.obj["HOME"]
service_path = plugin_utils.get_plugin_path(home, SERVICES, service, editable)
service_log_path = os.path.join(service_path, LOGS_DIR)
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
logger.debug("loading {} ({})".format(service, service_path))
service = register_service(service_path)
if show_args:
return plugin_utils.print_plugin_args(service_path)
# get our service class instance
service_module = get_service_module(service_path)
service_args = plugin_utils.parse_plugin_args(args, config_utils.get_config_parameters(service_path))
service_obj = service_module.service_class(alert_types=service.alert_types, service_args=service_args)
if not os.path.exists(service_log_path):
os.mkdir(service_log_path)
# prepare runner
if daemon:
runner = myRunner(service_obj,
pidfile=service_path + ".pid",
stdout=open(os.path.join(service_log_path, STDOUTLOG), "ab"),
stderr=open(os.path.join(service_log_path, STDERRLOG), "ab"))
files_preserve = []
for handler in logging.getLogger().handlers:
if hasattr(handler, "stream"):
if hasattr(handler.stream, "fileno"):
files_preserve.append(handler.stream.fileno())
if hasattr(handler, "socket"):
files_preserve.append(handler.socket.fileno())
runner.daemon_context.files_preserve = files_preserve
runner.daemon_context.signal_map.update({
signal.SIGTERM: service_obj._on_server_shutdown,
signal.SIGINT: service_obj._on_server_shutdown,
})
logger.debug("daemon_context", extra={"daemon_context": vars(runner.daemon_context)})
for integration_name in integration:
integration_path = plugin_utils.get_plugin_path(home, INTEGRATIONS, integration_name, editable)
configure_integration(integration_path)
click.secho("[+] Launching {} {}".format(service.name, "in daemon mode" if daemon else ""))
try:
# save service_args for external reference (see test)
with open(os.path.join(service_path, ARGS_JSON), "w") as f:
f.write(json.dumps(service_args))
runner._start() if daemon else service_obj.run()
except KeyboardInterrupt:
service_obj._on_server_shutdown()
click.secho("[*] {} has stopped".format(service.name)) | [
"def",
"run",
"(",
"ctx",
",",
"service",
",",
"args",
",",
"show_args",
",",
"daemon",
",",
"editable",
",",
"integration",
")",
":",
"home",
"=",
"ctx",
".",
"obj",
"[",
"\"HOME\"",
"]",
"service_path",
"=",
"plugin_utils",
".",
"get_plugin_path",
"(",
"home",
",",
"SERVICES",
",",
"service",
",",
"editable",
")",
"service_log_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"service_path",
",",
"LOGS_DIR",
")",
"logger",
".",
"debug",
"(",
"\"running command %s (%s)\"",
",",
"ctx",
".",
"command",
".",
"name",
",",
"ctx",
".",
"params",
",",
"extra",
"=",
"{",
"\"command\"",
":",
"ctx",
".",
"command",
".",
"name",
",",
"\"params\"",
":",
"ctx",
".",
"params",
"}",
")",
"logger",
".",
"debug",
"(",
"\"loading {} ({})\"",
".",
"format",
"(",
"service",
",",
"service_path",
")",
")",
"service",
"=",
"register_service",
"(",
"service_path",
")",
"if",
"show_args",
":",
"return",
"plugin_utils",
".",
"print_plugin_args",
"(",
"service_path",
")",
"# get our service class instance",
"service_module",
"=",
"get_service_module",
"(",
"service_path",
")",
"service_args",
"=",
"plugin_utils",
".",
"parse_plugin_args",
"(",
"args",
",",
"config_utils",
".",
"get_config_parameters",
"(",
"service_path",
")",
")",
"service_obj",
"=",
"service_module",
".",
"service_class",
"(",
"alert_types",
"=",
"service",
".",
"alert_types",
",",
"service_args",
"=",
"service_args",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"service_log_path",
")",
":",
"os",
".",
"mkdir",
"(",
"service_log_path",
")",
"# prepare runner",
"if",
"daemon",
":",
"runner",
"=",
"myRunner",
"(",
"service_obj",
",",
"pidfile",
"=",
"service_path",
"+",
"\".pid\"",
",",
"stdout",
"=",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"service_log_path",
",",
"STDOUTLOG",
")",
",",
"\"ab\"",
")",
",",
"stderr",
"=",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"service_log_path",
",",
"STDERRLOG",
")",
",",
"\"ab\"",
")",
")",
"files_preserve",
"=",
"[",
"]",
"for",
"handler",
"in",
"logging",
".",
"getLogger",
"(",
")",
".",
"handlers",
":",
"if",
"hasattr",
"(",
"handler",
",",
"\"stream\"",
")",
":",
"if",
"hasattr",
"(",
"handler",
".",
"stream",
",",
"\"fileno\"",
")",
":",
"files_preserve",
".",
"append",
"(",
"handler",
".",
"stream",
".",
"fileno",
"(",
")",
")",
"if",
"hasattr",
"(",
"handler",
",",
"\"socket\"",
")",
":",
"files_preserve",
".",
"append",
"(",
"handler",
".",
"socket",
".",
"fileno",
"(",
")",
")",
"runner",
".",
"daemon_context",
".",
"files_preserve",
"=",
"files_preserve",
"runner",
".",
"daemon_context",
".",
"signal_map",
".",
"update",
"(",
"{",
"signal",
".",
"SIGTERM",
":",
"service_obj",
".",
"_on_server_shutdown",
",",
"signal",
".",
"SIGINT",
":",
"service_obj",
".",
"_on_server_shutdown",
",",
"}",
")",
"logger",
".",
"debug",
"(",
"\"daemon_context\"",
",",
"extra",
"=",
"{",
"\"daemon_context\"",
":",
"vars",
"(",
"runner",
".",
"daemon_context",
")",
"}",
")",
"for",
"integration_name",
"in",
"integration",
":",
"integration_path",
"=",
"plugin_utils",
".",
"get_plugin_path",
"(",
"home",
",",
"INTEGRATIONS",
",",
"integration_name",
",",
"editable",
")",
"configure_integration",
"(",
"integration_path",
")",
"click",
".",
"secho",
"(",
"\"[+] Launching {} {}\"",
".",
"format",
"(",
"service",
".",
"name",
",",
"\"in daemon mode\"",
"if",
"daemon",
"else",
"\"\"",
")",
")",
"try",
":",
"# save service_args for external reference (see test)",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"service_path",
",",
"ARGS_JSON",
")",
",",
"\"w\"",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"service_args",
")",
")",
"runner",
".",
"_start",
"(",
")",
"if",
"daemon",
"else",
"service_obj",
".",
"run",
"(",
")",
"except",
"KeyboardInterrupt",
":",
"service_obj",
".",
"_on_server_shutdown",
"(",
")",
"click",
".",
"secho",
"(",
"\"[*] {} has stopped\"",
".",
"format",
"(",
"service",
".",
"name",
")",
")"
] | Load and run a specific service. | [
"Load",
"and",
"run",
"a",
"specific",
"service",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/commands/service/run.py#L30-L88 | train |
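The least obvious part of run above is keeping logging alive across daemonization: python-daemon closes open file descriptors when it forks, so the command collects every handler's stream and socket descriptor into files_preserve. A generic sketch of that pattern (not Honeycomb-specific API):

```python
import logging

import daemon  # python-daemon

files_preserve = []
for handler in logging.getLogger().handlers:
    stream = getattr(handler, "stream", None)
    if stream is not None and hasattr(stream, "fileno"):
        files_preserve.append(stream.fileno())
    sock = getattr(handler, "socket", None)
    if sock is not None:
        files_preserve.append(sock.fileno())

# Without files_preserve, the handlers' descriptors would be closed on fork
# and nothing would reach the log files from inside the daemon.
with daemon.DaemonContext(files_preserve=files_preserve):
    logging.getLogger(__name__).info("still logging after daemonization")
```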
Cymmetria/honeycomb | honeycomb/servicemanager/base_service.py | DockerService.read_lines | def read_lines(self, file_path, empty_lines=False, signal_ready=True):
"""Fetch lines from file.
In case the file handler changes (logrotate), reopen the file.
:param file_path: Path to file
:param empty_lines: Return empty lines
:param signal_ready: Report signal ready on start
"""
file_handler, file_id = self._get_file(file_path)
file_handler.seek(0, os.SEEK_END)
if signal_ready:
self.signal_ready()
while self.thread_server.is_alive():
line = six.text_type(file_handler.readline(), "utf-8")
if line:
yield line
continue
elif empty_lines:
yield line
time.sleep(0.1)
if file_id != self._get_file_id(os.stat(file_path)) and os.path.isfile(file_path):
file_handler, file_id = self._get_file(file_path) | python | def read_lines(self, file_path, empty_lines=False, signal_ready=True):
"""Fetch lines from file.
In case the file handler changes (logrotate), reopen the file.
:param file_path: Path to file
:param empty_lines: Return empty lines
:param signal_ready: Report signal ready on start
"""
file_handler, file_id = self._get_file(file_path)
file_handler.seek(0, os.SEEK_END)
if signal_ready:
self.signal_ready()
while self.thread_server.is_alive():
line = six.text_type(file_handler.readline(), "utf-8")
if line:
yield line
continue
elif empty_lines:
yield line
time.sleep(0.1)
if file_id != self._get_file_id(os.stat(file_path)) and os.path.isfile(file_path):
file_handler, file_id = self._get_file(file_path) | [
"def",
"read_lines",
"(",
"self",
",",
"file_path",
",",
"empty_lines",
"=",
"False",
",",
"signal_ready",
"=",
"True",
")",
":",
"file_handler",
",",
"file_id",
"=",
"self",
".",
"_get_file",
"(",
"file_path",
")",
"file_handler",
".",
"seek",
"(",
"0",
",",
"os",
".",
"SEEK_END",
")",
"if",
"signal_ready",
":",
"self",
".",
"signal_ready",
"(",
")",
"while",
"self",
".",
"thread_server",
".",
"is_alive",
"(",
")",
":",
"line",
"=",
"six",
".",
"text_type",
"(",
"file_handler",
".",
"readline",
"(",
")",
",",
"\"utf-8\"",
")",
"if",
"line",
":",
"yield",
"line",
"continue",
"elif",
"empty_lines",
":",
"yield",
"line",
"time",
".",
"sleep",
"(",
"0.1",
")",
"if",
"file_id",
"!=",
"self",
".",
"_get_file_id",
"(",
"os",
".",
"stat",
"(",
"file_path",
")",
")",
"and",
"os",
".",
"path",
".",
"isfile",
"(",
"file_path",
")",
":",
"file_handler",
",",
"file_id",
"=",
"self",
".",
"_get_file",
"(",
"file_path",
")"
] | Fetch lines from file.
In case the file handler changes (logrotate), reopen the file.
:param file_path: Path to file
:param empty_lines: Return empty lines
:param signal_ready: Report signal ready on start | [
"Fetch",
"lines",
"from",
"file",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/servicemanager/base_service.py#L171-L197 | train |
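read_lines above implements a logrotate-aware tail: yield new lines, and when the file identity behind the path changes, reopen it. Below is a standalone sketch of the same idea; the device-plus-inode check is a plausible reading of _get_file_id, which is not shown in this record, and the stop condition replaces the service's thread_server check.

```python
import os
import time


def follow(path, should_stop):
    """Yield appended lines from path, reopening it if it is rotated."""
    def file_id(st):
        return st.st_dev, st.st_ino

    fh = open(path, "rb")
    fid = file_id(os.stat(path))
    fh.seek(0, os.SEEK_END)
    while not should_stop():
        line = fh.readline().decode("utf-8")
        if line:
            yield line
            continue
        time.sleep(0.1)
        if os.path.isfile(path) and file_id(os.stat(path)) != fid:
            fh.close()
            fh = open(path, "rb")
            fid = file_id(os.stat(path))
```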
Cymmetria/honeycomb | honeycomb/servicemanager/base_service.py | DockerService.on_server_start | def on_server_start(self):
"""Service run loop function.
Run the desired docker container with parameters and start parsing the monitored file for alerts.
"""
self._container = self._docker_client.containers.run(self.docker_image_name, detach=True, **self.docker_params)
self.signal_ready()
for log_line in self.get_lines():
try:
alert_dict = self.parse_line(log_line)
if alert_dict:
self.add_alert_to_queue(alert_dict)
except Exception:
self.logger.exception(None) | python | def on_server_start(self):
"""Service run loop function.
Run the desired docker container with parameters and start parsing the monitored file for alerts.
"""
self._container = self._docker_client.containers.run(self.docker_image_name, detach=True, **self.docker_params)
self.signal_ready()
for log_line in self.get_lines():
try:
alert_dict = self.parse_line(log_line)
if alert_dict:
self.add_alert_to_queue(alert_dict)
except Exception:
self.logger.exception(None) | [
"def",
"on_server_start",
"(",
"self",
")",
":",
"self",
".",
"_container",
"=",
"self",
".",
"_docker_client",
".",
"containers",
".",
"run",
"(",
"self",
".",
"docker_image_name",
",",
"detach",
"=",
"True",
",",
"*",
"*",
"self",
".",
"docker_params",
")",
"self",
".",
"signal_ready",
"(",
")",
"for",
"log_line",
"in",
"self",
".",
"get_lines",
"(",
")",
":",
"try",
":",
"alert_dict",
"=",
"self",
".",
"parse_line",
"(",
"log_line",
")",
"if",
"alert_dict",
":",
"self",
".",
"add_alert_to_queue",
"(",
"alert_dict",
")",
"except",
"Exception",
":",
"self",
".",
"logger",
".",
"exception",
"(",
"None",
")"
] | Service run loop function.
Run the desired docker container with parameters and start parsing the monitored file for alerts. | [
"Service",
"run",
"loop",
"function",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/servicemanager/base_service.py#L212-L226 | train |
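on_server_start above suggests the contract a Docker-backed service plugin has to satisfy: provide docker_image_name and docker_params, and implement parse_line to turn monitored log lines into alert dicts. The subclass below is a hypothetical sketch built only from those references; the real base class may require additional members (such as the monitored log file), the import path is assumed from how get_service_module (later in this file) puts base_service on sys.path, and the image, ports and alert fields are invented for illustration.

```python
from base_service import DockerService  # import path assumed, see get_service_module below


class ExampleHTTPService(DockerService):
    """Hypothetical honeypot service wrapping an HTTP container."""

    @property
    def docker_image_name(self):
        return "httpd:2.4"  # hypothetical image

    @property
    def docker_params(self):
        return {"ports": {"80/tcp": 8080}}

    def parse_line(self, line):
        # Return an alert dict for interesting lines, or None to skip them;
        # on_server_start feeds any dict returned here to add_alert_to_queue.
        if "GET /admin" in line:
            return {"event_type": "http_request", "request": line.strip()}
        return None
```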
Cymmetria/honeycomb | honeycomb/servicemanager/base_service.py | DockerService.on_server_shutdown | def on_server_shutdown(self):
"""Stop the container before shutting down."""
if not self._container:
return
self._container.stop()
self._container.remove(v=True, force=True) | python | def on_server_shutdown(self):
"""Stop the container before shutting down."""
if not self._container:
return
self._container.stop()
self._container.remove(v=True, force=True) | [
"def",
"on_server_shutdown",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_container",
":",
"return",
"self",
".",
"_container",
".",
"stop",
"(",
")",
"self",
".",
"_container",
".",
"remove",
"(",
"v",
"=",
"True",
",",
"force",
"=",
"True",
")"
] | Stop the container before shutting down. | [
"Stop",
"the",
"container",
"before",
"shutting",
"down",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/servicemanager/base_service.py#L228-L233 | train |
Cymmetria/honeycomb | honeycomb/commands/integration/uninstall.py | uninstall | def uninstall(ctx, yes, integrations):
"""Uninstall a integration."""
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
home = ctx.obj["HOME"]
for integration in integrations:
integration_path = plugin_utils.get_plugin_path(home, INTEGRATIONS, integration)
plugin_utils.uninstall_plugin(integration_path, yes) | python | def uninstall(ctx, yes, integrations):
"""Uninstall a integration."""
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
home = ctx.obj["HOME"]
for integration in integrations:
integration_path = plugin_utils.get_plugin_path(home, INTEGRATIONS, integration)
plugin_utils.uninstall_plugin(integration_path, yes) | [
"def",
"uninstall",
"(",
"ctx",
",",
"yes",
",",
"integrations",
")",
":",
"logger",
".",
"debug",
"(",
"\"running command %s (%s)\"",
",",
"ctx",
".",
"command",
".",
"name",
",",
"ctx",
".",
"params",
",",
"extra",
"=",
"{",
"\"command\"",
":",
"ctx",
".",
"command",
".",
"name",
",",
"\"params\"",
":",
"ctx",
".",
"params",
"}",
")",
"home",
"=",
"ctx",
".",
"obj",
"[",
"\"HOME\"",
"]",
"for",
"integration",
"in",
"integrations",
":",
"integration_path",
"=",
"plugin_utils",
".",
"get_plugin_path",
"(",
"home",
",",
"INTEGRATIONS",
",",
"integration",
")",
"plugin_utils",
".",
"uninstall_plugin",
"(",
"integration_path",
",",
"yes",
")"
] | Uninstall a integration. | [
"Uninstall",
"a",
"integration",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/commands/integration/uninstall.py#L18-L27 | train |
Cymmetria/honeycomb | honeycomb/commands/service/install.py | install | def install(ctx, services, delete_after_install=False):
"""Install a honeypot service from the online library, local path or zipfile."""
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
home = ctx.obj["HOME"]
services_path = os.path.join(home, SERVICES)
installed_all_plugins = True
for service in services:
try:
plugin_utils.install_plugin(service, SERVICE, services_path, register_service)
except exceptions.PluginAlreadyInstalled as exc:
click.echo(exc)
installed_all_plugins = False
if not installed_all_plugins:
raise ctx.exit(errno.EEXIST) | python | def install(ctx, services, delete_after_install=False):
"""Install a honeypot service from the online library, local path or zipfile."""
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
home = ctx.obj["HOME"]
services_path = os.path.join(home, SERVICES)
installed_all_plugins = True
for service in services:
try:
plugin_utils.install_plugin(service, SERVICE, services_path, register_service)
except exceptions.PluginAlreadyInstalled as exc:
click.echo(exc)
installed_all_plugins = False
if not installed_all_plugins:
raise ctx.exit(errno.EEXIST) | [
"def",
"install",
"(",
"ctx",
",",
"services",
",",
"delete_after_install",
"=",
"False",
")",
":",
"logger",
".",
"debug",
"(",
"\"running command %s (%s)\"",
",",
"ctx",
".",
"command",
".",
"name",
",",
"ctx",
".",
"params",
",",
"extra",
"=",
"{",
"\"command\"",
":",
"ctx",
".",
"command",
".",
"name",
",",
"\"params\"",
":",
"ctx",
".",
"params",
"}",
")",
"home",
"=",
"ctx",
".",
"obj",
"[",
"\"HOME\"",
"]",
"services_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"home",
",",
"SERVICES",
")",
"installed_all_plugins",
"=",
"True",
"for",
"service",
"in",
"services",
":",
"try",
":",
"plugin_utils",
".",
"install_plugin",
"(",
"service",
",",
"SERVICE",
",",
"services_path",
",",
"register_service",
")",
"except",
"exceptions",
".",
"PluginAlreadyInstalled",
"as",
"exc",
":",
"click",
".",
"echo",
"(",
"exc",
")",
"installed_all_plugins",
"=",
"False",
"if",
"not",
"installed_all_plugins",
":",
"raise",
"ctx",
".",
"exit",
"(",
"errno",
".",
"EEXIST",
")"
] | Install a honeypot service from the online library, local path or zipfile. | [
"Install",
"a",
"honeypot",
"service",
"from",
"the",
"online",
"library",
"local",
"path",
"or",
"zipfile",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/commands/service/install.py#L21-L38 | train |
Cymmetria/honeycomb | honeycomb/commands/service/uninstall.py | uninstall | def uninstall(ctx, yes, services):
"""Uninstall a service."""
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
home = ctx.obj["HOME"]
for service in services:
service_path = plugin_utils.get_plugin_path(home, SERVICES, service)
plugin_utils.uninstall_plugin(service_path, yes) | python | def uninstall(ctx, yes, services):
"""Uninstall a service."""
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
home = ctx.obj["HOME"]
for service in services:
service_path = plugin_utils.get_plugin_path(home, SERVICES, service)
plugin_utils.uninstall_plugin(service_path, yes) | [
"def",
"uninstall",
"(",
"ctx",
",",
"yes",
",",
"services",
")",
":",
"logger",
".",
"debug",
"(",
"\"running command %s (%s)\"",
",",
"ctx",
".",
"command",
".",
"name",
",",
"ctx",
".",
"params",
",",
"extra",
"=",
"{",
"\"command\"",
":",
"ctx",
".",
"command",
".",
"name",
",",
"\"params\"",
":",
"ctx",
".",
"params",
"}",
")",
"home",
"=",
"ctx",
".",
"obj",
"[",
"\"HOME\"",
"]",
"for",
"service",
"in",
"services",
":",
"service_path",
"=",
"plugin_utils",
".",
"get_plugin_path",
"(",
"home",
",",
"SERVICES",
",",
"service",
")",
"plugin_utils",
".",
"uninstall_plugin",
"(",
"service_path",
",",
"yes",
")"
] | Uninstall a service. | [
"Uninstall",
"a",
"service",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/commands/service/uninstall.py#L18-L27 | train |
Cymmetria/honeycomb | honeycomb/servicemanager/registration.py | get_service_module | def get_service_module(service_path):
"""Add custom paths to sys and import service module.
:param service_path: Path to service folder
"""
# add custom paths so imports would work
paths = [
os.path.dirname(__file__), # this folder, to catch base_service
os.path.realpath(os.path.join(service_path, "..")), # service's parent folder for import
os.path.realpath(os.path.join(service_path)), # service's folder for local imports
os.path.realpath(os.path.join(service_path, DEPS_DIR)), # deps dir
]
for path in paths:
path = os.path.realpath(path)
logger.debug("adding %s to path", path)
sys.path.insert(0, path)
# get our service class instance
service_name = os.path.basename(service_path)
module = ".".join([service_name, service_name + "_service"])
logger.debug("importing %s", module)
return importlib.import_module(module) | python | def get_service_module(service_path):
"""Add custom paths to sys and import service module.
:param service_path: Path to service folder
"""
# add custom paths so imports would work
paths = [
os.path.dirname(__file__), # this folder, to catch base_service
os.path.realpath(os.path.join(service_path, "..")), # service's parent folder for import
os.path.realpath(os.path.join(service_path)), # service's folder for local imports
os.path.realpath(os.path.join(service_path, DEPS_DIR)), # deps dir
]
for path in paths:
path = os.path.realpath(path)
logger.debug("adding %s to path", path)
sys.path.insert(0, path)
# get our service class instance
service_name = os.path.basename(service_path)
module = ".".join([service_name, service_name + "_service"])
logger.debug("importing %s", module)
return importlib.import_module(module) | [
"def",
"get_service_module",
"(",
"service_path",
")",
":",
"# add custom paths so imports would work",
"paths",
"=",
"[",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
",",
"# this folder, to catch base_service",
"os",
".",
"path",
".",
"realpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"service_path",
",",
"\"..\"",
")",
")",
",",
"# service's parent folder for import",
"os",
".",
"path",
".",
"realpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"service_path",
")",
")",
",",
"# service's folder for local imports",
"os",
".",
"path",
".",
"realpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"service_path",
",",
"DEPS_DIR",
")",
")",
",",
"# deps dir",
"]",
"for",
"path",
"in",
"paths",
":",
"path",
"=",
"os",
".",
"path",
".",
"realpath",
"(",
"path",
")",
"logger",
".",
"debug",
"(",
"\"adding %s to path\"",
",",
"path",
")",
"sys",
".",
"path",
".",
"insert",
"(",
"0",
",",
"path",
")",
"# get our service class instance",
"service_name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"service_path",
")",
"module",
"=",
"\".\"",
".",
"join",
"(",
"[",
"service_name",
",",
"service_name",
"+",
"\"_service\"",
"]",
")",
"logger",
".",
"debug",
"(",
"\"importing %s\"",
",",
"module",
")",
"return",
"importlib",
".",
"import_module",
"(",
"module",
")"
] | Add custom paths to sys and import service module.
:param service_path: Path to service folder | [
"Add",
"custom",
"paths",
"to",
"sys",
"and",
"import",
"service",
"module",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/servicemanager/registration.py#L26-L48 | train |
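A usage sketch for get_service_module. The home path and service name are hypothetical, and the "services" subfolder is assumed to match the SERVICES constant; the service_class attribute read from the imported module is the one consumed by the run and stop commands earlier in this file.

```python
import os

from honeycomb.servicemanager.registration import get_service_module

home = os.path.expanduser("~/.honeycomb")                    # hypothetical home
service_path = os.path.join(home, "services", "simple_http")  # hypothetical service

# Per the naming convention above, this imports simple_http.simple_http_service.
service_module = get_service_module(service_path)
service_class = service_module.service_class
# Instantiated elsewhere as service_class(alert_types=..., service_args=...).
```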
Cymmetria/honeycomb | honeycomb/servicemanager/registration.py | register_service | def register_service(package_folder):
"""Register a honeycomb service.
:param package_folder: Path to folder with service to load
:returns: Validated service object
:rtype: :func:`honeycomb.utils.defs.ServiceType`
"""
logger.debug("registering service %s", package_folder)
package_folder = os.path.realpath(package_folder)
if not os.path.exists(package_folder):
raise ServiceNotFound(os.path.basename(package_folder))
json_config_path = os.path.join(package_folder, CONFIG_FILE_NAME)
if not os.path.exists(json_config_path):
raise ConfigFileNotFound(json_config_path)
with open(json_config_path, "r") as f:
config_json = json.load(f)
# Validate service and alert config
config_utils.validate_config(config_json, defs.SERVICE_ALERT_VALIDATE_FIELDS)
config_utils.validate_config(config_json.get(defs.SERVICE_CONFIG_SECTION_KEY, {}),
defs.SERVICE_CONFIG_VALIDATE_FIELDS)
_validate_supported_platform(config_json)
_validate_alert_configs(config_json)
config_utils.validate_config_parameters(config_json,
defs.SERVICE_ALLOWED_PARAMTER_KEYS,
defs.SERVICE_ALLOWED_PARAMTER_TYPES)
service_type = _create_service_object(config_json)
service_type.alert_types = _create_alert_types(config_json, service_type)
return service_type | python | def register_service(package_folder):
"""Register a honeycomb service.
:param package_folder: Path to folder with service to load
:returns: Validated service object
:rtype: :func:`honeycomb.utils.defs.ServiceType`
"""
logger.debug("registering service %s", package_folder)
package_folder = os.path.realpath(package_folder)
if not os.path.exists(package_folder):
raise ServiceNotFound(os.path.basename(package_folder))
json_config_path = os.path.join(package_folder, CONFIG_FILE_NAME)
if not os.path.exists(json_config_path):
raise ConfigFileNotFound(json_config_path)
with open(json_config_path, "r") as f:
config_json = json.load(f)
# Validate service and alert config
config_utils.validate_config(config_json, defs.SERVICE_ALERT_VALIDATE_FIELDS)
config_utils.validate_config(config_json.get(defs.SERVICE_CONFIG_SECTION_KEY, {}),
defs.SERVICE_CONFIG_VALIDATE_FIELDS)
_validate_supported_platform(config_json)
_validate_alert_configs(config_json)
config_utils.validate_config_parameters(config_json,
defs.SERVICE_ALLOWED_PARAMTER_KEYS,
defs.SERVICE_ALLOWED_PARAMTER_TYPES)
service_type = _create_service_object(config_json)
service_type.alert_types = _create_alert_types(config_json, service_type)
return service_type | [
"def",
"register_service",
"(",
"package_folder",
")",
":",
"logger",
".",
"debug",
"(",
"\"registering service %s\"",
",",
"package_folder",
")",
"package_folder",
"=",
"os",
".",
"path",
".",
"realpath",
"(",
"package_folder",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"package_folder",
")",
":",
"raise",
"ServiceNotFound",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"package_folder",
")",
")",
"json_config_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"package_folder",
",",
"CONFIG_FILE_NAME",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"json_config_path",
")",
":",
"raise",
"ConfigFileNotFound",
"(",
"json_config_path",
")",
"with",
"open",
"(",
"json_config_path",
",",
"\"r\"",
")",
"as",
"f",
":",
"config_json",
"=",
"json",
".",
"load",
"(",
"f",
")",
"# Validate service and alert config",
"config_utils",
".",
"validate_config",
"(",
"config_json",
",",
"defs",
".",
"SERVICE_ALERT_VALIDATE_FIELDS",
")",
"config_utils",
".",
"validate_config",
"(",
"config_json",
".",
"get",
"(",
"defs",
".",
"SERVICE_CONFIG_SECTION_KEY",
",",
"{",
"}",
")",
",",
"defs",
".",
"SERVICE_CONFIG_VALIDATE_FIELDS",
")",
"_validate_supported_platform",
"(",
"config_json",
")",
"_validate_alert_configs",
"(",
"config_json",
")",
"config_utils",
".",
"validate_config_parameters",
"(",
"config_json",
",",
"defs",
".",
"SERVICE_ALLOWED_PARAMTER_KEYS",
",",
"defs",
".",
"SERVICE_ALLOWED_PARAMTER_TYPES",
")",
"service_type",
"=",
"_create_service_object",
"(",
"config_json",
")",
"service_type",
".",
"alert_types",
"=",
"_create_alert_types",
"(",
"config_json",
",",
"service_type",
")",
"return",
"service_type"
] | Register a honeycomb service.
:param package_folder: Path to folder with service to load
:returns: Validated service object
:rtype: :func:`honeycomb.utils.defs.ServiceType` | [
"Register",
"a",
"honeycomb",
"service",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/servicemanager/registration.py#L51-L84 | train |
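A minimal usage sketch for the `register_service` record above: it loads a service folder and reads the returned service object. The folder path is an illustrative assumption, not something taken from the dataset entry.

from honeycomb.servicemanager.registration import register_service

# Hypothetical service folder; it must contain the service's JSON config file (CONFIG_FILE_NAME).
service = register_service("/opt/honeycomb/services/simple_http")
print(service.alert_types)  # populated via _create_alert_types, as in the record above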
Cymmetria/honeycomb | honeycomb/commands/integration/install.py | install | def install(ctx, integrations, delete_after_install=False):
"""Install a honeycomb integration from the online library, local path or zipfile."""
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
home = ctx.obj["HOME"]
integrations_path = os.path.join(home, INTEGRATIONS)
installed_all_plugins = True
for integration in integrations:
try:
plugin_utils.install_plugin(integration, INTEGRATION, integrations_path, register_integration)
except exceptions.PluginAlreadyInstalled as exc:
click.echo(exc)
installed_all_plugins = False
if not installed_all_plugins:
raise ctx.exit(errno.EEXIST) | python | def install(ctx, integrations, delete_after_install=False):
"""Install a honeycomb integration from the online library, local path or zipfile."""
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
home = ctx.obj["HOME"]
integrations_path = os.path.join(home, INTEGRATIONS)
installed_all_plugins = True
for integration in integrations:
try:
plugin_utils.install_plugin(integration, INTEGRATION, integrations_path, register_integration)
except exceptions.PluginAlreadyInstalled as exc:
click.echo(exc)
installed_all_plugins = False
if not installed_all_plugins:
raise ctx.exit(errno.EEXIST) | [
"def",
"install",
"(",
"ctx",
",",
"integrations",
",",
"delete_after_install",
"=",
"False",
")",
":",
"logger",
".",
"debug",
"(",
"\"running command %s (%s)\"",
",",
"ctx",
".",
"command",
".",
"name",
",",
"ctx",
".",
"params",
",",
"extra",
"=",
"{",
"\"command\"",
":",
"ctx",
".",
"command",
".",
"name",
",",
"\"params\"",
":",
"ctx",
".",
"params",
"}",
")",
"home",
"=",
"ctx",
".",
"obj",
"[",
"\"HOME\"",
"]",
"integrations_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"home",
",",
"INTEGRATIONS",
")",
"installed_all_plugins",
"=",
"True",
"for",
"integration",
"in",
"integrations",
":",
"try",
":",
"plugin_utils",
".",
"install_plugin",
"(",
"integration",
",",
"INTEGRATION",
",",
"integrations_path",
",",
"register_integration",
")",
"except",
"exceptions",
".",
"PluginAlreadyInstalled",
"as",
"exc",
":",
"click",
".",
"echo",
"(",
"exc",
")",
"installed_all_plugins",
"=",
"False",
"if",
"not",
"installed_all_plugins",
":",
"raise",
"ctx",
".",
"exit",
"(",
"errno",
".",
"EEXIST",
")"
] | Install a honeycomb integration from the online library, local path or zipfile. | [
"Install",
"a",
"honeycomb",
"integration",
"from",
"the",
"online",
"library",
"local",
"path",
"or",
"zipfile",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/commands/integration/install.py#L21-L38 | train |
Cymmetria/honeycomb | honeycomb/commands/integration/configure.py | configure | def configure(ctx, integration, args, show_args, editable):
"""Configure an integration with default parameters.
You can still provide one-off integration arguments to :func:`honeycomb.commands.service.run` if required.
"""
home = ctx.obj["HOME"]
integration_path = plugin_utils.get_plugin_path(home, defs.INTEGRATIONS, integration, editable)
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
logger.debug("loading {} ({})".format(integration, integration_path))
integration = register_integration(integration_path)
if show_args:
return plugin_utils.print_plugin_args(integration_path)
# get our integration class instance
integration_args = plugin_utils.parse_plugin_args(args, config_utils.get_config_parameters(integration_path))
args_file = os.path.join(integration_path, defs.ARGS_JSON)
with open(args_file, "w") as f:
data = json.dumps(integration_args)
logger.debug("writing %s to %s", data, args_file)
f.write(json.dumps(integration_args))
click.secho("[*] {0} has been configured, make sure to test it with `honeycomb integration test {0}`"
.format(integration.name)) | python | def configure(ctx, integration, args, show_args, editable):
"""Configure an integration with default parameters.
You can still provide one-off integration arguments to :func:`honeycomb.commands.service.run` if required.
"""
home = ctx.obj["HOME"]
integration_path = plugin_utils.get_plugin_path(home, defs.INTEGRATIONS, integration, editable)
logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
extra={"command": ctx.command.name, "params": ctx.params})
logger.debug("loading {} ({})".format(integration, integration_path))
integration = register_integration(integration_path)
if show_args:
return plugin_utils.print_plugin_args(integration_path)
# get our integration class instance
integration_args = plugin_utils.parse_plugin_args(args, config_utils.get_config_parameters(integration_path))
args_file = os.path.join(integration_path, defs.ARGS_JSON)
with open(args_file, "w") as f:
data = json.dumps(integration_args)
logger.debug("writing %s to %s", data, args_file)
f.write(json.dumps(integration_args))
click.secho("[*] {0} has been configured, make sure to test it with `honeycomb integration test {0}`"
.format(integration.name)) | [
"def",
"configure",
"(",
"ctx",
",",
"integration",
",",
"args",
",",
"show_args",
",",
"editable",
")",
":",
"home",
"=",
"ctx",
".",
"obj",
"[",
"\"HOME\"",
"]",
"integration_path",
"=",
"plugin_utils",
".",
"get_plugin_path",
"(",
"home",
",",
"defs",
".",
"INTEGRATIONS",
",",
"integration",
",",
"editable",
")",
"logger",
".",
"debug",
"(",
"\"running command %s (%s)\"",
",",
"ctx",
".",
"command",
".",
"name",
",",
"ctx",
".",
"params",
",",
"extra",
"=",
"{",
"\"command\"",
":",
"ctx",
".",
"command",
".",
"name",
",",
"\"params\"",
":",
"ctx",
".",
"params",
"}",
")",
"logger",
".",
"debug",
"(",
"\"loading {} ({})\"",
".",
"format",
"(",
"integration",
",",
"integration_path",
")",
")",
"integration",
"=",
"register_integration",
"(",
"integration_path",
")",
"if",
"show_args",
":",
"return",
"plugin_utils",
".",
"print_plugin_args",
"(",
"integration_path",
")",
"# get our integration class instance",
"integration_args",
"=",
"plugin_utils",
".",
"parse_plugin_args",
"(",
"args",
",",
"config_utils",
".",
"get_config_parameters",
"(",
"integration_path",
")",
")",
"args_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"integration_path",
",",
"defs",
".",
"ARGS_JSON",
")",
"with",
"open",
"(",
"args_file",
",",
"\"w\"",
")",
"as",
"f",
":",
"data",
"=",
"json",
".",
"dumps",
"(",
"integration_args",
")",
"logger",
".",
"debug",
"(",
"\"writing %s to %s\"",
",",
"data",
",",
"args_file",
")",
"f",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"integration_args",
")",
")",
"click",
".",
"secho",
"(",
"\"[*] {0} has been configured, make sure to test it with `honeycomb integration test {0}`\"",
".",
"format",
"(",
"integration",
".",
"name",
")",
")"
] | Configure an integration with default parameters.
You can still provide one-off integration arguments to :func:`honeycomb.commands.service.run` if required. | [
"Configure",
"an",
"integration",
"with",
"default",
"parameters",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/commands/integration/configure.py#L24-L51 | train |
joshuaduffy/dota2api | dota2api/__init__.py | Initialise.get_match_history | def get_match_history(self, account_id=None, **kwargs):
"""Returns a dictionary containing a list of the most recent Dota matches
:param account_id: (int, optional)
:param hero_id: (int, optional)
:param game_mode: (int, optional) see ``ref/modes.json``
:param skill: (int, optional) see ``ref/skill.json``
:param min_players: (int, optional) only return matches with minimum
amount of players
:param league_id: (int, optional) for ids use ``get_league_listing()``
:param start_at_match_id: (int, optional) start at matches equal to or
older than this match id
:param matches_requested: (int, optional) defaults to ``100``
:param tournament_games_only: (str, optional) limit results to
tournament matches only
:return: dictionary of matches, see :doc:`responses </responses>`
"""
if 'account_id' not in kwargs:
kwargs['account_id'] = account_id
url = self.__build_url(urls.GET_MATCH_HISTORY, **kwargs)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | python | def get_match_history(self, account_id=None, **kwargs):
"""Returns a dictionary containing a list of the most recent Dota matches
:param account_id: (int, optional)
:param hero_id: (int, optional)
:param game_mode: (int, optional) see ``ref/modes.json``
:param skill: (int, optional) see ``ref/skill.json``
:param min_players: (int, optional) only return matches with minimum
amount of players
:param league_id: (int, optional) for ids use ``get_league_listing()``
:param start_at_match_id: (int, optional) start at matches equal to or
older than this match id
:param matches_requested: (int, optional) defaults to ``100``
:param tournament_games_only: (str, optional) limit results to
tournament matches only
:return: dictionary of matches, see :doc:`responses </responses>`
"""
if 'account_id' not in kwargs:
kwargs['account_id'] = account_id
url = self.__build_url(urls.GET_MATCH_HISTORY, **kwargs)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | [
"def",
"get_match_history",
"(",
"self",
",",
"account_id",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"'account_id'",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'account_id'",
"]",
"=",
"account_id",
"url",
"=",
"self",
".",
"__build_url",
"(",
"urls",
".",
"GET_MATCH_HISTORY",
",",
"*",
"*",
"kwargs",
")",
"req",
"=",
"self",
".",
"executor",
"(",
"url",
")",
"if",
"self",
".",
"logger",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'URL: {0}'",
".",
"format",
"(",
"url",
")",
")",
"if",
"not",
"self",
".",
"__check_http_err",
"(",
"req",
".",
"status_code",
")",
":",
"return",
"response",
".",
"build",
"(",
"req",
",",
"url",
",",
"self",
".",
"raw_mode",
")"
] | Returns a dictionary containing a list of the most recent Dota matches
:param account_id: (int, optional)
:param hero_id: (int, optional)
:param game_mode: (int, optional) see ``ref/modes.json``
:param skill: (int, optional) see ``ref/skill.json``
:param min_players: (int, optional) only return matches with minimum
amount of players
:param league_id: (int, optional) for ids use ``get_league_listing()``
:param start_at_match_id: (int, optional) start at matches equal to or
older than this match id
:param matches_requested: (int, optional) defaults to ``100``
:param tournament_games_only: (str, optional) limit results to
tournament matches only
:return: dictionary of matches, see :doc:`responses </responses>` | [
"Returns",
"a",
"dictionary",
"containing",
"a",
"list",
"of",
"the",
"most",
"recent",
"Dota",
"matches"
] | 03c9e1c609ec36728805bbd3ada0a53ec8f51e86 | https://github.com/joshuaduffy/dota2api/blob/03c9e1c609ec36728805bbd3ada0a53ec8f51e86/dota2api/__init__.py#L66-L90 | train |
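A hedged usage sketch for `get_match_history` above; the API key, account id, and the `matches` result key are placeholders or assumptions based on the docstring rather than verified values.

import dota2api

api = dota2api.Initialise("YOUR_STEAM_API_KEY")                  # placeholder key
history = api.get_match_history(account_id=76561198025007092,   # illustrative 64-bit id
                                matches_requested=10,
                                min_players=10)
for match in history["matches"]:                                 # "matches" key assumed from the Web API shape
    print(match["match_id"])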
joshuaduffy/dota2api | dota2api/__init__.py | Initialise.get_match_history_by_seq_num | def get_match_history_by_seq_num(self, start_at_match_seq_num=None, **kwargs):
"""Returns a dictionary containing a list of Dota matches in the order they were recorded
:param start_at_match_seq_num: (int, optional) start at matches equal to or
older than this match id
:param matches_requested: (int, optional) defaults to ``100``
:return: dictionary of matches, see :doc:`responses </responses>`
"""
if 'start_at_match_seq_num' not in kwargs:
kwargs['start_at_match_seq_num'] = start_at_match_seq_num
url = self.__build_url(urls.GET_MATCH_HISTORY_BY_SEQ_NUM, **kwargs)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | python | def get_match_history_by_seq_num(self, start_at_match_seq_num=None, **kwargs):
"""Returns a dictionary containing a list of Dota matches in the order they were recorded
:param start_at_match_seq_num: (int, optional) start at matches equal to or
older than this match id
:param matches_requested: (int, optional) defaults to ``100``
:return: dictionary of matches, see :doc:`responses </responses>`
"""
if 'start_at_match_seq_num' not in kwargs:
kwargs['start_at_match_seq_num'] = start_at_match_seq_num
url = self.__build_url(urls.GET_MATCH_HISTORY_BY_SEQ_NUM, **kwargs)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | [
"def",
"get_match_history_by_seq_num",
"(",
"self",
",",
"start_at_match_seq_num",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"'start_at_match_seq_num'",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'start_at_match_seq_num'",
"]",
"=",
"start_at_match_seq_num",
"url",
"=",
"self",
".",
"__build_url",
"(",
"urls",
".",
"GET_MATCH_HISTORY_BY_SEQ_NUM",
",",
"*",
"*",
"kwargs",
")",
"req",
"=",
"self",
".",
"executor",
"(",
"url",
")",
"if",
"self",
".",
"logger",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'URL: {0}'",
".",
"format",
"(",
"url",
")",
")",
"if",
"not",
"self",
".",
"__check_http_err",
"(",
"req",
".",
"status_code",
")",
":",
"return",
"response",
".",
"build",
"(",
"req",
",",
"url",
",",
"self",
".",
"raw_mode",
")"
] | Returns a dictionary containing a list of Dota matches in the order they were recorded
:param start_at_match_seq_num: (int, optional) start at matches equal to or
older than this match id
:param matches_requested: (int, optional) defaults to ``100``
:return: dictionary of matches, see :doc:`responses </responses>` | [
"Returns",
"a",
"dictionary",
"containing",
"a",
"list",
"of",
"Dota",
"matches",
"in",
"the",
"order",
"they",
"were",
"recorded"
] | 03c9e1c609ec36728805bbd3ada0a53ec8f51e86 | https://github.com/joshuaduffy/dota2api/blob/03c9e1c609ec36728805bbd3ada0a53ec8f51e86/dota2api/__init__.py#L92-L107 | train |
joshuaduffy/dota2api | dota2api/__init__.py | Initialise.get_match_details | def get_match_details(self, match_id=None, **kwargs):
"""Returns a dictionary containing the details for a Dota 2 match
:param match_id: (int, optional)
:return: dictionary of matches, see :doc:`responses </responses>`
"""
if 'match_id' not in kwargs:
kwargs['match_id'] = match_id
url = self.__build_url(urls.GET_MATCH_DETAILS, **kwargs)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | python | def get_match_details(self, match_id=None, **kwargs):
"""Returns a dictionary containing the details for a Dota 2 match
:param match_id: (int, optional)
:return: dictionary of matches, see :doc:`responses </responses>`
"""
if 'match_id' not in kwargs:
kwargs['match_id'] = match_id
url = self.__build_url(urls.GET_MATCH_DETAILS, **kwargs)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | [
"def",
"get_match_details",
"(",
"self",
",",
"match_id",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"'match_id'",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'match_id'",
"]",
"=",
"match_id",
"url",
"=",
"self",
".",
"__build_url",
"(",
"urls",
".",
"GET_MATCH_DETAILS",
",",
"*",
"*",
"kwargs",
")",
"req",
"=",
"self",
".",
"executor",
"(",
"url",
")",
"if",
"self",
".",
"logger",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'URL: {0}'",
".",
"format",
"(",
"url",
")",
")",
"if",
"not",
"self",
".",
"__check_http_err",
"(",
"req",
".",
"status_code",
")",
":",
"return",
"response",
".",
"build",
"(",
"req",
",",
"url",
",",
"self",
".",
"raw_mode",
")"
] | Returns a dictionary containing the details for a Dota 2 match
:param match_id: (int, optional)
:return: dictionary of matches, see :doc:`responses </responses>` | [
"Returns",
"a",
"dictionary",
"containing",
"the",
"details",
"for",
"a",
"Dota",
"2",
"match"
] | 03c9e1c609ec36728805bbd3ada0a53ec8f51e86 | https://github.com/joshuaduffy/dota2api/blob/03c9e1c609ec36728805bbd3ada0a53ec8f51e86/dota2api/__init__.py#L109-L122 | train |
joshuaduffy/dota2api | dota2api/__init__.py | Initialise.get_league_listing | def get_league_listing(self):
"""Returns a dictionary containing a list of all ticketed leagues
:return: dictionary of ticketed leagues, see :doc:`responses </responses>`
"""
url = self.__build_url(urls.GET_LEAGUE_LISTING)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | python | def get_league_listing(self):
"""Returns a dictionary containing a list of all ticketed leagues
:return: dictionary of ticketed leagues, see :doc:`responses </responses>`
"""
url = self.__build_url(urls.GET_LEAGUE_LISTING)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | [
"def",
"get_league_listing",
"(",
"self",
")",
":",
"url",
"=",
"self",
".",
"__build_url",
"(",
"urls",
".",
"GET_LEAGUE_LISTING",
")",
"req",
"=",
"self",
".",
"executor",
"(",
"url",
")",
"if",
"self",
".",
"logger",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'URL: {0}'",
".",
"format",
"(",
"url",
")",
")",
"if",
"not",
"self",
".",
"__check_http_err",
"(",
"req",
".",
"status_code",
")",
":",
"return",
"response",
".",
"build",
"(",
"req",
",",
"url",
",",
"self",
".",
"raw_mode",
")"
] | Returns a dictionary containing a list of all ticketed leagues
:return: dictionary of ticketed leagues, see :doc:`responses </responses>` | [
"Returns",
"a",
"dictionary",
"containing",
"a",
"list",
"of",
"all",
"ticketed",
"leagues"
] | 03c9e1c609ec36728805bbd3ada0a53ec8f51e86 | https://github.com/joshuaduffy/dota2api/blob/03c9e1c609ec36728805bbd3ada0a53ec8f51e86/dota2api/__init__.py#L124-L134 | train |
joshuaduffy/dota2api | dota2api/__init__.py | Initialise.get_live_league_games | def get_live_league_games(self):
"""Returns a dictionary containing a list of ticked games in progress
:return: dictionary of live games, see :doc:`responses </responses>`
"""
url = self.__build_url(urls.GET_LIVE_LEAGUE_GAMES)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | python | def get_live_league_games(self):
"""Returns a dictionary containing a list of ticked games in progress
:return: dictionary of live games, see :doc:`responses </responses>`
"""
url = self.__build_url(urls.GET_LIVE_LEAGUE_GAMES)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | [
"def",
"get_live_league_games",
"(",
"self",
")",
":",
"url",
"=",
"self",
".",
"__build_url",
"(",
"urls",
".",
"GET_LIVE_LEAGUE_GAMES",
")",
"req",
"=",
"self",
".",
"executor",
"(",
"url",
")",
"if",
"self",
".",
"logger",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'URL: {0}'",
".",
"format",
"(",
"url",
")",
")",
"if",
"not",
"self",
".",
"__check_http_err",
"(",
"req",
".",
"status_code",
")",
":",
"return",
"response",
".",
"build",
"(",
"req",
",",
"url",
",",
"self",
".",
"raw_mode",
")"
] | Returns a dictionary containing a list of ticketed games in progress
:return: dictionary of live games, see :doc:`responses </responses>` | [
"Returns",
"a",
"dictionary",
"containing",
"a",
"list",
"of",
"ticked",
"games",
"in",
"progress"
] | 03c9e1c609ec36728805bbd3ada0a53ec8f51e86 | https://github.com/joshuaduffy/dota2api/blob/03c9e1c609ec36728805bbd3ada0a53ec8f51e86/dota2api/__init__.py#L136-L146 | train |
joshuaduffy/dota2api | dota2api/__init__.py | Initialise.get_team_info_by_team_id | def get_team_info_by_team_id(self, start_at_team_id=None, **kwargs):
"""Returns a dictionary containing a in-game teams
:param start_at_team_id: (int, optional)
:param teams_requested: (int, optional)
:return: dictionary of teams, see :doc:`responses </responses>`
"""
if 'start_at_team_id' not in kwargs:
kwargs['start_at_team_id'] = start_at_team_id
url = self.__build_url(urls.GET_TEAM_INFO_BY_TEAM_ID, **kwargs)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | python | def get_team_info_by_team_id(self, start_at_team_id=None, **kwargs):
"""Returns a dictionary containing a in-game teams
:param start_at_team_id: (int, optional)
:param teams_requested: (int, optional)
:return: dictionary of teams, see :doc:`responses </responses>`
"""
if 'start_at_team_id' not in kwargs:
kwargs['start_at_team_id'] = start_at_team_id
url = self.__build_url(urls.GET_TEAM_INFO_BY_TEAM_ID, **kwargs)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | [
"def",
"get_team_info_by_team_id",
"(",
"self",
",",
"start_at_team_id",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"'start_at_team_id'",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'start_at_team_id'",
"]",
"=",
"start_at_team_id",
"url",
"=",
"self",
".",
"__build_url",
"(",
"urls",
".",
"GET_TEAM_INFO_BY_TEAM_ID",
",",
"*",
"*",
"kwargs",
")",
"req",
"=",
"self",
".",
"executor",
"(",
"url",
")",
"if",
"self",
".",
"logger",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'URL: {0}'",
".",
"format",
"(",
"url",
")",
")",
"if",
"not",
"self",
".",
"__check_http_err",
"(",
"req",
".",
"status_code",
")",
":",
"return",
"response",
".",
"build",
"(",
"req",
",",
"url",
",",
"self",
".",
"raw_mode",
")"
] | Returns a dictionary containing in-game teams
:param start_at_team_id: (int, optional)
:param teams_requested: (int, optional)
:return: dictionary of teams, see :doc:`responses </responses>` | [
"Returns",
"a",
"dictionary",
"containing",
"a",
"in",
"-",
"game",
"teams"
] | 03c9e1c609ec36728805bbd3ada0a53ec8f51e86 | https://github.com/joshuaduffy/dota2api/blob/03c9e1c609ec36728805bbd3ada0a53ec8f51e86/dota2api/__init__.py#L148-L162 | train |
joshuaduffy/dota2api | dota2api/__init__.py | Initialise.get_player_summaries | def get_player_summaries(self, steamids=None, **kwargs):
"""Returns a dictionary containing a player summaries
:param steamids: (list) list of ``32-bit`` or ``64-bit`` steam ids, notice
that api will convert if ``32-bit`` are given
:return: dictionary of player summaries, see :doc:`responses </responses>`
"""
if not isinstance(steamids, collections.Iterable):
steamids = [steamids]
base64_ids = list(map(convert_to_64_bit, filter(lambda x: x is not None, steamids)))
if 'steamids' not in kwargs:
kwargs['steamids'] = base64_ids
url = self.__build_url(urls.GET_PLAYER_SUMMARIES, **kwargs)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | python | def get_player_summaries(self, steamids=None, **kwargs):
"""Returns a dictionary containing a player summaries
:param steamids: (list) list of ``32-bit`` or ``64-bit`` steam ids, notice
that api will convert if ``32-bit`` are given
:return: dictionary of player summaries, see :doc:`responses </responses>`
"""
if not isinstance(steamids, collections.Iterable):
steamids = [steamids]
base64_ids = list(map(convert_to_64_bit, filter(lambda x: x is not None, steamids)))
if 'steamids' not in kwargs:
kwargs['steamids'] = base64_ids
url = self.__build_url(urls.GET_PLAYER_SUMMARIES, **kwargs)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | [
"def",
"get_player_summaries",
"(",
"self",
",",
"steamids",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"isinstance",
"(",
"steamids",
",",
"collections",
".",
"Iterable",
")",
":",
"steamids",
"=",
"[",
"steamids",
"]",
"base64_ids",
"=",
"list",
"(",
"map",
"(",
"convert_to_64_bit",
",",
"filter",
"(",
"lambda",
"x",
":",
"x",
"is",
"not",
"None",
",",
"steamids",
")",
")",
")",
"if",
"'steamids'",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'steamids'",
"]",
"=",
"base64_ids",
"url",
"=",
"self",
".",
"__build_url",
"(",
"urls",
".",
"GET_PLAYER_SUMMARIES",
",",
"*",
"*",
"kwargs",
")",
"req",
"=",
"self",
".",
"executor",
"(",
"url",
")",
"if",
"self",
".",
"logger",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'URL: {0}'",
".",
"format",
"(",
"url",
")",
")",
"if",
"not",
"self",
".",
"__check_http_err",
"(",
"req",
".",
"status_code",
")",
":",
"return",
"response",
".",
"build",
"(",
"req",
",",
"url",
",",
"self",
".",
"raw_mode",
")"
] | Returns a dictionary containing a player summaries
:param steamids: (list) list of ``32-bit`` or ``64-bit`` steam ids, notice
that api will convert if ``32-bit`` are given
:return: dictionary of player summaries, see :doc:`responses </responses>` | [
"Returns",
"a",
"dictionary",
"containing",
"a",
"player",
"summaries"
] | 03c9e1c609ec36728805bbd3ada0a53ec8f51e86 | https://github.com/joshuaduffy/dota2api/blob/03c9e1c609ec36728805bbd3ada0a53ec8f51e86/dota2api/__init__.py#L164-L183 | train |
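A hedged sketch for `get_player_summaries` above: both a 32-bit and a 64-bit id are passed because the record maps them through `convert_to_64_bit` before building the request. The ids, key, and result keys are illustrative assumptions.

import dota2api

api = dota2api.Initialise("YOUR_STEAM_API_KEY")                                # placeholder key
summaries = api.get_player_summaries(steamids=[24925710, 76561197985191438])  # 32-bit and 64-bit, illustrative
for player in summaries["players"]:                                            # "players" key assumed from the Steam response shape
    print(player.get("personaname"))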
joshuaduffy/dota2api | dota2api/__init__.py | Initialise.get_heroes | def get_heroes(self, **kwargs):
"""Returns a dictionary of in-game heroes, used to parse ids into localised names
:return: dictionary of heroes, see :doc:`responses </responses>`
"""
url = self.__build_url(urls.GET_HEROES, language=self.language, **kwargs)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | python | def get_heroes(self, **kwargs):
"""Returns a dictionary of in-game heroes, used to parse ids into localised names
:return: dictionary of heroes, see :doc:`responses </responses>`
"""
url = self.__build_url(urls.GET_HEROES, language=self.language, **kwargs)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | [
"def",
"get_heroes",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"url",
"=",
"self",
".",
"__build_url",
"(",
"urls",
".",
"GET_HEROES",
",",
"language",
"=",
"self",
".",
"language",
",",
"*",
"*",
"kwargs",
")",
"req",
"=",
"self",
".",
"executor",
"(",
"url",
")",
"if",
"self",
".",
"logger",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'URL: {0}'",
".",
"format",
"(",
"url",
")",
")",
"if",
"not",
"self",
".",
"__check_http_err",
"(",
"req",
".",
"status_code",
")",
":",
"return",
"response",
".",
"build",
"(",
"req",
",",
"url",
",",
"self",
".",
"raw_mode",
")"
] | Returns a dictionary of in-game heroes, used to parse ids into localised names
:return: dictionary of heroes, see :doc:`responses </responses>` | [
"Returns",
"a",
"dictionary",
"of",
"in",
"-",
"game",
"heroes",
"used",
"to",
"parse",
"ids",
"into",
"localised",
"names"
] | 03c9e1c609ec36728805bbd3ada0a53ec8f51e86 | https://github.com/joshuaduffy/dota2api/blob/03c9e1c609ec36728805bbd3ada0a53ec8f51e86/dota2api/__init__.py#L185-L195 | train |
joshuaduffy/dota2api | dota2api/__init__.py | Initialise.get_tournament_prize_pool | def get_tournament_prize_pool(self, leagueid=None, **kwargs):
"""Returns a dictionary that includes community funded tournament prize pools
:param leagueid: (int, optional)
:return: dictionary of prize pools, see :doc:`responses </responses>`
"""
if 'leagueid' not in kwargs:
kwargs['leagueid'] = leagueid
url = self.__build_url(urls.GET_TOURNAMENT_PRIZE_POOL, **kwargs)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | python | def get_tournament_prize_pool(self, leagueid=None, **kwargs):
"""Returns a dictionary that includes community funded tournament prize pools
:param leagueid: (int, optional)
:return: dictionary of prize pools, see :doc:`responses </responses>`
"""
if 'leagueid' not in kwargs:
kwargs['leagueid'] = leagueid
url = self.__build_url(urls.GET_TOURNAMENT_PRIZE_POOL, **kwargs)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | [
"def",
"get_tournament_prize_pool",
"(",
"self",
",",
"leagueid",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"'leagueid'",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'leagueid'",
"]",
"=",
"leagueid",
"url",
"=",
"self",
".",
"__build_url",
"(",
"urls",
".",
"GET_TOURNAMENT_PRIZE_POOL",
",",
"*",
"*",
"kwargs",
")",
"req",
"=",
"self",
".",
"executor",
"(",
"url",
")",
"if",
"self",
".",
"logger",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'URL: {0}'",
".",
"format",
"(",
"url",
")",
")",
"if",
"not",
"self",
".",
"__check_http_err",
"(",
"req",
".",
"status_code",
")",
":",
"return",
"response",
".",
"build",
"(",
"req",
",",
"url",
",",
"self",
".",
"raw_mode",
")"
] | Returns a dictionary that includes community funded tournament prize pools
:param leagueid: (int, optional)
:return: dictionary of prize pools, see :doc:`responses </responses>` | [
"Returns",
"a",
"dictionary",
"that",
"includes",
"community",
"funded",
"tournament",
"prize",
"pools"
] | 03c9e1c609ec36728805bbd3ada0a53ec8f51e86 | https://github.com/joshuaduffy/dota2api/blob/03c9e1c609ec36728805bbd3ada0a53ec8f51e86/dota2api/__init__.py#L209-L222 | train |
joshuaduffy/dota2api | dota2api/__init__.py | Initialise.get_top_live_games | def get_top_live_games(self, partner='', **kwargs):
"""Returns a dictionary that includes top MMR live games
:param partner: (int, optional)
:return: dictionary of prize pools, see :doc:`responses </responses>`
"""
if 'partner' not in kwargs:
kwargs['partner'] = partner
url = self.__build_url(urls.GET_TOP_LIVE_GAME, **kwargs)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | python | def get_top_live_games(self, partner='', **kwargs):
"""Returns a dictionary that includes top MMR live games
:param partner: (int, optional)
:return: dictionary of prize pools, see :doc:`responses </responses>`
"""
if 'partner' not in kwargs:
kwargs['partner'] = partner
url = self.__build_url(urls.GET_TOP_LIVE_GAME, **kwargs)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | [
"def",
"get_top_live_games",
"(",
"self",
",",
"partner",
"=",
"''",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"'partner'",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'partner'",
"]",
"=",
"partner",
"url",
"=",
"self",
".",
"__build_url",
"(",
"urls",
".",
"GET_TOP_LIVE_GAME",
",",
"*",
"*",
"kwargs",
")",
"req",
"=",
"self",
".",
"executor",
"(",
"url",
")",
"if",
"self",
".",
"logger",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'URL: {0}'",
".",
"format",
"(",
"url",
")",
")",
"if",
"not",
"self",
".",
"__check_http_err",
"(",
"req",
".",
"status_code",
")",
":",
"return",
"response",
".",
"build",
"(",
"req",
",",
"url",
",",
"self",
".",
"raw_mode",
")"
] | Returns a dictionary that includes top MMR live games
:param partner: (int, optional)
:return: dictionary of prize pools, see :doc:`responses </responses>` | [
"Returns",
"a",
"dictionary",
"that",
"includes",
"top",
"MMR",
"live",
"games"
] | 03c9e1c609ec36728805bbd3ada0a53ec8f51e86 | https://github.com/joshuaduffy/dota2api/blob/03c9e1c609ec36728805bbd3ada0a53ec8f51e86/dota2api/__init__.py#L224-L237 | train |
joshuaduffy/dota2api | dota2api/__init__.py | Initialise.__build_url | def __build_url(self, api_call, **kwargs):
"""Builds the api query"""
kwargs['key'] = self.api_key
if 'language' not in kwargs:
kwargs['language'] = self.language
if 'format' not in kwargs:
kwargs['format'] = self.__format
api_query = urlencode(kwargs)
return "{0}{1}?{2}".format(urls.BASE_URL,
api_call,
api_query) | python | def __build_url(self, api_call, **kwargs):
"""Builds the api query"""
kwargs['key'] = self.api_key
if 'language' not in kwargs:
kwargs['language'] = self.language
if 'format' not in kwargs:
kwargs['format'] = self.__format
api_query = urlencode(kwargs)
return "{0}{1}?{2}".format(urls.BASE_URL,
api_call,
api_query) | [
"def",
"__build_url",
"(",
"self",
",",
"api_call",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"[",
"'key'",
"]",
"=",
"self",
".",
"api_key",
"if",
"'language'",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'language'",
"]",
"=",
"self",
".",
"language",
"if",
"'format'",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'format'",
"]",
"=",
"self",
".",
"__format",
"api_query",
"=",
"urlencode",
"(",
"kwargs",
")",
"return",
"\"{0}{1}?{2}\"",
".",
"format",
"(",
"urls",
".",
"BASE_URL",
",",
"api_call",
",",
"api_query",
")"
] | Builds the api query | [
"Builds",
"the",
"api",
"query"
] | 03c9e1c609ec36728805bbd3ada0a53ec8f51e86 | https://github.com/joshuaduffy/dota2api/blob/03c9e1c609ec36728805bbd3ada0a53ec8f51e86/dota2api/__init__.py#L251-L262 | train |
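A standalone, hedged rendition of the URL assembly shown in `__build_url` above, using only the standard library; the base URL and endpoint constants are assumed stand-ins, not values read from `urls`.

from urllib.parse import urlencode

BASE_URL = "http://api.steampowered.com/"               # assumed stand-in for urls.BASE_URL
API_CALL = "IDOTA2Match_570/GetMatchDetails/v001/"      # assumed stand-in for one urls.* constant
params = {"key": "YOUR_STEAM_API_KEY", "language": "en_us", "format": "json", "match_id": 123}
url = "{0}{1}?{2}".format(BASE_URL, API_CALL, urlencode(params))
print(url)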
joshuaduffy/dota2api | dota2api/__init__.py | Initialise.__check_http_err | def __check_http_err(self, status_code):
"""Raises an exception if we get a http error"""
if status_code == 403:
raise exceptions.APIAuthenticationError(self.api_key)
elif status_code == 503:
raise exceptions.APITimeoutError()
else:
return False | python | def __check_http_err(self, status_code):
"""Raises an exception if we get a http error"""
if status_code == 403:
raise exceptions.APIAuthenticationError(self.api_key)
elif status_code == 503:
raise exceptions.APITimeoutError()
else:
return False | [
"def",
"__check_http_err",
"(",
"self",
",",
"status_code",
")",
":",
"if",
"status_code",
"==",
"403",
":",
"raise",
"exceptions",
".",
"APIAuthenticationError",
"(",
"self",
".",
"api_key",
")",
"elif",
"status_code",
"==",
"503",
":",
"raise",
"exceptions",
".",
"APITimeoutError",
"(",
")",
"else",
":",
"return",
"False"
] | Raises an exception if we get an HTTP error | [
"Raises",
"an",
"exception",
"if",
"we",
"get",
"a",
"http",
"error"
] | 03c9e1c609ec36728805bbd3ada0a53ec8f51e86 | https://github.com/joshuaduffy/dota2api/blob/03c9e1c609ec36728805bbd3ada0a53ec8f51e86/dota2api/__init__.py#L264-L271 | train |
joshuaduffy/dota2api | dota2api/src/parse.py | item_id | def item_id(response):
"""
Parse the item ids, will be available as ``item_0_name``, ``item_1_name``,
``item_2_name`` and so on
"""
dict_keys = ['item_0', 'item_1', 'item_2',
'item_3', 'item_4', 'item_5']
new_keys = ['item_0_name', 'item_1_name', 'item_2_name',
'item_3_name', 'item_4_name', 'item_5_name']
for player in response['players']:
for key, new_key in zip(dict_keys, new_keys):
for item in items['items']:
if item['id'] == player[key]:
player[new_key] = item['localized_name']
return response | python | def item_id(response):
"""
Parse the item ids, will be available as ``item_0_name``, ``item_1_name``,
``item_2_name`` and so on
"""
dict_keys = ['item_0', 'item_1', 'item_2',
'item_3', 'item_4', 'item_5']
new_keys = ['item_0_name', 'item_1_name', 'item_2_name',
'item_3_name', 'item_4_name', 'item_5_name']
for player in response['players']:
for key, new_key in zip(dict_keys, new_keys):
for item in items['items']:
if item['id'] == player[key]:
player[new_key] = item['localized_name']
return response | [
"def",
"item_id",
"(",
"response",
")",
":",
"dict_keys",
"=",
"[",
"'item_0'",
",",
"'item_1'",
",",
"'item_2'",
",",
"'item_3'",
",",
"'item_4'",
",",
"'item_5'",
"]",
"new_keys",
"=",
"[",
"'item_0_name'",
",",
"'item_1_name'",
",",
"'item_2_name'",
",",
"'item_3_name'",
",",
"'item_4_name'",
",",
"'item_5_name'",
"]",
"for",
"player",
"in",
"response",
"[",
"'players'",
"]",
":",
"for",
"key",
",",
"new_key",
"in",
"zip",
"(",
"dict_keys",
",",
"new_keys",
")",
":",
"for",
"item",
"in",
"items",
"[",
"'items'",
"]",
":",
"if",
"item",
"[",
"'id'",
"]",
"==",
"player",
"[",
"key",
"]",
":",
"player",
"[",
"new_key",
"]",
"=",
"item",
"[",
"'localized_name'",
"]",
"return",
"response"
] | Parse the item ids, will be available as ``item_0_name``, ``item_1_name``,
``item_2_name`` and so on | [
"Parse",
"the",
"item",
"ids",
"will",
"be",
"available",
"as",
"item_0_name",
"item_1_name",
"item_2_name",
"and",
"so",
"on"
] | 03c9e1c609ec36728805bbd3ada0a53ec8f51e86 | https://github.com/joshuaduffy/dota2api/blob/03c9e1c609ec36728805bbd3ada0a53ec8f51e86/dota2api/src/parse.py#L40-L56 | train |
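A self-contained, hedged sketch of the id-to-name mapping pattern used by `item_id` above; the inline items table is made up for illustration and is not the real reference file.

items = {"items": [{"id": 1, "localized_name": "Blink Dagger"}]}          # illustrative entries only
response = {"players": [{"item_0": 1, "item_1": 0, "item_2": 0,
                         "item_3": 0, "item_4": 0, "item_5": 0}]}
for player in response["players"]:
    for slot in ("item_0", "item_1", "item_2", "item_3", "item_4", "item_5"):
        for item in items["items"]:
            if item["id"] == player[slot]:
                player[slot + "_name"] = item["localized_name"]
print(response["players"][0].get("item_0_name"))                           # -> Blink Dagger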
bitlabstudio/django-review | review/templatetags/review_tags.py | get_reviews | def get_reviews(obj):
"""Simply returns the reviews for an object."""
ctype = ContentType.objects.get_for_model(obj)
return models.Review.objects.filter(content_type=ctype, object_id=obj.id) | python | def get_reviews(obj):
"""Simply returns the reviews for an object."""
ctype = ContentType.objects.get_for_model(obj)
return models.Review.objects.filter(content_type=ctype, object_id=obj.id) | [
"def",
"get_reviews",
"(",
"obj",
")",
":",
"ctype",
"=",
"ContentType",
".",
"objects",
".",
"get_for_model",
"(",
"obj",
")",
"return",
"models",
".",
"Review",
".",
"objects",
".",
"filter",
"(",
"content_type",
"=",
"ctype",
",",
"object_id",
"=",
"obj",
".",
"id",
")"
] | Simply returns the reviews for an object. | [
"Simply",
"returns",
"the",
"reviews",
"for",
"an",
"object",
"."
] | 70d4b5c8d52d9a5615e5d0f5c7f147e15573c566 | https://github.com/bitlabstudio/django-review/blob/70d4b5c8d52d9a5615e5d0f5c7f147e15573c566/review/templatetags/review_tags.py#L12-L15 | train |
bitlabstudio/django-review | review/templatetags/review_tags.py | get_review_average | def get_review_average(obj):
"""Returns the review average for an object."""
total = 0
reviews = get_reviews(obj)
if not reviews:
return False
for review in reviews:
average = review.get_average_rating()
if average:
total += review.get_average_rating()
if total > 0:
return total / reviews.count()
return False | python | def get_review_average(obj):
"""Returns the review average for an object."""
total = 0
reviews = get_reviews(obj)
if not reviews:
return False
for review in reviews:
average = review.get_average_rating()
if average:
total += review.get_average_rating()
if total > 0:
return total / reviews.count()
return False | [
"def",
"get_review_average",
"(",
"obj",
")",
":",
"total",
"=",
"0",
"reviews",
"=",
"get_reviews",
"(",
"obj",
")",
"if",
"not",
"reviews",
":",
"return",
"False",
"for",
"review",
"in",
"reviews",
":",
"average",
"=",
"review",
".",
"get_average_rating",
"(",
")",
"if",
"average",
":",
"total",
"+=",
"review",
".",
"get_average_rating",
"(",
")",
"if",
"total",
">",
"0",
":",
"return",
"total",
"/",
"reviews",
".",
"count",
"(",
")",
"return",
"False"
] | Returns the review average for an object. | [
"Returns",
"the",
"review",
"average",
"for",
"an",
"object",
"."
] | 70d4b5c8d52d9a5615e5d0f5c7f147e15573c566 | https://github.com/bitlabstudio/django-review/blob/70d4b5c8d52d9a5615e5d0f5c7f147e15573c566/review/templatetags/review_tags.py#L19-L31 | train |
bitlabstudio/django-review | review/templatetags/review_tags.py | render_category_averages | def render_category_averages(obj, normalize_to=100):
"""Renders all the sub-averages for each category."""
context = {'reviewed_item': obj}
ctype = ContentType.objects.get_for_model(obj)
reviews = models.Review.objects.filter(
content_type=ctype, object_id=obj.id)
category_averages = {}
for review in reviews:
review_category_averages = review.get_category_averages(normalize_to)
if review_category_averages:
for category, average in review_category_averages.items():
if category not in category_averages:
category_averages[category] = review_category_averages[
category]
else:
category_averages[category] += review_category_averages[
category]
if reviews and category_averages:
for category, average in category_averages.items():
category_averages[category] = \
category_averages[category] / models.Rating.objects.filter(
category=category, value__isnull=False,
review__content_type=ctype,
review__object_id=obj.id).exclude(value='').count()
else:
category_averages = {}
for category in models.RatingCategory.objects.filter(
counts_for_average=True):
category_averages[category] = 0.0
context.update({'category_averages': category_averages})
return context | python | def render_category_averages(obj, normalize_to=100):
"""Renders all the sub-averages for each category."""
context = {'reviewed_item': obj}
ctype = ContentType.objects.get_for_model(obj)
reviews = models.Review.objects.filter(
content_type=ctype, object_id=obj.id)
category_averages = {}
for review in reviews:
review_category_averages = review.get_category_averages(normalize_to)
if review_category_averages:
for category, average in review_category_averages.items():
if category not in category_averages:
category_averages[category] = review_category_averages[
category]
else:
category_averages[category] += review_category_averages[
category]
if reviews and category_averages:
for category, average in category_averages.items():
category_averages[category] = \
category_averages[category] / models.Rating.objects.filter(
category=category, value__isnull=False,
review__content_type=ctype,
review__object_id=obj.id).exclude(value='').count()
else:
category_averages = {}
for category in models.RatingCategory.objects.filter(
counts_for_average=True):
category_averages[category] = 0.0
context.update({'category_averages': category_averages})
return context | [
"def",
"render_category_averages",
"(",
"obj",
",",
"normalize_to",
"=",
"100",
")",
":",
"context",
"=",
"{",
"'reviewed_item'",
":",
"obj",
"}",
"ctype",
"=",
"ContentType",
".",
"objects",
".",
"get_for_model",
"(",
"obj",
")",
"reviews",
"=",
"models",
".",
"Review",
".",
"objects",
".",
"filter",
"(",
"content_type",
"=",
"ctype",
",",
"object_id",
"=",
"obj",
".",
"id",
")",
"category_averages",
"=",
"{",
"}",
"for",
"review",
"in",
"reviews",
":",
"review_category_averages",
"=",
"review",
".",
"get_category_averages",
"(",
"normalize_to",
")",
"if",
"review_category_averages",
":",
"for",
"category",
",",
"average",
"in",
"review_category_averages",
".",
"items",
"(",
")",
":",
"if",
"category",
"not",
"in",
"category_averages",
":",
"category_averages",
"[",
"category",
"]",
"=",
"review_category_averages",
"[",
"category",
"]",
"else",
":",
"category_averages",
"[",
"category",
"]",
"+=",
"review_category_averages",
"[",
"category",
"]",
"if",
"reviews",
"and",
"category_averages",
":",
"for",
"category",
",",
"average",
"in",
"category_averages",
".",
"items",
"(",
")",
":",
"category_averages",
"[",
"category",
"]",
"=",
"category_averages",
"[",
"category",
"]",
"/",
"models",
".",
"Rating",
".",
"objects",
".",
"filter",
"(",
"category",
"=",
"category",
",",
"value__isnull",
"=",
"False",
",",
"review__content_type",
"=",
"ctype",
",",
"review__object_id",
"=",
"obj",
".",
"id",
")",
".",
"exclude",
"(",
"value",
"=",
"''",
")",
".",
"count",
"(",
")",
"else",
":",
"category_averages",
"=",
"{",
"}",
"for",
"category",
"in",
"models",
".",
"RatingCategory",
".",
"objects",
".",
"filter",
"(",
"counts_for_average",
"=",
"True",
")",
":",
"category_averages",
"[",
"category",
"]",
"=",
"0.0",
"context",
".",
"update",
"(",
"{",
"'category_averages'",
":",
"category_averages",
"}",
")",
"return",
"context"
] | Renders all the sub-averages for each category. | [
"Renders",
"all",
"the",
"sub",
"-",
"averages",
"for",
"each",
"category",
"."
] | 70d4b5c8d52d9a5615e5d0f5c7f147e15573c566 | https://github.com/bitlabstudio/django-review/blob/70d4b5c8d52d9a5615e5d0f5c7f147e15573c566/review/templatetags/review_tags.py#L41-L71 | train |
bitlabstudio/django-review | review/templatetags/review_tags.py | total_review_average | def total_review_average(obj, normalize_to=100):
"""Returns the average for all reviews of the given object."""
ctype = ContentType.objects.get_for_model(obj)
total_average = 0
reviews = models.Review.objects.filter(
content_type=ctype, object_id=obj.id)
for review in reviews:
total_average += review.get_average_rating(normalize_to)
if reviews:
total_average /= reviews.count()
return total_average | python | def total_review_average(obj, normalize_to=100):
"""Returns the average for all reviews of the given object."""
ctype = ContentType.objects.get_for_model(obj)
total_average = 0
reviews = models.Review.objects.filter(
content_type=ctype, object_id=obj.id)
for review in reviews:
total_average += review.get_average_rating(normalize_to)
if reviews:
total_average /= reviews.count()
return total_average | [
"def",
"total_review_average",
"(",
"obj",
",",
"normalize_to",
"=",
"100",
")",
":",
"ctype",
"=",
"ContentType",
".",
"objects",
".",
"get_for_model",
"(",
"obj",
")",
"total_average",
"=",
"0",
"reviews",
"=",
"models",
".",
"Review",
".",
"objects",
".",
"filter",
"(",
"content_type",
"=",
"ctype",
",",
"object_id",
"=",
"obj",
".",
"id",
")",
"for",
"review",
"in",
"reviews",
":",
"total_average",
"+=",
"review",
".",
"get_average_rating",
"(",
"normalize_to",
")",
"if",
"reviews",
":",
"total_average",
"/=",
"reviews",
".",
"count",
"(",
")",
"return",
"total_average"
] | Returns the average for all reviews of the given object. | [
"Returns",
"the",
"average",
"for",
"all",
"reviews",
"of",
"the",
"given",
"object",
"."
] | 70d4b5c8d52d9a5615e5d0f5c7f147e15573c566 | https://github.com/bitlabstudio/django-review/blob/70d4b5c8d52d9a5615e5d0f5c7f147e15573c566/review/templatetags/review_tags.py#L75-L85 | train |
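A hedged sketch that calls the template-tag helpers above directly from Python; `book` stands for any saved model instance that has Review rows attached and is an assumption of this example, not part of the records.

from review.templatetags.review_tags import get_reviews, total_review_average

reviews = get_reviews(book)                           # queryset of Review objects for `book` (assumed instance)
score = total_review_average(book, normalize_to=5)    # 0 when there are no reviews yet
print(reviews.count(), score)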
bitlabstudio/django-review | review/templatetags/review_tags.py | user_has_reviewed | def user_has_reviewed(obj, user):
"""Returns True if the user has already reviewed the object."""
ctype = ContentType.objects.get_for_model(obj)
try:
models.Review.objects.get(user=user, content_type=ctype,
object_id=obj.id)
except models.Review.DoesNotExist:
return False
return True | python | def user_has_reviewed(obj, user):
"""Returns True if the user has already reviewed the object."""
ctype = ContentType.objects.get_for_model(obj)
try:
models.Review.objects.get(user=user, content_type=ctype,
object_id=obj.id)
except models.Review.DoesNotExist:
return False
return True | [
"def",
"user_has_reviewed",
"(",
"obj",
",",
"user",
")",
":",
"ctype",
"=",
"ContentType",
".",
"objects",
".",
"get_for_model",
"(",
"obj",
")",
"try",
":",
"models",
".",
"Review",
".",
"objects",
".",
"get",
"(",
"user",
"=",
"user",
",",
"content_type",
"=",
"ctype",
",",
"object_id",
"=",
"obj",
".",
"id",
")",
"except",
"models",
".",
"Review",
".",
"DoesNotExist",
":",
"return",
"False",
"return",
"True"
] | Returns True if the user has already reviewed the object. | [
"Returns",
"True",
"if",
"the",
"user",
"has",
"already",
"reviewed",
"the",
"object",
"."
] | 70d4b5c8d52d9a5615e5d0f5c7f147e15573c566 | https://github.com/bitlabstudio/django-review/blob/70d4b5c8d52d9a5615e5d0f5c7f147e15573c566/review/templatetags/review_tags.py#L89-L97 | train |
ahawker/ulid | ulid/base32.py | str_to_bytes | def str_to_bytes(value: str, expected_length: int) -> bytes:
"""
Convert the given string to bytes and validate it is within the Base32 character set.
:param value: String to convert to bytes
:type value: :class:`~str`
:param expected_length: Expected length of the input string
:type expected_length: :class:`~int`
:return: Value converted to bytes.
:rtype: :class:`~bytes`
"""
length = len(value)
if length != expected_length:
raise ValueError('Expects {} characters for decoding; got {}'.format(expected_length, length))
try:
encoded = value.encode('ascii')
except UnicodeEncodeError as ex:
raise ValueError('Expects value that can be encoded in ASCII charset: {}'.format(ex))
decoding = DECODING
# Confirm all bytes are valid Base32 decode characters.
# Note: ASCII encoding handles the out of range checking for us.
for byte in encoded:
if decoding[byte] > 31:
raise ValueError('Non-base32 character found: "{}"'.format(chr(byte)))
return encoded | python | def str_to_bytes(value: str, expected_length: int) -> bytes:
"""
Convert the given string to bytes and validate it is within the Base32 character set.
:param value: String to convert to bytes
:type value: :class:`~str`
:param expected_length: Expected length of the input string
:type expected_length: :class:`~int`
:return: Value converted to bytes.
:rtype: :class:`~bytes`
"""
length = len(value)
if length != expected_length:
raise ValueError('Expects {} characters for decoding; got {}'.format(expected_length, length))
try:
encoded = value.encode('ascii')
except UnicodeEncodeError as ex:
raise ValueError('Expects value that can be encoded in ASCII charset: {}'.format(ex))
decoding = DECODING
# Confirm all bytes are valid Base32 decode characters.
# Note: ASCII encoding handles the out of range checking for us.
for byte in encoded:
if decoding[byte] > 31:
raise ValueError('Non-base32 character found: "{}"'.format(chr(byte)))
return encoded | [
"def",
"str_to_bytes",
"(",
"value",
":",
"str",
",",
"expected_length",
":",
"int",
")",
"->",
"bytes",
":",
"length",
"=",
"len",
"(",
"value",
")",
"if",
"length",
"!=",
"expected_length",
":",
"raise",
"ValueError",
"(",
"'Expects {} characters for decoding; got {}'",
".",
"format",
"(",
"expected_length",
",",
"length",
")",
")",
"try",
":",
"encoded",
"=",
"value",
".",
"encode",
"(",
"'ascii'",
")",
"except",
"UnicodeEncodeError",
"as",
"ex",
":",
"raise",
"ValueError",
"(",
"'Expects value that can be encoded in ASCII charset: {}'",
".",
"format",
"(",
"ex",
")",
")",
"decoding",
"=",
"DECODING",
"# Confirm all bytes are valid Base32 decode characters.",
"# Note: ASCII encoding handles the out of range checking for us.",
"for",
"byte",
"in",
"encoded",
":",
"if",
"decoding",
"[",
"byte",
"]",
">",
"31",
":",
"raise",
"ValueError",
"(",
"'Non-base32 character found: \"{}\"'",
".",
"format",
"(",
"chr",
"(",
"byte",
")",
")",
")",
"return",
"encoded"
] | Convert the given string to bytes and validate it is within the Base32 character set.
:param value: String to convert to bytes
:type value: :class:`~str`
:param expected_length: Expected length of the input string
:type expected_length: :class:`~int`
:return: Value converted to bytes.
:rtype: :class:`~bytes` | [
"Convert",
"the",
"given",
"string",
"to",
"bytes",
"and",
"validate",
"it",
"is",
"within",
"the",
"Base32",
"character",
"set",
"."
] | f6459bafebbd1a1ffd71a8718bd5592c2e4dd59f | https://github.com/ahawker/ulid/blob/f6459bafebbd1a1ffd71a8718bd5592c2e4dd59f/ulid/base32.py#L340-L368 | train |
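A hedged sketch for `str_to_bytes` above; the 26-character input is just a run of valid Crockford Base32 characters used to exercise the length and charset checks, not a meaningful ULID.

from ulid import base32

ok = base32.str_to_bytes("0123456789ABCDEFGHJKMNPQRS", 26)
print(ok)                                  # b'0123456789ABCDEFGHJKMNPQRS'
try:
    base32.str_to_bytes("not-base32!", 26)
except ValueError as err:                  # wrong length (and bad characters) raise ValueError
    print(err)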
noxdafox/pebble | setup.py | package_version | def package_version():
"""Get the package version via Git Tag."""
version_path = os.path.join(os.path.dirname(__file__), 'version.py')
version = read_version(version_path)
write_version(version_path, version)
return version | python | def package_version():
"""Get the package version via Git Tag."""
version_path = os.path.join(os.path.dirname(__file__), 'version.py')
version = read_version(version_path)
write_version(version_path, version)
return version | [
"def",
"package_version",
"(",
")",
":",
"version_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
",",
"'version.py'",
")",
"version",
"=",
"read_version",
"(",
"version_path",
")",
"write_version",
"(",
"version_path",
",",
"version",
")",
"return",
"version"
] | Get the package version via Git Tag. | [
"Get",
"the",
"package",
"version",
"via",
"Git",
"Tag",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/setup.py#L10-L17 | train |
noxdafox/pebble | pebble/decorators.py | synchronized | def synchronized(*args):
"""A synchronized function prevents two or more callers to interleave
its execution preventing race conditions.
The synchronized decorator accepts as optional parameter a Lock, RLock or
Semaphore object which will be employed to ensure the function's atomicity.
If no synchronization object is given, a single threading.Lock will be used.
This implies that between different decorated function only one at a time
will be executed.
"""
if callable(args[0]):
return decorate_synchronized(args[0], _synchronized_lock)
else:
def wrap(function):
return decorate_synchronized(function, args[0])
return wrap | python | def synchronized(*args):
"""A synchronized function prevents two or more callers to interleave
its execution preventing race conditions.
The synchronized decorator accepts as optional parameter a Lock, RLock or
Semaphore object which will be employed to ensure the function's atomicity.
If no synchronization object is given, a single threading.Lock will be used.
This implies that between different decorated function only one at a time
will be executed.
"""
if callable(args[0]):
return decorate_synchronized(args[0], _synchronized_lock)
else:
def wrap(function):
return decorate_synchronized(function, args[0])
return wrap | [
"def",
"synchronized",
"(",
"*",
"args",
")",
":",
"if",
"callable",
"(",
"args",
"[",
"0",
"]",
")",
":",
"return",
"decorate_synchronized",
"(",
"args",
"[",
"0",
"]",
",",
"_synchronized_lock",
")",
"else",
":",
"def",
"wrap",
"(",
"function",
")",
":",
"return",
"decorate_synchronized",
"(",
"function",
",",
"args",
"[",
"0",
"]",
")",
"return",
"wrap"
] | A synchronized function prevents two or more callers from interleaving
its execution preventing race conditions.
The synchronized decorator accepts as optional parameter a Lock, RLock or
Semaphore object which will be employed to ensure the function's atomicity.
If no synchronization object is given, a single threading.Lock will be used.
This implies that between different decorated functions only one at a time
will be executed. | [
"A",
"synchronized",
"function",
"prevents",
"two",
"or",
"more",
"callers",
"to",
"interleave",
"its",
"execution",
"preventing",
"race",
"conditions",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/decorators.py#L26-L44 | train |
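A hedged usage sketch for the `synchronized` decorator above, passing one explicit lock as the docstring allows; the counter example itself is purely illustrative.

import threading
from pebble import synchronized

counter_lock = threading.Lock()
counter = 0

@synchronized(counter_lock)
def bump():
    global counter
    counter += 1

threads = [threading.Thread(target=bump) for _ in range(8)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(counter)   # 8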
noxdafox/pebble | pebble/pool/thread.py | worker_thread | def worker_thread(context):
"""The worker thread routines."""
queue = context.task_queue
parameters = context.worker_parameters
if parameters.initializer is not None:
if not run_initializer(parameters.initializer, parameters.initargs):
context.state = ERROR
return
for task in get_next_task(context, parameters.max_tasks):
execute_next_task(task)
queue.task_done() | python | def worker_thread(context):
"""The worker thread routines."""
queue = context.task_queue
parameters = context.worker_parameters
if parameters.initializer is not None:
if not run_initializer(parameters.initializer, parameters.initargs):
context.state = ERROR
return
for task in get_next_task(context, parameters.max_tasks):
execute_next_task(task)
queue.task_done() | [
"def",
"worker_thread",
"(",
"context",
")",
":",
"queue",
"=",
"context",
".",
"task_queue",
"parameters",
"=",
"context",
".",
"worker_parameters",
"if",
"parameters",
".",
"initializer",
"is",
"not",
"None",
":",
"if",
"not",
"run_initializer",
"(",
"parameters",
".",
"initializer",
",",
"parameters",
".",
"initargs",
")",
":",
"context",
".",
"state",
"=",
"ERROR",
"return",
"for",
"task",
"in",
"get_next_task",
"(",
"context",
",",
"parameters",
".",
"max_tasks",
")",
":",
"execute_next_task",
"(",
"task",
")",
"queue",
".",
"task_done",
"(",
")"
] | The worker thread routines. | [
"The",
"worker",
"thread",
"routines",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/pool/thread.py#L155-L167 | train |
noxdafox/pebble | pebble/common.py | stop_process | def stop_process(process):
"""Does its best to stop the process."""
process.terminate()
process.join(3)
if process.is_alive() and os.name != 'nt':
try:
os.kill(process.pid, signal.SIGKILL)
process.join()
except OSError:
return
if process.is_alive():
raise RuntimeError("Unable to terminate PID %d" % os.getpid()) | python | def stop_process(process):
"""Does its best to stop the process."""
process.terminate()
process.join(3)
if process.is_alive() and os.name != 'nt':
try:
os.kill(process.pid, signal.SIGKILL)
process.join()
except OSError:
return
if process.is_alive():
raise RuntimeError("Unable to terminate PID %d" % os.getpid()) | [
"def",
"stop_process",
"(",
"process",
")",
":",
"process",
".",
"terminate",
"(",
")",
"process",
".",
"join",
"(",
"3",
")",
"if",
"process",
".",
"is_alive",
"(",
")",
"and",
"os",
".",
"name",
"!=",
"'nt'",
":",
"try",
":",
"os",
".",
"kill",
"(",
"process",
".",
"pid",
",",
"signal",
".",
"SIGKILL",
")",
"process",
".",
"join",
"(",
")",
"except",
"OSError",
":",
"return",
"if",
"process",
".",
"is_alive",
"(",
")",
":",
"raise",
"RuntimeError",
"(",
"\"Unable to terminate PID %d\"",
"%",
"os",
".",
"getpid",
"(",
")",
")"
] | Does its best to stop the process. | [
"Does",
"its",
"best",
"to",
"stop",
"the",
"process",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/common.py#L143-L156 | train |
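The record above escalates from terminate() to SIGKILL. Below is a standalone sketch of the same pattern using only the standard library; the child deliberately ignores SIGTERM so the escalation is visible (all names are illustrative, and the SIGKILL branch applies to POSIX only).

import os
import signal
import time
from multiprocessing import Process

def stubborn():
    signal.signal(signal.SIGTERM, signal.SIG_IGN)   # ignore polite termination
    time.sleep(60)

if __name__ == '__main__':
    worker = Process(target=stubborn)
    worker.start()
    time.sleep(0.5)                  # give the child time to install its handler

    worker.terminate()               # SIGTERM on POSIX: ask nicely first
    worker.join(3)                   # grace period
    if worker.is_alive() and os.name != 'nt':
        os.kill(worker.pid, signal.SIGKILL)   # then force it
        worker.join()
    print('exitcode:', worker.exitcode)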
noxdafox/pebble | pebble/common.py | send_result | def send_result(pipe, data):
"""Send result handling pickling and communication errors."""
try:
pipe.send(data)
except (pickle.PicklingError, TypeError) as error:
error.traceback = format_exc()
pipe.send(RemoteException(error, error.traceback)) | python | def send_result(pipe, data):
"""Send result handling pickling and communication errors."""
try:
pipe.send(data)
except (pickle.PicklingError, TypeError) as error:
error.traceback = format_exc()
pipe.send(RemoteException(error, error.traceback)) | [
"def",
"send_result",
"(",
"pipe",
",",
"data",
")",
":",
"try",
":",
"pipe",
".",
"send",
"(",
"data",
")",
"except",
"(",
"pickle",
".",
"PicklingError",
",",
"TypeError",
")",
"as",
"error",
":",
"error",
".",
"traceback",
"=",
"format_exc",
"(",
")",
"pipe",
".",
"send",
"(",
"RemoteException",
"(",
"error",
",",
"error",
".",
"traceback",
")",
")"
] | Send result handling pickling and communication errors. | [
"Send",
"result",
"handling",
"pickling",
"and",
"communication",
"errors",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/common.py#L177-L183 | train |
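A short sketch of the failure mode the function above guards against: results that cannot be pickled would otherwise break the worker-to-parent pipe. The RemoteException wrapper from the record is replaced here by a plain string, purely for illustration.

import pickle
from multiprocessing import Pipe

parent_end, child_end = Pipe()

unpicklable = lambda x: x   # lambdas cannot be pickled by the default pickler
try:
    child_end.send(unpicklable)
except (pickle.PicklingError, TypeError, AttributeError) as error:
    # Fall back to something that is guaranteed to be picklable,
    # in the same spirit as sending a RemoteException above.
    child_end.send(repr(error))

print(parent_end.recv())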
noxdafox/pebble | pebble/concurrent/process.py | process | def process(*args, **kwargs):
"""Runs the decorated function in a concurrent process,
taking care of the result and error management.
Decorated functions will return a concurrent.futures.Future object
once called.
The timeout parameter will set a maximum execution time
for the decorated function. If the execution exceeds the timeout,
the process will be stopped and the Future will raise TimeoutError.
"""
timeout = kwargs.get('timeout')
# decorator without parameters
if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
return _process_wrapper(args[0], timeout)
else:
# decorator with parameters
if timeout is not None and not isinstance(timeout, (int, float)):
raise TypeError('Timeout expected to be None or integer or float')
def decorating_function(function):
return _process_wrapper(function, timeout)
return decorating_function | python | def process(*args, **kwargs):
"""Runs the decorated function in a concurrent process,
taking care of the result and error management.
Decorated functions will return a concurrent.futures.Future object
once called.
The timeout parameter will set a maximum execution time
for the decorated function. If the execution exceeds the timeout,
the process will be stopped and the Future will raise TimeoutError.
"""
timeout = kwargs.get('timeout')
# decorator without parameters
if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
return _process_wrapper(args[0], timeout)
else:
# decorator with parameters
if timeout is not None and not isinstance(timeout, (int, float)):
raise TypeError('Timeout expected to be None or integer or float')
def decorating_function(function):
return _process_wrapper(function, timeout)
return decorating_function | [
"def",
"process",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"timeout",
"=",
"kwargs",
".",
"get",
"(",
"'timeout'",
")",
"# decorator without parameters",
"if",
"len",
"(",
"args",
")",
"==",
"1",
"and",
"len",
"(",
"kwargs",
")",
"==",
"0",
"and",
"callable",
"(",
"args",
"[",
"0",
"]",
")",
":",
"return",
"_process_wrapper",
"(",
"args",
"[",
"0",
"]",
",",
"timeout",
")",
"else",
":",
"# decorator with parameters",
"if",
"timeout",
"is",
"not",
"None",
"and",
"not",
"isinstance",
"(",
"timeout",
",",
"(",
"int",
",",
"float",
")",
")",
":",
"raise",
"TypeError",
"(",
"'Timeout expected to be None or integer or float'",
")",
"def",
"decorating_function",
"(",
"function",
")",
":",
"return",
"_process_wrapper",
"(",
"function",
",",
"timeout",
")",
"return",
"decorating_function"
] | Runs the decorated function in a concurrent process,
taking care of the result and error management.
Decorated functions will return a concurrent.futures.Future object
once called.
The timeout parameter will set a maximum execution time
for the decorated function. If the execution exceeds the timeout,
the process will be stopped and the Future will raise TimeoutError. | [
"Runs",
"the",
"decorated",
"function",
"in",
"a",
"concurrent",
"process",
"taking",
"care",
"of",
"the",
"result",
"and",
"error",
"management",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/concurrent/process.py#L36-L61 | train |
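An illustrative use of the decorator documented above, assuming pebble's public concurrent.process entry point; the sleep is chosen so the call deliberately exceeds the timeout.

import time
from concurrent.futures import TimeoutError
from pebble import concurrent

@concurrent.process(timeout=2)
def slow_add(a, b):
    time.sleep(5)          # exceeds the 2 second budget on purpose
    return a + b

if __name__ == '__main__':
    future = slow_add(1, 2)        # returns a Future immediately
    try:
        print(future.result())
    except TimeoutError:
        print('worker stopped after exceeding the timeout')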
noxdafox/pebble | pebble/concurrent/process.py | _worker_handler | def _worker_handler(future, worker, pipe, timeout):
"""Worker lifecycle manager.
Waits for the worker to perform its task,
collects result, runs the callback and cleans up the process.
"""
result = _get_result(future, pipe, timeout)
if isinstance(result, BaseException):
if isinstance(result, ProcessExpired):
result.exitcode = worker.exitcode
future.set_exception(result)
else:
future.set_result(result)
if worker.is_alive():
stop_process(worker) | python | def _worker_handler(future, worker, pipe, timeout):
"""Worker lifecycle manager.
Waits for the worker to perform its task,
collects result, runs the callback and cleans up the process.
"""
result = _get_result(future, pipe, timeout)
if isinstance(result, BaseException):
if isinstance(result, ProcessExpired):
result.exitcode = worker.exitcode
future.set_exception(result)
else:
future.set_result(result)
if worker.is_alive():
stop_process(worker) | [
"def",
"_worker_handler",
"(",
"future",
",",
"worker",
",",
"pipe",
",",
"timeout",
")",
":",
"result",
"=",
"_get_result",
"(",
"future",
",",
"pipe",
",",
"timeout",
")",
"if",
"isinstance",
"(",
"result",
",",
"BaseException",
")",
":",
"if",
"isinstance",
"(",
"result",
",",
"ProcessExpired",
")",
":",
"result",
".",
"exitcode",
"=",
"worker",
".",
"exitcode",
"future",
".",
"set_exception",
"(",
"result",
")",
"else",
":",
"future",
".",
"set_result",
"(",
"result",
")",
"if",
"worker",
".",
"is_alive",
"(",
")",
":",
"stop_process",
"(",
"worker",
")"
] | Worker lifecycle manager.
Waits for the worker to perform its task,
collects result, runs the callback and cleans up the process. | [
"Worker",
"lifecycle",
"manager",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/concurrent/process.py#L92-L110 | train |
noxdafox/pebble | pebble/concurrent/process.py | _function_handler | def _function_handler(function, args, kwargs, pipe):
"""Runs the actual function in separate process and returns its result."""
signal.signal(signal.SIGINT, signal.SIG_IGN)
result = process_execute(function, *args, **kwargs)
send_result(pipe, result) | python | def _function_handler(function, args, kwargs, pipe):
"""Runs the actual function in separate process and returns its result."""
signal.signal(signal.SIGINT, signal.SIG_IGN)
result = process_execute(function, *args, **kwargs)
send_result(pipe, result) | [
"def",
"_function_handler",
"(",
"function",
",",
"args",
",",
"kwargs",
",",
"pipe",
")",
":",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGINT",
",",
"signal",
".",
"SIG_IGN",
")",
"result",
"=",
"process_execute",
"(",
"function",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"send_result",
"(",
"pipe",
",",
"result",
")"
] | Runs the actual function in separate process and returns its result. | [
"Runs",
"the",
"actual",
"function",
"in",
"separate",
"process",
"and",
"returns",
"its",
"result",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/concurrent/process.py#L113-L119 | train |
noxdafox/pebble | pebble/concurrent/process.py | _get_result | def _get_result(future, pipe, timeout):
"""Waits for result and handles communication errors."""
counter = count(step=SLEEP_UNIT)
try:
while not pipe.poll(SLEEP_UNIT):
if timeout is not None and next(counter) >= timeout:
return TimeoutError('Task Timeout', timeout)
elif future.cancelled():
return CancelledError()
return pipe.recv()
except (EOFError, OSError):
return ProcessExpired('Abnormal termination')
except Exception as error:
return error | python | def _get_result(future, pipe, timeout):
"""Waits for result and handles communication errors."""
counter = count(step=SLEEP_UNIT)
try:
while not pipe.poll(SLEEP_UNIT):
if timeout is not None and next(counter) >= timeout:
return TimeoutError('Task Timeout', timeout)
elif future.cancelled():
return CancelledError()
return pipe.recv()
except (EOFError, OSError):
return ProcessExpired('Abnormal termination')
except Exception as error:
return error | [
"def",
"_get_result",
"(",
"future",
",",
"pipe",
",",
"timeout",
")",
":",
"counter",
"=",
"count",
"(",
"step",
"=",
"SLEEP_UNIT",
")",
"try",
":",
"while",
"not",
"pipe",
".",
"poll",
"(",
"SLEEP_UNIT",
")",
":",
"if",
"timeout",
"is",
"not",
"None",
"and",
"next",
"(",
"counter",
")",
">=",
"timeout",
":",
"return",
"TimeoutError",
"(",
"'Task Timeout'",
",",
"timeout",
")",
"elif",
"future",
".",
"cancelled",
"(",
")",
":",
"return",
"CancelledError",
"(",
")",
"return",
"pipe",
".",
"recv",
"(",
")",
"except",
"(",
"EOFError",
",",
"OSError",
")",
":",
"return",
"ProcessExpired",
"(",
"'Abnormal termination'",
")",
"except",
"Exception",
"as",
"error",
":",
"return",
"error"
] | Waits for result and handles communication errors. | [
"Waits",
"for",
"result",
"and",
"handles",
"communication",
"errors",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/concurrent/process.py#L122-L137 | train |
noxdafox/pebble | pebble/concurrent/process.py | _trampoline | def _trampoline(name, module, *args, **kwargs):
"""Trampoline function for decorators.
Lookups the function between the registered ones;
if not found, forces its registering and then executes it.
"""
function = _function_lookup(name, module)
return function(*args, **kwargs) | python | def _trampoline(name, module, *args, **kwargs):
"""Trampoline function for decorators.
Lookups the function between the registered ones;
if not found, forces its registering and then executes it.
"""
function = _function_lookup(name, module)
return function(*args, **kwargs) | [
"def",
"_trampoline",
"(",
"name",
",",
"module",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"function",
"=",
"_function_lookup",
"(",
"name",
",",
"module",
")",
"return",
"function",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | Trampoline function for decorators.
Lookups the function between the registered ones;
if not found, forces its registering and then executes it. | [
"Trampoline",
"function",
"for",
"decorators",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/concurrent/process.py#L152-L161 | train |
noxdafox/pebble | pebble/concurrent/process.py | _function_lookup | def _function_lookup(name, module):
"""Searches the function between the registered ones.
If not found, it imports the module forcing its registration.
"""
try:
return _registered_functions[name]
except KeyError: # force function registering
__import__(module)
mod = sys.modules[module]
getattr(mod, name)
return _registered_functions[name] | python | def _function_lookup(name, module):
"""Searches the function between the registered ones.
If not found, it imports the module forcing its registration.
"""
try:
return _registered_functions[name]
except KeyError: # force function registering
__import__(module)
mod = sys.modules[module]
getattr(mod, name)
return _registered_functions[name] | [
"def",
"_function_lookup",
"(",
"name",
",",
"module",
")",
":",
"try",
":",
"return",
"_registered_functions",
"[",
"name",
"]",
"except",
"KeyError",
":",
"# force function registering",
"__import__",
"(",
"module",
")",
"mod",
"=",
"sys",
".",
"modules",
"[",
"module",
"]",
"getattr",
"(",
"mod",
",",
"name",
")",
"return",
"_registered_functions",
"[",
"name",
"]"
] | Searches the function between the registered ones.
If not found, it imports the module forcing its registration. | [
"Searches",
"the",
"function",
"between",
"the",
"registered",
"ones",
".",
"If",
"not",
"found",
"it",
"imports",
"the",
"module",
"forcing",
"its",
"registration",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/concurrent/process.py#L164-L176 | train |
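A standalone restatement of the register-on-import idea the lookup above depends on: decorating a function at import time records it in a registry keyed by name, so a later lookup can force the import and find it. The names here (register, lookup, _registry) are illustrative, not part of the record.

import importlib

_registry = {}

def register(function):
    # Decorating a function at module import time records it by name.
    _registry[function.__qualname__] = function
    return function

def lookup(name, module):
    try:
        return _registry[name]
    except KeyError:
        # Importing the module runs its decorators, which fills the registry.
        importlib.import_module(module)
        return _registry[name]

@register
def double(x):
    return x * 2

print(lookup('double', __name__)(21))   # 42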
noxdafox/pebble | pebble/pool/process.py | worker_process | def worker_process(params, channel):
"""The worker process routines."""
signal(SIGINT, SIG_IGN)
if params.initializer is not None:
if not run_initializer(params.initializer, params.initargs):
os._exit(1)
try:
for task in worker_get_next_task(channel, params.max_tasks):
payload = task.payload
result = process_execute(
payload.function, *payload.args, **payload.kwargs)
send_result(channel, Result(task.id, result))
except (EnvironmentError, OSError, RuntimeError) as error:
os._exit(error.errno if error.errno else 1)
except EOFError:
os._exit(0) | python | def worker_process(params, channel):
"""The worker process routines."""
signal(SIGINT, SIG_IGN)
if params.initializer is not None:
if not run_initializer(params.initializer, params.initargs):
os._exit(1)
try:
for task in worker_get_next_task(channel, params.max_tasks):
payload = task.payload
result = process_execute(
payload.function, *payload.args, **payload.kwargs)
send_result(channel, Result(task.id, result))
except (EnvironmentError, OSError, RuntimeError) as error:
os._exit(error.errno if error.errno else 1)
except EOFError:
os._exit(0) | [
"def",
"worker_process",
"(",
"params",
",",
"channel",
")",
":",
"signal",
"(",
"SIGINT",
",",
"SIG_IGN",
")",
"if",
"params",
".",
"initializer",
"is",
"not",
"None",
":",
"if",
"not",
"run_initializer",
"(",
"params",
".",
"initializer",
",",
"params",
".",
"initargs",
")",
":",
"os",
".",
"_exit",
"(",
"1",
")",
"try",
":",
"for",
"task",
"in",
"worker_get_next_task",
"(",
"channel",
",",
"params",
".",
"max_tasks",
")",
":",
"payload",
"=",
"task",
".",
"payload",
"result",
"=",
"process_execute",
"(",
"payload",
".",
"function",
",",
"*",
"payload",
".",
"args",
",",
"*",
"*",
"payload",
".",
"kwargs",
")",
"send_result",
"(",
"channel",
",",
"Result",
"(",
"task",
".",
"id",
",",
"result",
")",
")",
"except",
"(",
"EnvironmentError",
",",
"OSError",
",",
"RuntimeError",
")",
"as",
"error",
":",
"os",
".",
"_exit",
"(",
"error",
".",
"errno",
"if",
"error",
".",
"errno",
"else",
"1",
")",
"except",
"EOFError",
":",
"os",
".",
"_exit",
"(",
"0",
")"
] | The worker process routines. | [
"The",
"worker",
"process",
"routines",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/pool/process.py#L375-L392 | train |
noxdafox/pebble | pebble/pool/process.py | task_transaction | def task_transaction(channel):
"""Ensures a task is fetched and acknowledged atomically."""
with channel.lock:
if channel.poll(0):
task = channel.recv()
channel.send(Acknowledgement(os.getpid(), task.id))
else:
raise RuntimeError("Race condition between workers")
return task | python | def task_transaction(channel):
"""Ensures a task is fetched and acknowledged atomically."""
with channel.lock:
if channel.poll(0):
task = channel.recv()
channel.send(Acknowledgement(os.getpid(), task.id))
else:
raise RuntimeError("Race condition between workers")
return task | [
"def",
"task_transaction",
"(",
"channel",
")",
":",
"with",
"channel",
".",
"lock",
":",
"if",
"channel",
".",
"poll",
"(",
"0",
")",
":",
"task",
"=",
"channel",
".",
"recv",
"(",
")",
"channel",
".",
"send",
"(",
"Acknowledgement",
"(",
"os",
".",
"getpid",
"(",
")",
",",
"task",
".",
"id",
")",
")",
"else",
":",
"raise",
"RuntimeError",
"(",
"\"Race condition between workers\"",
")",
"return",
"task"
] | Ensures a task is fetched and acknowledged atomically. | [
"Ensures",
"a",
"task",
"is",
"fetched",
"and",
"acknowledged",
"atomically",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/pool/process.py#L410-L419 | train |
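A standalone sketch of the fetch-and-acknowledge transaction above, with the pool's channel replaced by a plain lock and deque (names are illustrative). The point is that taking the task and acknowledging it happen as one atomic step, so two consumers can never acknowledge the same task.

import os
from collections import deque
from threading import Lock

task_queue = deque([('task-1', 'payload')])
ack_log = []
channel_lock = Lock()

def fetch_and_ack():
    with channel_lock:
        if task_queue:
            task_id, payload = task_queue.popleft()
            ack_log.append((os.getpid(), task_id))
            return task_id, payload
        raise RuntimeError('race condition between consumers')

print(fetch_and_ack())
print(ack_log)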
noxdafox/pebble | pebble/pool/process.py | PoolManager.schedule | def schedule(self, task):
"""Schedules a new Task in the PoolManager."""
self.task_manager.register(task)
self.worker_manager.dispatch(task) | python | def schedule(self, task):
"""Schedules a new Task in the PoolManager."""
self.task_manager.register(task)
self.worker_manager.dispatch(task) | [
"def",
"schedule",
"(",
"self",
",",
"task",
")",
":",
"self",
".",
"task_manager",
".",
"register",
"(",
"task",
")",
"self",
".",
"worker_manager",
".",
"dispatch",
"(",
"task",
")"
] | Schedules a new Task in the PoolManager. | [
"Schedules",
"a",
"new",
"Task",
"in",
"the",
"PoolManager",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/pool/process.py#L194-L197 | train |
noxdafox/pebble | pebble/pool/process.py | PoolManager.process_next_message | def process_next_message(self, timeout):
"""Processes the next message coming from the workers."""
message = self.worker_manager.receive(timeout)
if isinstance(message, Acknowledgement):
self.task_manager.task_start(message.task, message.worker)
elif isinstance(message, Result):
self.task_manager.task_done(message.task, message.result) | python | def process_next_message(self, timeout):
"""Processes the next message coming from the workers."""
message = self.worker_manager.receive(timeout)
if isinstance(message, Acknowledgement):
self.task_manager.task_start(message.task, message.worker)
elif isinstance(message, Result):
self.task_manager.task_done(message.task, message.result) | [
"def",
"process_next_message",
"(",
"self",
",",
"timeout",
")",
":",
"message",
"=",
"self",
".",
"worker_manager",
".",
"receive",
"(",
"timeout",
")",
"if",
"isinstance",
"(",
"message",
",",
"Acknowledgement",
")",
":",
"self",
".",
"task_manager",
".",
"task_start",
"(",
"message",
".",
"task",
",",
"message",
".",
"worker",
")",
"elif",
"isinstance",
"(",
"message",
",",
"Result",
")",
":",
"self",
".",
"task_manager",
".",
"task_done",
"(",
"message",
".",
"task",
",",
"message",
".",
"result",
")"
] | Processes the next message coming from the workers. | [
"Processes",
"the",
"next",
"message",
"coming",
"from",
"the",
"workers",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/pool/process.py#L199-L206 | train |
noxdafox/pebble | pebble/pool/process.py | PoolManager.update_tasks | def update_tasks(self):
"""Handles timing out Tasks."""
for task in self.task_manager.timeout_tasks():
self.task_manager.task_done(
task.id, TimeoutError("Task timeout", task.timeout))
self.worker_manager.stop_worker(task.worker_id)
for task in self.task_manager.cancelled_tasks():
self.task_manager.task_done(
task.id, CancelledError())
self.worker_manager.stop_worker(task.worker_id) | python | def update_tasks(self):
"""Handles timing out Tasks."""
for task in self.task_manager.timeout_tasks():
self.task_manager.task_done(
task.id, TimeoutError("Task timeout", task.timeout))
self.worker_manager.stop_worker(task.worker_id)
for task in self.task_manager.cancelled_tasks():
self.task_manager.task_done(
task.id, CancelledError())
self.worker_manager.stop_worker(task.worker_id) | [
"def",
"update_tasks",
"(",
"self",
")",
":",
"for",
"task",
"in",
"self",
".",
"task_manager",
".",
"timeout_tasks",
"(",
")",
":",
"self",
".",
"task_manager",
".",
"task_done",
"(",
"task",
".",
"id",
",",
"TimeoutError",
"(",
"\"Task timeout\"",
",",
"task",
".",
"timeout",
")",
")",
"self",
".",
"worker_manager",
".",
"stop_worker",
"(",
"task",
".",
"worker_id",
")",
"for",
"task",
"in",
"self",
".",
"task_manager",
".",
"cancelled_tasks",
"(",
")",
":",
"self",
".",
"task_manager",
".",
"task_done",
"(",
"task",
".",
"id",
",",
"CancelledError",
"(",
")",
")",
"self",
".",
"worker_manager",
".",
"stop_worker",
"(",
"task",
".",
"worker_id",
")"
] | Handles timing out Tasks. | [
"Handles",
"timing",
"out",
"Tasks",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/pool/process.py#L212-L222 | train |
noxdafox/pebble | pebble/pool/process.py | PoolManager.update_workers | def update_workers(self):
"""Handles unexpected processes termination."""
for expiration in self.worker_manager.inspect_workers():
self.handle_worker_expiration(expiration)
self.worker_manager.create_workers() | python | def update_workers(self):
"""Handles unexpected processes termination."""
for expiration in self.worker_manager.inspect_workers():
self.handle_worker_expiration(expiration)
self.worker_manager.create_workers() | [
"def",
"update_workers",
"(",
"self",
")",
":",
"for",
"expiration",
"in",
"self",
".",
"worker_manager",
".",
"inspect_workers",
"(",
")",
":",
"self",
".",
"handle_worker_expiration",
"(",
"expiration",
")",
"self",
".",
"worker_manager",
".",
"create_workers",
"(",
")"
] | Handles unexpected processes termination. | [
"Handles",
"unexpected",
"processes",
"termination",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/pool/process.py#L224-L229 | train |
noxdafox/pebble | pebble/pool/process.py | TaskManager.task_done | def task_done(self, task_id, result):
"""Set the tasks result and run the callback."""
try:
task = self.tasks.pop(task_id)
except KeyError:
return # result of previously timeout Task
else:
if task.future.cancelled():
task.set_running_or_notify_cancel()
elif isinstance(result, BaseException):
task.future.set_exception(result)
else:
task.future.set_result(result)
self.task_done_callback() | python | def task_done(self, task_id, result):
"""Set the tasks result and run the callback."""
try:
task = self.tasks.pop(task_id)
except KeyError:
return # result of previously timeout Task
else:
if task.future.cancelled():
task.set_running_or_notify_cancel()
elif isinstance(result, BaseException):
task.future.set_exception(result)
else:
task.future.set_result(result)
self.task_done_callback() | [
"def",
"task_done",
"(",
"self",
",",
"task_id",
",",
"result",
")",
":",
"try",
":",
"task",
"=",
"self",
".",
"tasks",
".",
"pop",
"(",
"task_id",
")",
"except",
"KeyError",
":",
"return",
"# result of previously timeout Task",
"else",
":",
"if",
"task",
".",
"future",
".",
"cancelled",
"(",
")",
":",
"task",
".",
"set_running_or_notify_cancel",
"(",
")",
"elif",
"isinstance",
"(",
"result",
",",
"BaseException",
")",
":",
"task",
".",
"future",
".",
"set_exception",
"(",
"result",
")",
"else",
":",
"task",
".",
"future",
".",
"set_result",
"(",
"result",
")",
"self",
".",
"task_done_callback",
"(",
")"
] | Set the tasks result and run the callback. | [
"Set",
"the",
"tasks",
"result",
"and",
"run",
"the",
"callback",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/pool/process.py#L271-L285 | train |
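The method above resolves a task's future with either a result or an exception. A minimal sketch of that dispatch on a bare concurrent.futures.Future:

from concurrent.futures import Future

def resolve(future, outcome):
    if isinstance(outcome, BaseException):
        future.set_exception(outcome)
    else:
        future.set_result(outcome)

ok, failed = Future(), Future()
resolve(ok, 42)
resolve(failed, ValueError('boom'))

print(ok.result())         # 42
print(failed.exception())  # ValueError('boom')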
noxdafox/pebble | pebble/pool/process.py | WorkerManager.inspect_workers | def inspect_workers(self):
"""Updates the workers status.
Returns the workers which have unexpectedly ended.
"""
workers = tuple(self.workers.values())
expired = tuple(w for w in workers if not w.is_alive())
for worker in expired:
self.workers.pop(worker.pid)
return ((w.pid, w.exitcode) for w in expired if w.exitcode != 0) | python | def inspect_workers(self):
"""Updates the workers status.
Returns the workers which have unexpectedly ended.
"""
workers = tuple(self.workers.values())
expired = tuple(w for w in workers if not w.is_alive())
for worker in expired:
self.workers.pop(worker.pid)
return ((w.pid, w.exitcode) for w in expired if w.exitcode != 0) | [
"def",
"inspect_workers",
"(",
"self",
")",
":",
"workers",
"=",
"tuple",
"(",
"self",
".",
"workers",
".",
"values",
"(",
")",
")",
"expired",
"=",
"tuple",
"(",
"w",
"for",
"w",
"in",
"workers",
"if",
"not",
"w",
".",
"is_alive",
"(",
")",
")",
"for",
"worker",
"in",
"expired",
":",
"self",
".",
"workers",
".",
"pop",
"(",
"worker",
".",
"pid",
")",
"return",
"(",
"(",
"w",
".",
"pid",
",",
"w",
".",
"exitcode",
")",
"for",
"w",
"in",
"expired",
"if",
"w",
".",
"exitcode",
"!=",
"0",
")"
] | Updates the workers status.
Returns the workers which have unexpectedly ended. | [
"Updates",
"the",
"workers",
"status",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/pool/process.py#L328-L340 | train |
noxdafox/pebble | pebble/pool/base_pool.py | iter_chunks | def iter_chunks(chunksize, *iterables):
"""Iterates over zipped iterables in chunks."""
iterables = iter(zip(*iterables))
while 1:
chunk = tuple(islice(iterables, chunksize))
if not chunk:
return
yield chunk | python | def iter_chunks(chunksize, *iterables):
"""Iterates over zipped iterables in chunks."""
iterables = iter(zip(*iterables))
while 1:
chunk = tuple(islice(iterables, chunksize))
if not chunk:
return
yield chunk | [
"def",
"iter_chunks",
"(",
"chunksize",
",",
"*",
"iterables",
")",
":",
"iterables",
"=",
"iter",
"(",
"zip",
"(",
"*",
"iterables",
")",
")",
"while",
"1",
":",
"chunk",
"=",
"tuple",
"(",
"islice",
"(",
"iterables",
",",
"chunksize",
")",
")",
"if",
"not",
"chunk",
":",
"return",
"yield",
"chunk"
] | Iterates over zipped iterables in chunks. | [
"Iterates",
"over",
"zipped",
"iterables",
"in",
"chunks",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/pool/base_pool.py#L218-L228 | train |
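A small demonstration of the chunking helper above, restated here so the snippet runs on its own; the trailing, shorter chunk shows why the emptiness check is needed to terminate.

from itertools import islice

def iter_chunks(chunksize, *iterables):
    zipped = iter(zip(*iterables))
    while True:
        chunk = tuple(islice(zipped, chunksize))
        if not chunk:
            return
        yield chunk

names = ['a', 'b', 'c', 'd', 'e']
values = [1, 2, 3, 4, 5]
for chunk in iter_chunks(2, names, values):
    print(chunk)
# (('a', 1), ('b', 2))
# (('c', 3), ('d', 4))
# (('e', 5),)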
noxdafox/pebble | pebble/pool/base_pool.py | run_initializer | def run_initializer(initializer, initargs):
"""Runs the Pool initializer dealing with errors."""
try:
initializer(*initargs)
return True
except Exception as error:
logging.exception(error)
return False | python | def run_initializer(initializer, initargs):
"""Runs the Pool initializer dealing with errors."""
try:
initializer(*initargs)
return True
except Exception as error:
logging.exception(error)
return False | [
"def",
"run_initializer",
"(",
"initializer",
",",
"initargs",
")",
":",
"try",
":",
"initializer",
"(",
"*",
"initargs",
")",
"return",
"True",
"except",
"Exception",
"as",
"error",
":",
"logging",
".",
"exception",
"(",
"error",
")",
"return",
"False"
] | Runs the Pool initializer dealing with errors. | [
"Runs",
"the",
"Pool",
"initializer",
"dealing",
"with",
"errors",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/pool/base_pool.py#L239-L246 | train |
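A hedged example of the initializer hook this helper executes, using the ProcessPool(initializer=..., initargs=...) signature that appears elsewhere in this project; the logging setup is only an illustration of per-worker state.

import logging
from pebble import ProcessPool

def setup_worker(level):
    logging.basicConfig(level=level)    # runs once inside every worker process

def work(x):
    logging.getLogger(__name__).info('processing %s', x)
    return x * x

if __name__ == '__main__':
    with ProcessPool(max_workers=2, initializer=setup_worker,
                     initargs=(logging.INFO,)) as pool:
        future = pool.schedule(work, args=(3,))
        print(future.result())   # 9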
noxdafox/pebble | pebble/pool/base_pool.py | BasePool.join | def join(self, timeout=None):
"""Joins the pool waiting until all workers exited.
If *timeout* is set, it blocks until all workers are done
or raises TimeoutError.
"""
if self._context.state == RUNNING:
raise RuntimeError('The Pool is still running')
if self._context.state == CLOSED:
self._wait_queue_depletion(timeout)
self.stop()
self.join()
else:
self._context.task_queue.put(None)
self._stop_pool() | python | def join(self, timeout=None):
"""Joins the pool waiting until all workers exited.
If *timeout* is set, it blocks until all workers are done
or raises TimeoutError.
"""
if self._context.state == RUNNING:
raise RuntimeError('The Pool is still running')
if self._context.state == CLOSED:
self._wait_queue_depletion(timeout)
self.stop()
self.join()
else:
self._context.task_queue.put(None)
self._stop_pool() | [
"def",
"join",
"(",
"self",
",",
"timeout",
"=",
"None",
")",
":",
"if",
"self",
".",
"_context",
".",
"state",
"==",
"RUNNING",
":",
"raise",
"RuntimeError",
"(",
"'The Pool is still running'",
")",
"if",
"self",
".",
"_context",
".",
"state",
"==",
"CLOSED",
":",
"self",
".",
"_wait_queue_depletion",
"(",
"timeout",
")",
"self",
".",
"stop",
"(",
")",
"self",
".",
"join",
"(",
")",
"else",
":",
"self",
".",
"_context",
".",
"task_queue",
".",
"put",
"(",
"None",
")",
"self",
".",
"_stop_pool",
"(",
")"
] | Joins the pool waiting until all workers exited.
If *timeout* is set, it blocks until all workers are done
or raises TimeoutError. | [
"Joins",
"the",
"pool",
"waiting",
"until",
"all",
"workers",
"exited",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/pool/base_pool.py#L63-L77 | train |
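A sketch of the shutdown semantics described above, assuming the ThreadPool exported by this project: close() refuses new tasks but lets queued ones finish, and join() then blocks until every worker has exited.

from pebble import ThreadPool

def work(x):
    return x + 1

pool = ThreadPool(max_workers=2)
futures = [pool.schedule(work, args=(i,)) for i in range(4)]

pool.close()   # no new submissions; queued tasks keep running
pool.join()    # blocks until all workers have exited

print([f.result() for f in futures])   # [1, 2, 3, 4]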
noxdafox/pebble | pebble/concurrent/thread.py | thread | def thread(function):
"""Runs the decorated function within a concurrent thread,
taking care of the result and error management.
Decorated functions will return a concurrent.futures.Future object
once called.
"""
@wraps(function)
def wrapper(*args, **kwargs):
future = Future()
launch_thread(_function_handler, function, args, kwargs, future)
return future
return wrapper | python | def thread(function):
"""Runs the decorated function within a concurrent thread,
taking care of the result and error management.
Decorated functions will return a concurrent.futures.Future object
once called.
"""
@wraps(function)
def wrapper(*args, **kwargs):
future = Future()
launch_thread(_function_handler, function, args, kwargs, future)
return future
return wrapper | [
"def",
"thread",
"(",
"function",
")",
":",
"@",
"wraps",
"(",
"function",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"future",
"=",
"Future",
"(",
")",
"launch_thread",
"(",
"_function_handler",
",",
"function",
",",
"args",
",",
"kwargs",
",",
"future",
")",
"return",
"future",
"return",
"wrapper"
] | Runs the decorated function within a concurrent thread,
taking care of the result and error management.
Decorated functions will return a concurrent.futures.Future object
once called. | [
"Runs",
"the",
"decorated",
"function",
"within",
"a",
"concurrent",
"thread",
"taking",
"care",
"of",
"the",
"result",
"and",
"error",
"management",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/concurrent/thread.py#L24-L40 | train |
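An illustrative use of the thread decorator documented above, through pebble's concurrent namespace (assumed import path):

from pebble import concurrent

@concurrent.thread
def measure(text):
    return len(text)

future = measure('hello')   # returns a concurrent.futures.Future immediately
print(future.result())      # 5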
noxdafox/pebble | pebble/concurrent/thread.py | _function_handler | def _function_handler(function, args, kwargs, future):
"""Runs the actual function in separate thread and returns its result."""
future.set_running_or_notify_cancel()
try:
result = function(*args, **kwargs)
except BaseException as error:
error.traceback = format_exc()
future.set_exception(error)
else:
future.set_result(result) | python | def _function_handler(function, args, kwargs, future):
"""Runs the actual function in separate thread and returns its result."""
future.set_running_or_notify_cancel()
try:
result = function(*args, **kwargs)
except BaseException as error:
error.traceback = format_exc()
future.set_exception(error)
else:
future.set_result(result) | [
"def",
"_function_handler",
"(",
"function",
",",
"args",
",",
"kwargs",
",",
"future",
")",
":",
"future",
".",
"set_running_or_notify_cancel",
"(",
")",
"try",
":",
"result",
"=",
"function",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"BaseException",
"as",
"error",
":",
"error",
".",
"traceback",
"=",
"format_exc",
"(",
")",
"future",
".",
"set_exception",
"(",
"error",
")",
"else",
":",
"future",
".",
"set_result",
"(",
"result",
")"
] | Runs the actual function in separate thread and returns its result. | [
"Runs",
"the",
"actual",
"function",
"in",
"separate",
"thread",
"and",
"returns",
"its",
"result",
"."
] | d8f3d989655715754f0a65d7419cfa584491f614 | https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/concurrent/thread.py#L43-L53 | train |
SwoopSearch/pyaddress | address/address.py | create_cities_csv | def create_cities_csv(filename="places2k.txt", output="cities.csv"):
"""
Takes the places2k.txt from USPS and creates a simple file of all cities.
"""
with open(filename, 'r') as city_file:
with open(output, 'w') as out:
for line in city_file:
# Drop Puerto Rico (just looking for the 50 states)
if line[0:2] == "PR":
continue
# Per census.gov, characters 9-72 are the name of the city or place. Cut ,off the last part, which is city, town, etc.
# print " ".join(line[9:72].split()[:-1])
out.write(" ".join(line[9:72].split()[:-1]) + '\n') | python | def create_cities_csv(filename="places2k.txt", output="cities.csv"):
"""
Takes the places2k.txt from USPS and creates a simple file of all cities.
"""
with open(filename, 'r') as city_file:
with open(output, 'w') as out:
for line in city_file:
# Drop Puerto Rico (just looking for the 50 states)
if line[0:2] == "PR":
continue
# Per census.gov, characters 9-72 are the name of the city or place. Cut ,off the last part, which is city, town, etc.
# print " ".join(line[9:72].split()[:-1])
out.write(" ".join(line[9:72].split()[:-1]) + '\n') | [
"def",
"create_cities_csv",
"(",
"filename",
"=",
"\"places2k.txt\"",
",",
"output",
"=",
"\"cities.csv\"",
")",
":",
"with",
"open",
"(",
"filename",
",",
"'r'",
")",
"as",
"city_file",
":",
"with",
"open",
"(",
"output",
",",
"'w'",
")",
"as",
"out",
":",
"for",
"line",
"in",
"city_file",
":",
"# Drop Puerto Rico (just looking for the 50 states)",
"if",
"line",
"[",
"0",
":",
"2",
"]",
"==",
"\"PR\"",
":",
"continue",
"# Per census.gov, characters 9-72 are the name of the city or place. Cut ,off the last part, which is city, town, etc.",
"# print \" \".join(line[9:72].split()[:-1])",
"out",
".",
"write",
"(",
"\" \"",
".",
"join",
"(",
"line",
"[",
"9",
":",
"72",
"]",
".",
"split",
"(",
")",
"[",
":",
"-",
"1",
"]",
")",
"+",
"'\\n'",
")"
] | Takes the places2k.txt from USPS and creates a simple file of all cities. | [
"Takes",
"the",
"places2k",
".",
"txt",
"from",
"USPS",
"and",
"creates",
"a",
"simple",
"file",
"of",
"all",
"cities",
"."
] | 62ebb07a6840e710d256406a8ec1d06abec0e1c4 | https://github.com/SwoopSearch/pyaddress/blob/62ebb07a6840e710d256406a8ec1d06abec0e1c4/address/address.py#L674-L686 | train |
SwoopSearch/pyaddress | address/address.py | AddressParser.parse_address | def parse_address(self, address, line_number=-1):
"""
Return an Address object from the given address. Passes itself to the Address constructor to use all the custom
loaded suffixes, cities, etc.
"""
return Address(address, self, line_number, self.logger) | python | def parse_address(self, address, line_number=-1):
"""
Return an Address object from the given address. Passes itself to the Address constructor to use all the custom
loaded suffixes, cities, etc.
"""
return Address(address, self, line_number, self.logger) | [
"def",
"parse_address",
"(",
"self",
",",
"address",
",",
"line_number",
"=",
"-",
"1",
")",
":",
"return",
"Address",
"(",
"address",
",",
"self",
",",
"line_number",
",",
"self",
".",
"logger",
")"
] | Return an Address object from the given address. Passes itself to the Address constructor to use all the custom
loaded suffixes, cities, etc. | [
"Return",
"an",
"Address",
"object",
"from",
"the",
"given",
"address",
".",
"Passes",
"itself",
"to",
"the",
"Address",
"constructor",
"to",
"use",
"all",
"the",
"custom",
"loaded",
"suffixes",
"cities",
"etc",
"."
] | 62ebb07a6840e710d256406a8ec1d06abec0e1c4 | https://github.com/SwoopSearch/pyaddress/blob/62ebb07a6840e710d256406a8ec1d06abec0e1c4/address/address.py#L83-L88 | train |
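A hedged usage sketch of the parser above. The import path follows the package's usual entry point, and the attribute names mirror the fields handled elsewhere in this module (house_number, street, street_suffix, apartment, city, state); the sample address and its parse are illustrative, not guaranteed output.

from address import AddressParser

parser = AddressParser()
addr = parser.parse_address("123 W Main St. Apt 4 Springfield, IL")

print("%s | %s | %s" % (addr.house_number, addr.street, addr.street_suffix))
print("%s | %s | %s" % (addr.apartment, addr.city, addr.state))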
SwoopSearch/pyaddress | address/address.py | AddressParser.load_cities | def load_cities(self, filename):
"""
Load up all cities in lowercase for easier matching. The file should have one city per line, with no extra
characters. This isn't strictly required, but will vastly increase the accuracy.
"""
with open(filename, 'r') as f:
for line in f:
self.cities.append(line.strip().lower()) | python | def load_cities(self, filename):
"""
Load up all cities in lowercase for easier matching. The file should have one city per line, with no extra
characters. This isn't strictly required, but will vastly increase the accuracy.
"""
with open(filename, 'r') as f:
for line in f:
self.cities.append(line.strip().lower()) | [
"def",
"load_cities",
"(",
"self",
",",
"filename",
")",
":",
"with",
"open",
"(",
"filename",
",",
"'r'",
")",
"as",
"f",
":",
"for",
"line",
"in",
"f",
":",
"self",
".",
"cities",
".",
"append",
"(",
"line",
".",
"strip",
"(",
")",
".",
"lower",
"(",
")",
")"
] | Load up all cities in lowercase for easier matching. The file should have one city per line, with no extra
characters. This isn't strictly required, but will vastly increase the accuracy. | [
"Load",
"up",
"all",
"cities",
"in",
"lowercase",
"for",
"easier",
"matching",
".",
"The",
"file",
"should",
"have",
"one",
"city",
"per",
"line",
"with",
"no",
"extra",
"characters",
".",
"This",
"isn",
"t",
"strictly",
"required",
"but",
"will",
"vastly",
"increase",
"the",
"accuracy",
"."
] | 62ebb07a6840e710d256406a8ec1d06abec0e1c4 | https://github.com/SwoopSearch/pyaddress/blob/62ebb07a6840e710d256406a8ec1d06abec0e1c4/address/address.py#L128-L135 | train |
SwoopSearch/pyaddress | address/address.py | AddressParser.load_streets | def load_streets(self, filename):
"""
Load up all streets in lowercase for easier matching. The file should have one street per line, with no extra
characters. This isn't strictly required, but will vastly increase the accuracy.
"""
with open(filename, 'r') as f:
for line in f:
self.streets.append(line.strip().lower()) | python | def load_streets(self, filename):
"""
Load up all streets in lowercase for easier matching. The file should have one street per line, with no extra
characters. This isn't strictly required, but will vastly increase the accuracy.
"""
with open(filename, 'r') as f:
for line in f:
self.streets.append(line.strip().lower()) | [
"def",
"load_streets",
"(",
"self",
",",
"filename",
")",
":",
"with",
"open",
"(",
"filename",
",",
"'r'",
")",
"as",
"f",
":",
"for",
"line",
"in",
"f",
":",
"self",
".",
"streets",
".",
"append",
"(",
"line",
".",
"strip",
"(",
")",
".",
"lower",
"(",
")",
")"
] | Load up all streets in lowercase for easier matching. The file should have one street per line, with no extra
characters. This isn't strictly required, but will vastly increase the accuracy. | [
"Load",
"up",
"all",
"streets",
"in",
"lowercase",
"for",
"easier",
"matching",
".",
"The",
"file",
"should",
"have",
"one",
"street",
"per",
"line",
"with",
"no",
"extra",
"characters",
".",
"This",
"isn",
"t",
"strictly",
"required",
"but",
"will",
"vastly",
"increase",
"the",
"accuracy",
"."
] | 62ebb07a6840e710d256406a8ec1d06abec0e1c4 | https://github.com/SwoopSearch/pyaddress/blob/62ebb07a6840e710d256406a8ec1d06abec0e1c4/address/address.py#L137-L144 | train |
SwoopSearch/pyaddress | address/address.py | Address.preprocess_address | def preprocess_address(self, address):
"""
Takes a basic address and attempts to clean it up, extract reasonably assured bits that may throw off the
rest of the parsing, and return the cleaned address.
"""
# Run some basic cleaning
address = address.replace("# ", "#")
address = address.replace(" & ", "&")
# Clear the address of things like 'X units', which shouldn't be in an address anyway. We won't save this for now.
if re.search(r"-?-?\w+ units", address, re.IGNORECASE):
address = re.sub(r"-?-?\w+ units", "", address, flags=re.IGNORECASE)
# Sometimes buildings are put in parantheses.
# building_match = re.search(r"\(.*\)", address, re.IGNORECASE)
# if building_match:
# self.building = self._clean(building_match.group().replace('(', '').replace(')', ''))
# address = re.sub(r"\(.*\)", "", address, flags=re.IGNORECASE)
# Now let's get the apartment stuff out of the way. Using only sure match regexes, delete apartment parts from
# the address. This prevents things like "Unit" being the street name.
apartment_regexes = [r'#\w+ & \w+', '#\w+ rm \w+', "#\w+-\w", r'apt #{0,1}\w+', r'apartment #{0,1}\w+', r'#\w+',
r'# \w+', r'rm \w+', r'unit #?\w+', r'units #?\w+', r'- #{0,1}\w+', r'no\s?\d+\w*',
r'style\s\w{1,2}', r'townhouse style\s\w{1,2}']
for regex in apartment_regexes:
apartment_match = re.search(regex, address, re.IGNORECASE)
if apartment_match:
# print "Matched regex: ", regex, apartment_match.group()
self.apartment = self._clean(apartment_match.group())
address = re.sub(regex, "", address, flags=re.IGNORECASE)
# Now check for things like ", ," which throw off dstk
address = re.sub(r"\,\s*\,", ",", address)
return address | python | def preprocess_address(self, address):
"""
Takes a basic address and attempts to clean it up, extract reasonably assured bits that may throw off the
rest of the parsing, and return the cleaned address.
"""
# Run some basic cleaning
address = address.replace("# ", "#")
address = address.replace(" & ", "&")
# Clear the address of things like 'X units', which shouldn't be in an address anyway. We won't save this for now.
if re.search(r"-?-?\w+ units", address, re.IGNORECASE):
address = re.sub(r"-?-?\w+ units", "", address, flags=re.IGNORECASE)
# Sometimes buildings are put in parantheses.
# building_match = re.search(r"\(.*\)", address, re.IGNORECASE)
# if building_match:
# self.building = self._clean(building_match.group().replace('(', '').replace(')', ''))
# address = re.sub(r"\(.*\)", "", address, flags=re.IGNORECASE)
# Now let's get the apartment stuff out of the way. Using only sure match regexes, delete apartment parts from
# the address. This prevents things like "Unit" being the street name.
apartment_regexes = [r'#\w+ & \w+', '#\w+ rm \w+', "#\w+-\w", r'apt #{0,1}\w+', r'apartment #{0,1}\w+', r'#\w+',
r'# \w+', r'rm \w+', r'unit #?\w+', r'units #?\w+', r'- #{0,1}\w+', r'no\s?\d+\w*',
r'style\s\w{1,2}', r'townhouse style\s\w{1,2}']
for regex in apartment_regexes:
apartment_match = re.search(regex, address, re.IGNORECASE)
if apartment_match:
# print "Matched regex: ", regex, apartment_match.group()
self.apartment = self._clean(apartment_match.group())
address = re.sub(regex, "", address, flags=re.IGNORECASE)
# Now check for things like ", ," which throw off dstk
address = re.sub(r"\,\s*\,", ",", address)
return address | [
"def",
"preprocess_address",
"(",
"self",
",",
"address",
")",
":",
"# Run some basic cleaning",
"address",
"=",
"address",
".",
"replace",
"(",
"\"# \"",
",",
"\"#\"",
")",
"address",
"=",
"address",
".",
"replace",
"(",
"\" & \"",
",",
"\"&\"",
")",
"# Clear the address of things like 'X units', which shouldn't be in an address anyway. We won't save this for now.",
"if",
"re",
".",
"search",
"(",
"r\"-?-?\\w+ units\"",
",",
"address",
",",
"re",
".",
"IGNORECASE",
")",
":",
"address",
"=",
"re",
".",
"sub",
"(",
"r\"-?-?\\w+ units\"",
",",
"\"\"",
",",
"address",
",",
"flags",
"=",
"re",
".",
"IGNORECASE",
")",
"# Sometimes buildings are put in parantheses.",
"# building_match = re.search(r\"\\(.*\\)\", address, re.IGNORECASE)",
"# if building_match:",
"# self.building = self._clean(building_match.group().replace('(', '').replace(')', ''))",
"# address = re.sub(r\"\\(.*\\)\", \"\", address, flags=re.IGNORECASE)",
"# Now let's get the apartment stuff out of the way. Using only sure match regexes, delete apartment parts from",
"# the address. This prevents things like \"Unit\" being the street name.",
"apartment_regexes",
"=",
"[",
"r'#\\w+ & \\w+'",
",",
"'#\\w+ rm \\w+'",
",",
"\"#\\w+-\\w\"",
",",
"r'apt #{0,1}\\w+'",
",",
"r'apartment #{0,1}\\w+'",
",",
"r'#\\w+'",
",",
"r'# \\w+'",
",",
"r'rm \\w+'",
",",
"r'unit #?\\w+'",
",",
"r'units #?\\w+'",
",",
"r'- #{0,1}\\w+'",
",",
"r'no\\s?\\d+\\w*'",
",",
"r'style\\s\\w{1,2}'",
",",
"r'townhouse style\\s\\w{1,2}'",
"]",
"for",
"regex",
"in",
"apartment_regexes",
":",
"apartment_match",
"=",
"re",
".",
"search",
"(",
"regex",
",",
"address",
",",
"re",
".",
"IGNORECASE",
")",
"if",
"apartment_match",
":",
"# print \"Matched regex: \", regex, apartment_match.group()",
"self",
".",
"apartment",
"=",
"self",
".",
"_clean",
"(",
"apartment_match",
".",
"group",
"(",
")",
")",
"address",
"=",
"re",
".",
"sub",
"(",
"regex",
",",
"\"\"",
",",
"address",
",",
"flags",
"=",
"re",
".",
"IGNORECASE",
")",
"# Now check for things like \", ,\" which throw off dstk",
"address",
"=",
"re",
".",
"sub",
"(",
"r\"\\,\\s*\\,\"",
",",
"\",\"",
",",
"address",
")",
"return",
"address"
] | Takes a basic address and attempts to clean it up, extract reasonably assured bits that may throw off the
rest of the parsing, and return the cleaned address. | [
"Takes",
"a",
"basic",
"address",
"and",
"attempts",
"to",
"clean",
"it",
"up",
"extract",
"reasonably",
"assured",
"bits",
"that",
"may",
"throw",
"off",
"the",
"rest",
"of",
"the",
"parsing",
"and",
"return",
"the",
"cleaned",
"address",
"."
] | 62ebb07a6840e710d256406a8ec1d06abec0e1c4 | https://github.com/SwoopSearch/pyaddress/blob/62ebb07a6840e710d256406a8ec1d06abec0e1c4/address/address.py#L250-L279 | train |
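A small standalone demonstration of the apartment-stripping idea above, using one of the listed patterns and the same ", ," clean-up; the sample address is illustrative.

import re

address = "123 Main St. Apt 5B, Chicago, IL"
pattern = r'apt #{0,1}\w+'

match = re.search(pattern, address, re.IGNORECASE)
apartment = match.group() if match else None
cleaned = re.sub(pattern, "", address, flags=re.IGNORECASE)
cleaned = re.sub(r"\,\s*\,", ",", cleaned)   # collapse any ", ," left behind

print(apartment)   # Apt 5B
print(cleaned)     # 123 Main St. , Chicago, IL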
SwoopSearch/pyaddress | address/address.py | Address.check_state | def check_state(self, token):
"""
Check if state is in either the keys or values of our states list. Must come before the suffix.
"""
# print "zip", self.zip
if len(token) == 2 and self.state is None:
if token.capitalize() in self.parser.states.keys():
self.state = self._clean(self.parser.states[token.capitalize()])
return True
elif token.upper() in self.parser.states.values():
self.state = self._clean(token.upper())
return True
if self.state is None and self.street_suffix is None and len(self.comma_separated_address) > 1:
if token.capitalize() in self.parser.states.keys():
self.state = self._clean(self.parser.states[token.capitalize()])
return True
elif token.upper() in self.parser.states.values():
self.state = self._clean(token.upper())
return True
return False | python | def check_state(self, token):
"""
Check if state is in either the keys or values of our states list. Must come before the suffix.
"""
# print "zip", self.zip
if len(token) == 2 and self.state is None:
if token.capitalize() in self.parser.states.keys():
self.state = self._clean(self.parser.states[token.capitalize()])
return True
elif token.upper() in self.parser.states.values():
self.state = self._clean(token.upper())
return True
if self.state is None and self.street_suffix is None and len(self.comma_separated_address) > 1:
if token.capitalize() in self.parser.states.keys():
self.state = self._clean(self.parser.states[token.capitalize()])
return True
elif token.upper() in self.parser.states.values():
self.state = self._clean(token.upper())
return True
return False | [
"def",
"check_state",
"(",
"self",
",",
"token",
")",
":",
"# print \"zip\", self.zip",
"if",
"len",
"(",
"token",
")",
"==",
"2",
"and",
"self",
".",
"state",
"is",
"None",
":",
"if",
"token",
".",
"capitalize",
"(",
")",
"in",
"self",
".",
"parser",
".",
"states",
".",
"keys",
"(",
")",
":",
"self",
".",
"state",
"=",
"self",
".",
"_clean",
"(",
"self",
".",
"parser",
".",
"states",
"[",
"token",
".",
"capitalize",
"(",
")",
"]",
")",
"return",
"True",
"elif",
"token",
".",
"upper",
"(",
")",
"in",
"self",
".",
"parser",
".",
"states",
".",
"values",
"(",
")",
":",
"self",
".",
"state",
"=",
"self",
".",
"_clean",
"(",
"token",
".",
"upper",
"(",
")",
")",
"return",
"True",
"if",
"self",
".",
"state",
"is",
"None",
"and",
"self",
".",
"street_suffix",
"is",
"None",
"and",
"len",
"(",
"self",
".",
"comma_separated_address",
")",
">",
"1",
":",
"if",
"token",
".",
"capitalize",
"(",
")",
"in",
"self",
".",
"parser",
".",
"states",
".",
"keys",
"(",
")",
":",
"self",
".",
"state",
"=",
"self",
".",
"_clean",
"(",
"self",
".",
"parser",
".",
"states",
"[",
"token",
".",
"capitalize",
"(",
")",
"]",
")",
"return",
"True",
"elif",
"token",
".",
"upper",
"(",
")",
"in",
"self",
".",
"parser",
".",
"states",
".",
"values",
"(",
")",
":",
"self",
".",
"state",
"=",
"self",
".",
"_clean",
"(",
"token",
".",
"upper",
"(",
")",
")",
"return",
"True",
"return",
"False"
] | Check if state is in either the keys or values of our states list. Must come before the suffix. | [
"Check",
"if",
"state",
"is",
"in",
"either",
"the",
"keys",
"or",
"values",
"of",
"our",
"states",
"list",
".",
"Must",
"come",
"before",
"the",
"suffix",
"."
] | 62ebb07a6840e710d256406a8ec1d06abec0e1c4 | https://github.com/SwoopSearch/pyaddress/blob/62ebb07a6840e710d256406a8ec1d06abec0e1c4/address/address.py#L297-L316 | train |
SwoopSearch/pyaddress | address/address.py | Address.check_city | def check_city(self, token):
"""
Check if there is a known city from our city list. Must come before the suffix.
"""
shortened_cities = {'saint': 'st.'}
if self.city is None and self.state is not None and self.street_suffix is None:
if token.lower() in self.parser.cities:
self.city = self._clean(token.capitalize())
return True
return False
# Check that we're in the correct location, and that we have at least one comma in the address
if self.city is None and self.apartment is None and self.street_suffix is None and len(
self.comma_separated_address) > 1:
if token.lower() in self.parser.cities:
self.city = self._clean(token.capitalize())
return True
return False
# Multi word cities
if self.city is not None and self.street_suffix is None and self.street is None:
print "Checking for multi part city", token.lower(), token.lower() in shortened_cities.keys()
if token.lower() + ' ' + self.city in self.parser.cities:
self.city = self._clean((token.lower() + ' ' + self.city).capitalize())
return True
if token.lower() in shortened_cities.keys():
token = shortened_cities[token.lower()]
print "Checking for shorted multi part city", token.lower() + ' ' + self.city
if token.lower() + ' ' + self.city.lower() in self.parser.cities:
self.city = self._clean(token.capitalize() + ' ' + self.city.capitalize())
return True | python | def check_city(self, token):
"""
Check if there is a known city from our city list. Must come before the suffix.
"""
shortened_cities = {'saint': 'st.'}
if self.city is None and self.state is not None and self.street_suffix is None:
if token.lower() in self.parser.cities:
self.city = self._clean(token.capitalize())
return True
return False
# Check that we're in the correct location, and that we have at least one comma in the address
if self.city is None and self.apartment is None and self.street_suffix is None and len(
self.comma_separated_address) > 1:
if token.lower() in self.parser.cities:
self.city = self._clean(token.capitalize())
return True
return False
# Multi word cities
if self.city is not None and self.street_suffix is None and self.street is None:
print "Checking for multi part city", token.lower(), token.lower() in shortened_cities.keys()
if token.lower() + ' ' + self.city in self.parser.cities:
self.city = self._clean((token.lower() + ' ' + self.city).capitalize())
return True
if token.lower() in shortened_cities.keys():
token = shortened_cities[token.lower()]
print "Checking for shorted multi part city", token.lower() + ' ' + self.city
if token.lower() + ' ' + self.city.lower() in self.parser.cities:
self.city = self._clean(token.capitalize() + ' ' + self.city.capitalize())
return True | [
"def",
"check_city",
"(",
"self",
",",
"token",
")",
":",
"shortened_cities",
"=",
"{",
"'saint'",
":",
"'st.'",
"}",
"if",
"self",
".",
"city",
"is",
"None",
"and",
"self",
".",
"state",
"is",
"not",
"None",
"and",
"self",
".",
"street_suffix",
"is",
"None",
":",
"if",
"token",
".",
"lower",
"(",
")",
"in",
"self",
".",
"parser",
".",
"cities",
":",
"self",
".",
"city",
"=",
"self",
".",
"_clean",
"(",
"token",
".",
"capitalize",
"(",
")",
")",
"return",
"True",
"return",
"False",
"# Check that we're in the correct location, and that we have at least one comma in the address",
"if",
"self",
".",
"city",
"is",
"None",
"and",
"self",
".",
"apartment",
"is",
"None",
"and",
"self",
".",
"street_suffix",
"is",
"None",
"and",
"len",
"(",
"self",
".",
"comma_separated_address",
")",
">",
"1",
":",
"if",
"token",
".",
"lower",
"(",
")",
"in",
"self",
".",
"parser",
".",
"cities",
":",
"self",
".",
"city",
"=",
"self",
".",
"_clean",
"(",
"token",
".",
"capitalize",
"(",
")",
")",
"return",
"True",
"return",
"False",
"# Multi word cities",
"if",
"self",
".",
"city",
"is",
"not",
"None",
"and",
"self",
".",
"street_suffix",
"is",
"None",
"and",
"self",
".",
"street",
"is",
"None",
":",
"print",
"\"Checking for multi part city\"",
",",
"token",
".",
"lower",
"(",
")",
",",
"token",
".",
"lower",
"(",
")",
"in",
"shortened_cities",
".",
"keys",
"(",
")",
"if",
"token",
".",
"lower",
"(",
")",
"+",
"' '",
"+",
"self",
".",
"city",
"in",
"self",
".",
"parser",
".",
"cities",
":",
"self",
".",
"city",
"=",
"self",
".",
"_clean",
"(",
"(",
"token",
".",
"lower",
"(",
")",
"+",
"' '",
"+",
"self",
".",
"city",
")",
".",
"capitalize",
"(",
")",
")",
"return",
"True",
"if",
"token",
".",
"lower",
"(",
")",
"in",
"shortened_cities",
".",
"keys",
"(",
")",
":",
"token",
"=",
"shortened_cities",
"[",
"token",
".",
"lower",
"(",
")",
"]",
"print",
"\"Checking for shorted multi part city\"",
",",
"token",
".",
"lower",
"(",
")",
"+",
"' '",
"+",
"self",
".",
"city",
"if",
"token",
".",
"lower",
"(",
")",
"+",
"' '",
"+",
"self",
".",
"city",
".",
"lower",
"(",
")",
"in",
"self",
".",
"parser",
".",
"cities",
":",
"self",
".",
"city",
"=",
"self",
".",
"_clean",
"(",
"token",
".",
"capitalize",
"(",
")",
"+",
"' '",
"+",
"self",
".",
"city",
".",
"capitalize",
"(",
")",
")",
"return",
"True"
] | Check if there is a known city from our city list. Must come before the suffix. | [
"Check",
"if",
"there",
"is",
"a",
"known",
"city",
"from",
"our",
"city",
"list",
".",
"Must",
"come",
"before",
"the",
"suffix",
"."
] | 62ebb07a6840e710d256406a8ec1d06abec0e1c4 | https://github.com/SwoopSearch/pyaddress/blob/62ebb07a6840e710d256406a8ec1d06abec0e1c4/address/address.py#L318-L346 | train |
SwoopSearch/pyaddress | address/address.py | Address.check_street_suffix | def check_street_suffix(self, token):
"""
Attempts to match a street suffix. If found, it will return the abbreviation, with the first letter capitalized
and a period after it. E.g. "St." or "Ave."
"""
# Suffix must come before street
# print "Suffix check", token, "suffix", self.street_suffix, "street", self.street
if self.street_suffix is None and self.street is None:
# print "upper", token.upper()
if token.upper() in self.parser.suffixes.keys():
suffix = self.parser.suffixes[token.upper()]
self.street_suffix = self._clean(suffix.capitalize() + '.')
return True
elif token.upper() in self.parser.suffixes.values():
self.street_suffix = self._clean(token.capitalize() + '.')
return True
return False | python | def check_street_suffix(self, token):
"""
Attempts to match a street suffix. If found, it will return the abbreviation, with the first letter capitalized
and a period after it. E.g. "St." or "Ave."
"""
# Suffix must come before street
# print "Suffix check", token, "suffix", self.street_suffix, "street", self.street
if self.street_suffix is None and self.street is None:
# print "upper", token.upper()
if token.upper() in self.parser.suffixes.keys():
suffix = self.parser.suffixes[token.upper()]
self.street_suffix = self._clean(suffix.capitalize() + '.')
return True
elif token.upper() in self.parser.suffixes.values():
self.street_suffix = self._clean(token.capitalize() + '.')
return True
return False | [
"def",
"check_street_suffix",
"(",
"self",
",",
"token",
")",
":",
"# Suffix must come before street",
"# print \"Suffix check\", token, \"suffix\", self.street_suffix, \"street\", self.street",
"if",
"self",
".",
"street_suffix",
"is",
"None",
"and",
"self",
".",
"street",
"is",
"None",
":",
"# print \"upper\", token.upper()",
"if",
"token",
".",
"upper",
"(",
")",
"in",
"self",
".",
"parser",
".",
"suffixes",
".",
"keys",
"(",
")",
":",
"suffix",
"=",
"self",
".",
"parser",
".",
"suffixes",
"[",
"token",
".",
"upper",
"(",
")",
"]",
"self",
".",
"street_suffix",
"=",
"self",
".",
"_clean",
"(",
"suffix",
".",
"capitalize",
"(",
")",
"+",
"'.'",
")",
"return",
"True",
"elif",
"token",
".",
"upper",
"(",
")",
"in",
"self",
".",
"parser",
".",
"suffixes",
".",
"values",
"(",
")",
":",
"self",
".",
"street_suffix",
"=",
"self",
".",
"_clean",
"(",
"token",
".",
"capitalize",
"(",
")",
"+",
"'.'",
")",
"return",
"True",
"return",
"False"
] | Attempts to match a street suffix. If found, it will return the abbreviation, with the first letter capitalized
and a period after it. E.g. "St." or "Ave." | [
"Attempts",
"to",
"match",
"a",
"street",
"suffix",
".",
"If",
"found",
"it",
"will",
"return",
"the",
"abbreviation",
"with",
"the",
"first",
"letter",
"capitalized",
"and",
"a",
"period",
"after",
"it",
".",
"E",
".",
"g",
".",
"St",
".",
"or",
"Ave",
"."
] | 62ebb07a6840e710d256406a8ec1d06abec0e1c4 | https://github.com/SwoopSearch/pyaddress/blob/62ebb07a6840e710d256406a8ec1d06abec0e1c4/address/address.py#L377-L393 | train |
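A minimal sketch of the suffix normalization described in the docstring above; the SUFFIXES table is a small illustrative stand-in for self.parser.suffixes.

SUFFIXES = {"STREET": "ST", "AVENUE": "AVE", "ROAD": "RD"}   # long form -> abbreviation

def normalize_suffix(token):
    # Long form ("street") -> abbreviation, capitalized with a trailing period ("St.").
    if token.upper() in SUFFIXES:
        return SUFFIXES[token.upper()].capitalize() + '.'
    # Already an abbreviation ("AVE") -> same capitalization rule ("Ave.").
    if token.upper() in SUFFIXES.values():
        return token.capitalize() + '.'
    return None

assert normalize_suffix("street") == "St."
assert normalize_suffix("ave") == "Ave."
assert normalize_suffix("banana") is None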
SwoopSearch/pyaddress | address/address.py | Address.check_street | def check_street(self, token):
"""
Let's assume a street comes before a prefix and after a suffix. This isn't always the case, but we'll deal
with that in our guessing game. Also, two word street names...well...
This check must come after the checks for house_number and street_prefix to help us deal with multi word streets.
"""
# First check for single word streets between a prefix and a suffix
if self.street is None and self.street_suffix is not None and self.street_prefix is None and self.house_number is None:
self.street = self._clean(token.capitalize())
return True
# Now check for multiple word streets. This check must come after the check for street_prefix and house_number for this reason.
elif self.street is not None and self.street_suffix is not None and self.street_prefix is None and self.house_number is None:
self.street = self._clean(token.capitalize() + ' ' + self.street)
return True
if not self.street_suffix and not self.street and token.lower() in self.parser.streets:
self.street = self._clean(token)
return True
return False | python | def check_street(self, token):
"""
Let's assume a street comes before a prefix and after a suffix. This isn't always the case, but we'll deal
with that in our guessing game. Also, two word street names...well...
This check must come after the checks for house_number and street_prefix to help us deal with multi word streets.
"""
# First check for single word streets between a prefix and a suffix
if self.street is None and self.street_suffix is not None and self.street_prefix is None and self.house_number is None:
self.street = self._clean(token.capitalize())
return True
# Now check for multiple word streets. This check must come after the check for street_prefix and house_number for this reason.
elif self.street is not None and self.street_suffix is not None and self.street_prefix is None and self.house_number is None:
self.street = self._clean(token.capitalize() + ' ' + self.street)
return True
if not self.street_suffix and not self.street and token.lower() in self.parser.streets:
self.street = self._clean(token)
return True
return False | [
"def",
"check_street",
"(",
"self",
",",
"token",
")",
":",
"# First check for single word streets between a prefix and a suffix",
"if",
"self",
".",
"street",
"is",
"None",
"and",
"self",
".",
"street_suffix",
"is",
"not",
"None",
"and",
"self",
".",
"street_prefix",
"is",
"None",
"and",
"self",
".",
"house_number",
"is",
"None",
":",
"self",
".",
"street",
"=",
"self",
".",
"_clean",
"(",
"token",
".",
"capitalize",
"(",
")",
")",
"return",
"True",
"# Now check for multiple word streets. This check must come after the check for street_prefix and house_number for this reason.",
"elif",
"self",
".",
"street",
"is",
"not",
"None",
"and",
"self",
".",
"street_suffix",
"is",
"not",
"None",
"and",
"self",
".",
"street_prefix",
"is",
"None",
"and",
"self",
".",
"house_number",
"is",
"None",
":",
"self",
".",
"street",
"=",
"self",
".",
"_clean",
"(",
"token",
".",
"capitalize",
"(",
")",
"+",
"' '",
"+",
"self",
".",
"street",
")",
"return",
"True",
"if",
"not",
"self",
".",
"street_suffix",
"and",
"not",
"self",
".",
"street",
"and",
"token",
".",
"lower",
"(",
")",
"in",
"self",
".",
"parser",
".",
"streets",
":",
"self",
".",
"street",
"=",
"self",
".",
"_clean",
"(",
"token",
")",
"return",
"True",
"return",
"False"
] | Let's assume a street comes before a prefix and after a suffix. This isn't always the case, but we'll deal
with that in our guessing game. Also, two word street names...well...
This check must come after the checks for house_number and street_prefix to help us deal with multi word streets. | [
"Let",
"s",
"assume",
"a",
"street",
"comes",
"before",
"a",
"prefix",
"and",
"after",
"a",
"suffix",
".",
"This",
"isn",
"t",
"always",
"the",
"case",
"but",
"we",
"ll",
"deal",
"with",
"that",
"in",
"our",
"guessing",
"game",
".",
"Also",
"two",
"word",
"street",
"names",
"...",
"well",
"..."
] | 62ebb07a6840e710d256406a8ec1d06abec0e1c4 | https://github.com/SwoopSearch/pyaddress/blob/62ebb07a6840e710d256406a8ec1d06abec0e1c4/address/address.py#L395-L413 | train |
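The multi-word handling above works because tokens are consumed right to left and each new token is prepended to the street collected so far; a small standalone illustration of that accumulation (token order and values are illustrative).

def accumulate_street(tokens_right_to_left):
    # Each new token goes in front of what has been collected so far.
    street = None
    for token in tokens_right_to_left:
        street = token.capitalize() if street is None else token.capitalize() + ' ' + street
    return street

# "Martin Luther King" scanned from the end of the address back toward the front:
assert accumulate_street(["king", "luther", "martin"]) == "Martin Luther King"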
SwoopSearch/pyaddress | address/address.py | Address.check_street_prefix | def check_street_prefix(self, token):
"""
Finds street prefixes, such as N. or Northwest, before a street name. Standardizes to 1 or two letters, followed
by a period.
"""
if self.street and not self.street_prefix and token.lower().replace('.', '') in self.parser.prefixes.keys():
self.street_prefix = self._clean(self.parser.prefixes[token.lower().replace('.', '')])
return True
return False | python | def check_street_prefix(self, token):
"""
Finds street prefixes, such as N. or Northwest, before a street name. Standardizes to 1 or two letters, followed
by a period.
"""
if self.street and not self.street_prefix and token.lower().replace('.', '') in self.parser.prefixes.keys():
self.street_prefix = self._clean(self.parser.prefixes[token.lower().replace('.', '')])
return True
return False | [
"def",
"check_street_prefix",
"(",
"self",
",",
"token",
")",
":",
"if",
"self",
".",
"street",
"and",
"not",
"self",
".",
"street_prefix",
"and",
"token",
".",
"lower",
"(",
")",
".",
"replace",
"(",
"'.'",
",",
"''",
")",
"in",
"self",
".",
"parser",
".",
"prefixes",
".",
"keys",
"(",
")",
":",
"self",
".",
"street_prefix",
"=",
"self",
".",
"_clean",
"(",
"self",
".",
"parser",
".",
"prefixes",
"[",
"token",
".",
"lower",
"(",
")",
".",
"replace",
"(",
"'.'",
",",
"''",
")",
"]",
")",
"return",
"True",
"return",
"False"
] | Finds street prefixes, such as N. or Northwest, before a street name. Standardizes to 1 or two letters, followed
by a period. | [
"Finds",
"street",
"prefixes",
"such",
"as",
"N",
".",
"or",
"Northwest",
"before",
"a",
"street",
"name",
".",
"Standardizes",
"to",
"1",
"or",
"two",
"letters",
"followed",
"by",
"a",
"period",
"."
] | 62ebb07a6840e710d256406a8ec1d06abec0e1c4 | https://github.com/SwoopSearch/pyaddress/blob/62ebb07a6840e710d256406a8ec1d06abec0e1c4/address/address.py#L415-L423 | train |
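A small sketch of the prefix lookup; PREFIXES is an illustrative stand-in for self.parser.prefixes, which maps lowercase long and short forms to the abbreviated form.

PREFIXES = {"north": "N.", "n": "N.", "northwest": "NW.", "nw": "NW."}

def normalize_prefix(token):
    # Periods are stripped before the lookup, so "N." and "n" resolve the same way.
    return PREFIXES.get(token.lower().replace('.', ''))

assert normalize_prefix("N.") == "N."
assert normalize_prefix("northwest") == "NW."
assert normalize_prefix("main") is None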
SwoopSearch/pyaddress | address/address.py | Address.check_house_number | def check_house_number(self, token):
"""
Attempts to find a house number, generally the first thing in an address. If anything is in front of it,
we assume it is a building name.
"""
if self.street and self.house_number is None and re.match(street_num_regex, token.lower()):
if '/' in token:
token = token.split('/')[0]
if '-' in token:
token = token.split('-')[0]
self.house_number = self._clean(str(token))
return True
return False | python | def check_house_number(self, token):
"""
Attempts to find a house number, generally the first thing in an address. If anything is in front of it,
we assume it is a building name.
"""
if self.street and self.house_number is None and re.match(street_num_regex, token.lower()):
if '/' in token:
token = token.split('/')[0]
if '-' in token:
token = token.split('-')[0]
self.house_number = self._clean(str(token))
return True
return False | [
"def",
"check_house_number",
"(",
"self",
",",
"token",
")",
":",
"if",
"self",
".",
"street",
"and",
"self",
".",
"house_number",
"is",
"None",
"and",
"re",
".",
"match",
"(",
"street_num_regex",
",",
"token",
".",
"lower",
"(",
")",
")",
":",
"if",
"'/'",
"in",
"token",
":",
"token",
"=",
"token",
".",
"split",
"(",
"'/'",
")",
"[",
"0",
"]",
"if",
"'-'",
"in",
"token",
":",
"token",
"=",
"token",
".",
"split",
"(",
"'-'",
")",
"[",
"0",
"]",
"self",
".",
"house_number",
"=",
"self",
".",
"_clean",
"(",
"str",
"(",
"token",
")",
")",
"return",
"True",
"return",
"False"
] | Attempts to find a house number, generally the first thing in an address. If anything is in front of it,
we assume it is a building name. | [
"Attempts",
"to",
"find",
"a",
"house",
"number",
"generally",
"the",
"first",
"thing",
"in",
"an",
"address",
".",
"If",
"anything",
"is",
"in",
"front",
"of",
"it",
"we",
"assume",
"it",
"is",
"a",
"building",
"name",
"."
] | 62ebb07a6840e710d256406a8ec1d06abec0e1c4 | https://github.com/SwoopSearch/pyaddress/blob/62ebb07a6840e710d256406a8ec1d06abec0e1c4/address/address.py#L425-L437 | train |
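A self-contained sketch of the house-number handling; STREET_NUM is an illustrative stand-in for street_num_regex (whose exact pattern is not shown here), and the split keeps only the part before a fraction or range, as in the method above.

import re

STREET_NUM = re.compile(r'^\d+([/-]\d+)?$')   # assumption: digits, optionally "/n" or "-n"

def extract_house_number(token):
    if not STREET_NUM.match(token.lower()):
        return None
    for sep in ('/', '-'):                     # "120-122" -> "120", "1201/2" -> "1201"
        if sep in token:
            token = token.split(sep)[0]
    return token

assert extract_house_number("120-122") == "120"
assert extract_house_number("1201/2") == "1201"
assert extract_house_number("main") is None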
SwoopSearch/pyaddress | address/address.py | Address.check_building | def check_building(self, token):
"""
Building name check. If we have leftover and everything else is set, probably building names.
Allows for multi word building names.
"""
if self.street and self.house_number:
if not self.building:
self.building = self._clean(token)
else:
self.building = self._clean(token + ' ' + self.building)
return True
return False | python | def check_building(self, token):
"""
Building name check. If we have leftover and everything else is set, probably building names.
Allows for multi word building names.
"""
if self.street and self.house_number:
if not self.building:
self.building = self._clean(token)
else:
self.building = self._clean(token + ' ' + self.building)
return True
return False | [
"def",
"check_building",
"(",
"self",
",",
"token",
")",
":",
"if",
"self",
".",
"street",
"and",
"self",
".",
"house_number",
":",
"if",
"not",
"self",
".",
"building",
":",
"self",
".",
"building",
"=",
"self",
".",
"_clean",
"(",
"token",
")",
"else",
":",
"self",
".",
"building",
"=",
"self",
".",
"_clean",
"(",
"token",
"+",
"' '",
"+",
"self",
".",
"building",
")",
"return",
"True",
"return",
"False"
] | Building name check. If we have leftover and everything else is set, probably building names.
Allows for multi word building names. | [
"Building",
"name",
"check",
".",
"If",
"we",
"have",
"leftover",
"and",
"everything",
"else",
"is",
"set",
"probably",
"building",
"names",
".",
"Allows",
"for",
"multi",
"word",
"building",
"names",
"."
] | 62ebb07a6840e710d256406a8ec1d06abec0e1c4 | https://github.com/SwoopSearch/pyaddress/blob/62ebb07a6840e710d256406a8ec1d06abec0e1c4/address/address.py#L439-L450 | train |
SwoopSearch/pyaddress | address/address.py | Address.guess_unmatched | def guess_unmatched(self, token):
"""
When we find something that doesn't match, we can make an educated guess and log it as such.
"""
# Check if this is probably an apartment:
if token.lower() in ['apt', 'apartment']:
return False
# Stray dashes are likely useless
if token.strip() == '-':
return True
# Almost definitely not a street if it is one or two characters long.
if len(token) <= 2:
return False
# Let's check for a suffix-less street.
if self.street_suffix is None and self.street is None and self.street_prefix is None and self.house_number is None:
# Streets will just be letters
if re.match(r"[A-Za-z]", token):
if self.line_number >= 0:
pass
# print "{0}: Guessing suffix-less street: ".format(self.line_number), token
else:
# print "Guessing suffix-less street: ", token
pass
self.street = self._clean(token.capitalize())
return True
return False | python | def guess_unmatched(self, token):
"""
When we find something that doesn't match, we can make an educated guess and log it as such.
"""
# Check if this is probably an apartment:
if token.lower() in ['apt', 'apartment']:
return False
# Stray dashes are likely useless
if token.strip() == '-':
return True
# Almost definitely not a street if it is one or two characters long.
if len(token) <= 2:
return False
# Let's check for a suffix-less street.
if self.street_suffix is None and self.street is None and self.street_prefix is None and self.house_number is None:
# Streets will just be letters
if re.match(r"[A-Za-z]", token):
if self.line_number >= 0:
pass
# print "{0}: Guessing suffix-less street: ".format(self.line_number), token
else:
# print "Guessing suffix-less street: ", token
pass
self.street = self._clean(token.capitalize())
return True
return False | [
"def",
"guess_unmatched",
"(",
"self",
",",
"token",
")",
":",
"# Check if this is probably an apartment:",
"if",
"token",
".",
"lower",
"(",
")",
"in",
"[",
"'apt'",
",",
"'apartment'",
"]",
":",
"return",
"False",
"# Stray dashes are likely useless",
"if",
"token",
".",
"strip",
"(",
")",
"==",
"'-'",
":",
"return",
"True",
"# Almost definitely not a street if it is one or two characters long.",
"if",
"len",
"(",
"token",
")",
"<=",
"2",
":",
"return",
"False",
"# Let's check for a suffix-less street.",
"if",
"self",
".",
"street_suffix",
"is",
"None",
"and",
"self",
".",
"street",
"is",
"None",
"and",
"self",
".",
"street_prefix",
"is",
"None",
"and",
"self",
".",
"house_number",
"is",
"None",
":",
"# Streets will just be letters",
"if",
"re",
".",
"match",
"(",
"r\"[A-Za-z]\"",
",",
"token",
")",
":",
"if",
"self",
".",
"line_number",
">=",
"0",
":",
"pass",
"# print \"{0}: Guessing suffix-less street: \".format(self.line_number), token",
"else",
":",
"# print \"Guessing suffix-less street: \", token",
"pass",
"self",
".",
"street",
"=",
"self",
".",
"_clean",
"(",
"token",
".",
"capitalize",
"(",
")",
")",
"return",
"True",
"return",
"False"
] | When we find something that doesn't match, we can make an educated guess and log it as such. | [
"When",
"we",
"find",
"something",
"that",
"doesn",
"t",
"match",
"we",
"can",
"make",
"an",
"educated",
"guess",
"and",
"log",
"it",
"as",
"such",
"."
] | 62ebb07a6840e710d256406a8ec1d06abec0e1c4 | https://github.com/SwoopSearch/pyaddress/blob/62ebb07a6840e710d256406a8ec1d06abec0e1c4/address/address.py#L452-L477 | train |
SwoopSearch/pyaddress | address/address.py | Address.full_address | def full_address(self):
"""
Print the address in a human readable format
"""
addr = ""
# if self.building:
# addr = addr + "(" + self.building + ") "
if self.house_number:
addr = addr + self.house_number
if self.street_prefix:
addr = addr + " " + self.street_prefix
if self.street:
addr = addr + " " + self.street
if self.street_suffix:
addr = addr + " " + self.street_suffix
if self.apartment:
addr = addr + " " + self.apartment
if self.city:
addr = addr + ", " + self.city
if self.state:
addr = addr + ", " + self.state
if self.zip:
addr = addr + " " + self.zip
return addr | python | def full_address(self):
"""
Print the address in a human readable format
"""
addr = ""
# if self.building:
# addr = addr + "(" + self.building + ") "
if self.house_number:
addr = addr + self.house_number
if self.street_prefix:
addr = addr + " " + self.street_prefix
if self.street:
addr = addr + " " + self.street
if self.street_suffix:
addr = addr + " " + self.street_suffix
if self.apartment:
addr = addr + " " + self.apartment
if self.city:
addr = addr + ", " + self.city
if self.state:
addr = addr + ", " + self.state
if self.zip:
addr = addr + " " + self.zip
return addr | [
"def",
"full_address",
"(",
"self",
")",
":",
"addr",
"=",
"\"\"",
"# if self.building:",
"# addr = addr + \"(\" + self.building + \") \"",
"if",
"self",
".",
"house_number",
":",
"addr",
"=",
"addr",
"+",
"self",
".",
"house_number",
"if",
"self",
".",
"street_prefix",
":",
"addr",
"=",
"addr",
"+",
"\" \"",
"+",
"self",
".",
"street_prefix",
"if",
"self",
".",
"street",
":",
"addr",
"=",
"addr",
"+",
"\" \"",
"+",
"self",
".",
"street",
"if",
"self",
".",
"street_suffix",
":",
"addr",
"=",
"addr",
"+",
"\" \"",
"+",
"self",
".",
"street_suffix",
"if",
"self",
".",
"apartment",
":",
"addr",
"=",
"addr",
"+",
"\" \"",
"+",
"self",
".",
"apartment",
"if",
"self",
".",
"city",
":",
"addr",
"=",
"addr",
"+",
"\", \"",
"+",
"self",
".",
"city",
"if",
"self",
".",
"state",
":",
"addr",
"=",
"addr",
"+",
"\", \"",
"+",
"self",
".",
"state",
"if",
"self",
".",
"zip",
":",
"addr",
"=",
"addr",
"+",
"\" \"",
"+",
"self",
".",
"zip",
"return",
"addr"
] | Print the address in a human readable format | [
"Print",
"the",
"address",
"in",
"a",
"human",
"readable",
"format"
] | 62ebb07a6840e710d256406a8ec1d06abec0e1c4 | https://github.com/SwoopSearch/pyaddress/blob/62ebb07a6840e710d256406a8ec1d06abec0e1c4/address/address.py#L479-L502 | train |
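A compact sketch of the assembly order used by full_address, with illustrative values; the joining behavior (spaces within the street part, commas before city and state, a space before the ZIP) mirrors the method above.

def assemble(house_number=None, street_prefix=None, street=None, street_suffix=None,
             apartment=None, city=None, state=None, zip_code=None):
    parts = [p for p in (house_number, street_prefix, street, street_suffix, apartment) if p]
    addr = ' '.join(parts)
    if city:
        addr += ', ' + city
    if state:
        addr += ', ' + state
    if zip_code:
        addr += ' ' + zip_code
    return addr

assert assemble(house_number="123", street_prefix="N.", street="Main", street_suffix="St.",
                city="Springfield", state="IL", zip_code="62704") == \
    "123 N. Main St., Springfield, IL 62704"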
SwoopSearch/pyaddress | address/address.py | Address._get_dstk_intersections | def _get_dstk_intersections(self, address, dstk_address):
"""
Find the unique tokens in the original address and the returned address.
"""
# Normalize both addresses
normalized_address = self._normalize(address)
normalized_dstk_address = self._normalize(dstk_address)
address_uniques = set(normalized_address) - set(normalized_dstk_address)
dstk_address_uniques = set(normalized_dstk_address) - set(normalized_address)
if self.logger: self.logger.debug("Address Uniques {0}".format(address_uniques))
if self.logger: self.logger.debug("DSTK Address Uniques {0}".format(dstk_address_uniques))
return (len(address_uniques), len(dstk_address_uniques)) | python | def _get_dstk_intersections(self, address, dstk_address):
"""
Find the unique tokens in the original address and the returned address.
"""
# Normalize both addresses
normalized_address = self._normalize(address)
normalized_dstk_address = self._normalize(dstk_address)
address_uniques = set(normalized_address) - set(normalized_dstk_address)
dstk_address_uniques = set(normalized_dstk_address) - set(normalized_address)
if self.logger: self.logger.debug("Address Uniques {0}".format(address_uniques))
if self.logger: self.logger.debug("DSTK Address Uniques {0}".format(dstk_address_uniques))
return (len(address_uniques), len(dstk_address_uniques)) | [
"def",
"_get_dstk_intersections",
"(",
"self",
",",
"address",
",",
"dstk_address",
")",
":",
"# Normalize both addresses",
"normalized_address",
"=",
"self",
".",
"_normalize",
"(",
"address",
")",
"normalized_dstk_address",
"=",
"self",
".",
"_normalize",
"(",
"dstk_address",
")",
"address_uniques",
"=",
"set",
"(",
"normalized_address",
")",
"-",
"set",
"(",
"normalized_dstk_address",
")",
"dstk_address_uniques",
"=",
"set",
"(",
"normalized_dstk_address",
")",
"-",
"set",
"(",
"normalized_address",
")",
"if",
"self",
".",
"logger",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"\"Address Uniques {0}\"",
".",
"format",
"(",
"address_uniques",
")",
")",
"if",
"self",
".",
"logger",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"\"DSTK Address Uniques {0}\"",
".",
"format",
"(",
"dstk_address_uniques",
")",
")",
"return",
"(",
"len",
"(",
"address_uniques",
")",
",",
"len",
"(",
"dstk_address_uniques",
")",
")"
] | Find the unique tokens in the original address and the returned address. | [
"Find",
"the",
"unique",
"tokens",
"in",
"the",
"original",
"address",
"and",
"the",
"returned",
"address",
"."
] | 62ebb07a6840e710d256406a8ec1d06abec0e1c4 | https://github.com/SwoopSearch/pyaddress/blob/62ebb07a6840e710d256406a8ec1d06abec0e1c4/address/address.py#L637-L648 | train |
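The uniqueness counts reduce to two set differences over token lists; a minimal illustration. In the real method both addresses first go through _normalize (next record), which would collapse pairs such as "st"/"street"; that step is omitted here to keep the set arithmetic visible.

original = "123 n main st".split()
returned = "123 north main street apt 4".split()

only_original = set(original) - set(returned)   # tokens only in the original address
only_returned = set(returned) - set(original)   # tokens only in the returned address

assert only_original == {"n", "st"}
assert only_returned == {"north", "street", "apt", "4"}
assert (len(only_original), len(only_returned)) == (2, 4)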
SwoopSearch/pyaddress | address/address.py | Address._normalize | def _normalize(self, address):
"""
Normalize prefixes, suffixes and other to make matching original to returned easier.
"""
normalized_address = []
if self.logger: self.logger.debug("Normalizing Address: {0}".format(address))
for token in address.split():
if token.upper() in self.parser.suffixes.keys():
normalized_address.append(self.parser.suffixes[token.upper()].lower())
elif token.upper() in self.parser.suffixes.values():
normalized_address.append(token.lower())
elif token.upper().replace('.', '') in self.parser.suffixes.values():
normalized_address.append(token.lower().replace('.', ''))
elif token.lower() in self.parser.prefixes.keys():
normalized_address.append(self.parser.prefixes[token.lower()].lower())
elif token.upper() in self.parser.prefixes.values():
normalized_address.append(token.lower()[:-1])
elif token.upper() + '.' in self.parser.prefixes.values():
normalized_address.append(token.lower())
else:
normalized_address.append(token.lower())
return normalized_address | python | def _normalize(self, address):
"""
Normalize prefixes, suffixes and other to make matching original to returned easier.
"""
normalized_address = []
if self.logger: self.logger.debug("Normalizing Address: {0}".format(address))
for token in address.split():
if token.upper() in self.parser.suffixes.keys():
normalized_address.append(self.parser.suffixes[token.upper()].lower())
elif token.upper() in self.parser.suffixes.values():
normalized_address.append(token.lower())
elif token.upper().replace('.', '') in self.parser.suffixes.values():
normalized_address.append(token.lower().replace('.', ''))
elif token.lower() in self.parser.prefixes.keys():
normalized_address.append(self.parser.prefixes[token.lower()].lower())
elif token.upper() in self.parser.prefixes.values():
normalized_address.append(token.lower()[:-1])
elif token.upper() + '.' in self.parser.prefixes.values():
normalized_address.append(token.lower())
else:
normalized_address.append(token.lower())
return normalized_address | [
"def",
"_normalize",
"(",
"self",
",",
"address",
")",
":",
"normalized_address",
"=",
"[",
"]",
"if",
"self",
".",
"logger",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"\"Normalizing Address: {0}\"",
".",
"format",
"(",
"address",
")",
")",
"for",
"token",
"in",
"address",
".",
"split",
"(",
")",
":",
"if",
"token",
".",
"upper",
"(",
")",
"in",
"self",
".",
"parser",
".",
"suffixes",
".",
"keys",
"(",
")",
":",
"normalized_address",
".",
"append",
"(",
"self",
".",
"parser",
".",
"suffixes",
"[",
"token",
".",
"upper",
"(",
")",
"]",
".",
"lower",
"(",
")",
")",
"elif",
"token",
".",
"upper",
"(",
")",
"in",
"self",
".",
"parser",
".",
"suffixes",
".",
"values",
"(",
")",
":",
"normalized_address",
".",
"append",
"(",
"token",
".",
"lower",
"(",
")",
")",
"elif",
"token",
".",
"upper",
"(",
")",
".",
"replace",
"(",
"'.'",
",",
"''",
")",
"in",
"self",
".",
"parser",
".",
"suffixes",
".",
"values",
"(",
")",
":",
"normalized_address",
".",
"append",
"(",
"token",
".",
"lower",
"(",
")",
".",
"replace",
"(",
"'.'",
",",
"''",
")",
")",
"elif",
"token",
".",
"lower",
"(",
")",
"in",
"self",
".",
"parser",
".",
"prefixes",
".",
"keys",
"(",
")",
":",
"normalized_address",
".",
"append",
"(",
"self",
".",
"parser",
".",
"prefixes",
"[",
"token",
".",
"lower",
"(",
")",
"]",
".",
"lower",
"(",
")",
")",
"elif",
"token",
".",
"upper",
"(",
")",
"in",
"self",
".",
"parser",
".",
"prefixes",
".",
"values",
"(",
")",
":",
"normalized_address",
".",
"append",
"(",
"token",
".",
"lower",
"(",
")",
"[",
":",
"-",
"1",
"]",
")",
"elif",
"token",
".",
"upper",
"(",
")",
"+",
"'.'",
"in",
"self",
".",
"parser",
".",
"prefixes",
".",
"values",
"(",
")",
":",
"normalized_address",
".",
"append",
"(",
"token",
".",
"lower",
"(",
")",
")",
"else",
":",
"normalized_address",
".",
"append",
"(",
"token",
".",
"lower",
"(",
")",
")",
"return",
"normalized_address"
] | Normalize prefixes, suffixes and other to make matching original to returned easier. | [
"Normalize",
"prefixes",
"suffixes",
"and",
"other",
"to",
"make",
"matching",
"original",
"to",
"returned",
"easier",
"."
] | 62ebb07a6840e710d256406a8ec1d06abec0e1c4 | https://github.com/SwoopSearch/pyaddress/blob/62ebb07a6840e710d256406a8ec1d06abec0e1c4/address/address.py#L650-L671 | train |
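A compact, self-contained sketch of the token-by-token normalization; the SUFFIXES and PREFIXES tables are small illustrative stand-ins for the parser's full lookup tables.

SUFFIXES = {"STREET": "ST", "AVENUE": "AVE"}   # long form -> abbreviation
PREFIXES = {"north": "N.", "n": "N."}          # lowercase form -> abbreviation with period

def normalize(address):
    out = []
    for token in address.split():
        stripped = token.upper().replace('.', '')
        if token.upper() in SUFFIXES:                 # "Street" -> "st"
            out.append(SUFFIXES[token.upper()].lower())
        elif stripped in SUFFIXES.values():           # "St." / "ST" -> "st"
            out.append(stripped.lower())
        elif token.lower() in PREFIXES:               # "North" / "n" -> "n"
            out.append(PREFIXES[token.lower()].lower().rstrip('.'))
        elif token.upper() in PREFIXES.values():      # "N." -> "n"
            out.append(token.lower().rstrip('.'))
        else:
            out.append(token.lower())
    return out

assert normalize("123 North Main Street") == normalize("123 N. Main St.") == ["123", "n", "main", "st"]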
AlexandreDecan/python-intervals | intervals.py | empty | def empty():
"""
Create an empty set.
"""
if not hasattr(empty, '_instance'):
empty._instance = Interval(AtomicInterval(OPEN, inf, -inf, OPEN))
return empty._instance | python | def empty():
"""
Create an empty set.
"""
if not hasattr(empty, '_instance'):
empty._instance = Interval(AtomicInterval(OPEN, inf, -inf, OPEN))
return empty._instance | [
"def",
"empty",
"(",
")",
":",
"if",
"not",
"hasattr",
"(",
"empty",
",",
"'_instance'",
")",
":",
"empty",
".",
"_instance",
"=",
"Interval",
"(",
"AtomicInterval",
"(",
"OPEN",
",",
"inf",
",",
"-",
"inf",
",",
"OPEN",
")",
")",
"return",
"empty",
".",
"_instance"
] | Create an empty set. | [
"Create",
"an",
"empty",
"set",
"."
] | eda4da7dd39afabab2c1689e0b5158abae08c831 | https://github.com/AlexandreDecan/python-intervals/blob/eda4da7dd39afabab2c1689e0b5158abae08c831/intervals.py#L115-L121 | train |
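The function above memoizes its result on a function attribute so every call returns the same instance; the same pattern in isolation:

def singleton():
    # First call builds the object and caches it on the function itself;
    # every later call returns that cached instance.
    if not hasattr(singleton, '_instance'):
        singleton._instance = object()
    return singleton._instance

assert singleton() is singleton()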
AlexandreDecan/python-intervals | intervals.py | from_data | def from_data(data, conv=None, pinf=float('inf'), ninf=float('-inf')):
"""
Import an interval from a piece of data.
:param data: a list of 4-uples (left, lower, upper, right).
:param conv: function that is used to convert "lower" and "upper" to bounds, default to identity.
:param pinf: value used to represent positive infinity.
:param ninf: value used to represent negative infinity.
:return: an Interval instance.
"""
intervals = []
conv = (lambda v: v) if conv is None else conv
def _convert(bound):
if bound == pinf:
return inf
elif bound == ninf:
return -inf
else:
return conv(bound)
for item in data:
left, lower, upper, right = item
intervals.append(AtomicInterval(
left,
_convert(lower),
_convert(upper),
right
))
return Interval(*intervals) | python | def from_data(data, conv=None, pinf=float('inf'), ninf=float('-inf')):
"""
Import an interval from a piece of data.
:param data: a list of 4-uples (left, lower, upper, right).
:param conv: function that is used to convert "lower" and "upper" to bounds, default to identity.
:param pinf: value used to represent positive infinity.
:param ninf: value used to represent negative infinity.
:return: an Interval instance.
"""
intervals = []
conv = (lambda v: v) if conv is None else conv
def _convert(bound):
if bound == pinf:
return inf
elif bound == ninf:
return -inf
else:
return conv(bound)
for item in data:
left, lower, upper, right = item
intervals.append(AtomicInterval(
left,
_convert(lower),
_convert(upper),
right
))
return Interval(*intervals) | [
"def",
"from_data",
"(",
"data",
",",
"conv",
"=",
"None",
",",
"pinf",
"=",
"float",
"(",
"'inf'",
")",
",",
"ninf",
"=",
"float",
"(",
"'-inf'",
")",
")",
":",
"intervals",
"=",
"[",
"]",
"conv",
"=",
"(",
"lambda",
"v",
":",
"v",
")",
"if",
"conv",
"is",
"None",
"else",
"conv",
"def",
"_convert",
"(",
"bound",
")",
":",
"if",
"bound",
"==",
"pinf",
":",
"return",
"inf",
"elif",
"bound",
"==",
"ninf",
":",
"return",
"-",
"inf",
"else",
":",
"return",
"conv",
"(",
"bound",
")",
"for",
"item",
"in",
"data",
":",
"left",
",",
"lower",
",",
"upper",
",",
"right",
"=",
"item",
"intervals",
".",
"append",
"(",
"AtomicInterval",
"(",
"left",
",",
"_convert",
"(",
"lower",
")",
",",
"_convert",
"(",
"upper",
")",
",",
"right",
")",
")",
"return",
"Interval",
"(",
"*",
"intervals",
")"
] | Import an interval from a piece of data.
:param data: a list of 4-uples (left, lower, upper, right).
:param conv: function that is used to convert "lower" and "upper" to bounds, default to identity.
:param pinf: value used to represent positive infinity.
:param ninf: value used to represent negative infinity.
:return: an Interval instance. | [
"Import",
"an",
"interval",
"from",
"a",
"piece",
"of",
"data",
"."
] | eda4da7dd39afabab2c1689e0b5158abae08c831 | https://github.com/AlexandreDecan/python-intervals/blob/eda4da7dd39afabab2c1689e0b5158abae08c831/intervals.py#L228-L257 | train |
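A hedged usage sketch of from_data. It assumes the module is importable as intervals and exposes a CLOSED bound marker alongside the OPEN one used in the code above; float('inf') matches the default pinf sentinel.

import intervals as I

# [0, 1] union (2, +inf): each 4-tuple is (left, lower, upper, right).
data = [(I.CLOSED, 0, 1, I.CLOSED), (I.OPEN, 2, float('inf'), I.OPEN)]
interval = I.from_data(data)
print(interval)   # expected to render as something like [0,1] | (2,+inf)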
AlexandreDecan/python-intervals | intervals.py | AtomicInterval.is_empty | def is_empty(self):
"""
Test interval emptiness.
:return: True if interval is empty, False otherwise.
"""
return (
self._lower > self._upper or
(self._lower == self._upper and (self._left == OPEN or self._right == OPEN))
) | python | def is_empty(self):
"""
Test interval emptiness.
:return: True if interval is empty, False otherwise.
"""
return (
self._lower > self._upper or
(self._lower == self._upper and (self._left == OPEN or self._right == OPEN))
) | [
"def",
"is_empty",
"(",
"self",
")",
":",
"return",
"(",
"self",
".",
"_lower",
">",
"self",
".",
"_upper",
"or",
"(",
"self",
".",
"_lower",
"==",
"self",
".",
"_upper",
"and",
"(",
"self",
".",
"_left",
"==",
"OPEN",
"or",
"self",
".",
"_right",
"==",
"OPEN",
")",
")",
")"
] | Test interval emptiness.
:return: True if interval is empty, False otherwise. | [
"Test",
"interval",
"emptiness",
"."
] | eda4da7dd39afabab2c1689e0b5158abae08c831 | https://github.com/AlexandreDecan/python-intervals/blob/eda4da7dd39afabab2c1689e0b5158abae08c831/intervals.py#L355-L364 | train |
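The emptiness test reduces to two cases: inverted bounds, or a degenerate interval with an open side. A standalone illustration of the predicate (the OPEN/CLOSED values here are placeholders, not the library's actual constants):

OPEN, CLOSED = 0, 1

def interval_is_empty(left, lower, upper, right):
    return lower > upper or (lower == upper and (left == OPEN or right == OPEN))

assert interval_is_empty(CLOSED, 2, 1, CLOSED)       # inverted bounds
assert interval_is_empty(OPEN, 3, 3, CLOSED)         # (3, 3] contains nothing
assert not interval_is_empty(CLOSED, 3, 3, CLOSED)   # [3, 3] is the singleton {3}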
AlexandreDecan/python-intervals | intervals.py | Interval.to_atomic | def to_atomic(self):
"""
Return the smallest atomic interval containing this interval.
:return: an AtomicInterval instance.
"""
lower = self._intervals[0].lower
left = self._intervals[0].left
upper = self._intervals[-1].upper
right = self._intervals[-1].right
return AtomicInterval(left, lower, upper, right) | python | def to_atomic(self):
"""
Return the smallest atomic interval containing this interval.
:return: an AtomicInterval instance.
"""
lower = self._intervals[0].lower
left = self._intervals[0].left
upper = self._intervals[-1].upper
right = self._intervals[-1].right
return AtomicInterval(left, lower, upper, right) | [
"def",
"to_atomic",
"(",
"self",
")",
":",
"lower",
"=",
"self",
".",
"_intervals",
"[",
"0",
"]",
".",
"lower",
"left",
"=",
"self",
".",
"_intervals",
"[",
"0",
"]",
".",
"left",
"upper",
"=",
"self",
".",
"_intervals",
"[",
"-",
"1",
"]",
".",
"upper",
"right",
"=",
"self",
".",
"_intervals",
"[",
"-",
"1",
"]",
".",
"right",
"return",
"AtomicInterval",
"(",
"left",
",",
"lower",
",",
"upper",
",",
"right",
")"
] | Return the smallest atomic interval containing this interval.
:return: an AtomicInterval instance. | [
"Return",
"the",
"smallest",
"atomic",
"interval",
"containing",
"this",
"interval",
"."
] | eda4da7dd39afabab2c1689e0b5158abae08c831 | https://github.com/AlexandreDecan/python-intervals/blob/eda4da7dd39afabab2c1689e0b5158abae08c831/intervals.py#L730-L741 | train |
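to_atomic builds the enclosing hull from the first interval's lower bound and the last interval's upper bound. A hedged usage sketch, assuming the module is importable as intervals and provides the closed() constructor and the | union operator:

import intervals as I

union = I.closed(0, 1) | I.closed(5, 6)    # two disjoint pieces
hull = union.to_atomic()                   # smallest atomic interval covering both
assert hull == I.closed(0, 6).to_atomic()  # i.e. [0, 6]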