repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
valid_ip_prefix
def valid_ip_prefix(ip_prefix):
    """Perform a sanity check on ip_prefix.

    Arguments:
        ip_prefix (str): The IP-Prefix to validate

    Returns:
        True if ip_prefix is a valid IPv4 address with prefix length
        32 or a valid IPv6 address with prefix length 128, otherwise
        False

    """
    try:
        ip_prefix = ipaddress.ip_network(ip_prefix)
    except ValueError:
        return False
    else:
        # BUG FIX: the original compared ``max_prefixlen``, which is a
        # per-version constant (always 32 for IPv4 and 128 for IPv6), so
        # both checks were dead code and every syntactically valid network
        # was accepted.  Compare the actual ``prefixlen`` so the function
        # enforces what the docstring promises.
        if ip_prefix.version == 4 and ip_prefix.prefixlen != 32:
            return False
        if ip_prefix.version == 6 and ip_prefix.prefixlen != 128:
            return False

        return True
python
def valid_ip_prefix(ip_prefix):
    """Perform a sanity check on ip_prefix.

    Arguments:
        ip_prefix (str): The IP-Prefix to validate

    Returns:
        True if ip_prefix is a valid IPv4 address with prefix length
        32 or a valid IPv6 address with prefix length 128, otherwise
        False

    """
    try:
        ip_prefix = ipaddress.ip_network(ip_prefix)
    except ValueError:
        return False
    else:
        # BUG FIX: the original compared ``max_prefixlen``, which is a
        # per-version constant (always 32 for IPv4 and 128 for IPv6), so
        # both checks were dead code and every syntactically valid network
        # was accepted.  Compare the actual ``prefixlen`` so the function
        # enforces what the docstring promises.
        if ip_prefix.version == 4 and ip_prefix.prefixlen != 32:
            return False
        if ip_prefix.version == 6 and ip_prefix.prefixlen != 128:
            return False

        return True
[ "def", "valid_ip_prefix", "(", "ip_prefix", ")", ":", "try", ":", "ip_prefix", "=", "ipaddress", ".", "ip_network", "(", "ip_prefix", ")", "except", "ValueError", ":", "return", "False", "else", ":", "if", "ip_prefix", ".", "version", "==", "4", "and", "ip_prefix", ".", "max_prefixlen", "!=", "32", ":", "return", "False", "if", "ip_prefix", ".", "version", "==", "6", "and", "ip_prefix", ".", "max_prefixlen", "!=", "128", ":", "return", "False", "return", "True" ]
Perform a sanity check on ip_prefix. Arguments: ip_prefix (str): The IP-Prefix to validate Returns: True if ip_prefix is a valid IPv4 address with prefix length 32 or a valid IPv6 address with prefix length 128, otherwise False
[ "Perform", "a", "sanity", "check", "on", "ip_prefix", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L82-L102
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
get_ip_prefixes_from_config
def get_ip_prefixes_from_config(config, services, ip_version):
    """Build a set of IP prefixes found in service configuration files.

    Arguments:
        config (obj): A configparser object which holds our configuration.
        services (list): A list of section names which are the name of the
            service checks.
        ip_version (int): IP protocol version

    Returns:
        A set of IP prefixes.

    """
    networks = (ipaddress.ip_network(config.get(service, 'ip_prefix'))
                for service in services)

    return {network.with_prefixlen
            for network in networks
            if network.version == ip_version}
python
def get_ip_prefixes_from_config(config, services, ip_version):
    """Build a set of IP prefixes found in service configuration files.

    Arguments:
        config (obj): A configparser object which holds our configuration.
        services (list): A list of section names which are the name of the
            service checks.
        ip_version (int): IP protocol version

    Returns:
        A set of IP prefixes.

    """
    networks = (ipaddress.ip_network(config.get(service, 'ip_prefix'))
                for service in services)

    return {network.with_prefixlen
            for network in networks
            if network.version == ip_version}
[ "def", "get_ip_prefixes_from_config", "(", "config", ",", "services", ",", "ip_version", ")", ":", "ip_prefixes", "=", "set", "(", ")", "for", "service", "in", "services", ":", "ip_prefix", "=", "ipaddress", ".", "ip_network", "(", "config", ".", "get", "(", "service", ",", "'ip_prefix'", ")", ")", "if", "ip_prefix", ".", "version", "==", "ip_version", ":", "ip_prefixes", ".", "add", "(", "ip_prefix", ".", "with_prefixlen", ")", "return", "ip_prefixes" ]
Build a set of IP prefixes found in service configuration files. Arguments: config (obj): A configparser object which holds our configuration. services (list): A list of section names which are the name of the service checks. ip_version (int): IP protocol version Returns: A set of IP prefixes.
[ "Build", "a", "set", "of", "IP", "prefixes", "found", "in", "service", "configuration", "files", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L125-L145
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
ip_prefixes_sanity_check
def ip_prefixes_sanity_check(config, bird_configuration):
    """Sanity check on IP prefixes.

    Arguments:
        config (obj): A configparser object which holds our configuration.
        bird_configuration (dict): A dictionary, which holds Bird
            configuration per IP protocol version.

    """
    # Run the check once per configured IP protocol version.
    for ip_version, settings in bird_configuration.items():
        modify_ip_prefixes(config,
                           settings['config_file'],
                           settings['variable_name'],
                           settings['dummy_ip_prefix'],
                           settings['reconfigure_cmd'],
                           settings['keep_changes'],
                           settings['changes_counter'],
                           ip_version)
python
def ip_prefixes_sanity_check(config, bird_configuration):
    """Sanity check on IP prefixes.

    Arguments:
        config (obj): A configparser object which holds our configuration.
        bird_configuration (dict): A dictionary, which holds Bird
            configuration per IP protocol version.

    """
    # Run the check once per configured IP protocol version.
    for ip_version, settings in bird_configuration.items():
        modify_ip_prefixes(config,
                           settings['config_file'],
                           settings['variable_name'],
                           settings['dummy_ip_prefix'],
                           settings['reconfigure_cmd'],
                           settings['keep_changes'],
                           settings['changes_counter'],
                           ip_version)
[ "def", "ip_prefixes_sanity_check", "(", "config", ",", "bird_configuration", ")", ":", "for", "ip_version", "in", "bird_configuration", ":", "modify_ip_prefixes", "(", "config", ",", "bird_configuration", "[", "ip_version", "]", "[", "'config_file'", "]", ",", "bird_configuration", "[", "ip_version", "]", "[", "'variable_name'", "]", ",", "bird_configuration", "[", "ip_version", "]", "[", "'dummy_ip_prefix'", "]", ",", "bird_configuration", "[", "ip_version", "]", "[", "'reconfigure_cmd'", "]", ",", "bird_configuration", "[", "ip_version", "]", "[", "'keep_changes'", "]", ",", "bird_configuration", "[", "ip_version", "]", "[", "'changes_counter'", "]", ",", "ip_version", ")" ]
Sanity check on IP prefixes. Arguments: config (obj): A configparser object which holds our configuration. bird_configuration (dict): A dictionary, which holds Bird configuration per IP protocol version.
[ "Sanity", "check", "on", "IP", "prefixes", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L148-L165
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
modify_ip_prefixes
def modify_ip_prefixes(
        config,
        config_file,
        variable_name,
        dummy_ip_prefix,
        reconfigure_cmd,
        keep_changes,
        changes_counter,
        ip_version):
    """Modify IP prefixes in Bird configuration.

    Depending on the configuration either removes or reports IP prefixes
    found in Bird configuration for which we don't have a service check
    associated with them. Moreover, it adds the dummy IP prefix if it
    isn't present and ensures that the correct variable name is set.

    Arguments:
        config (obj): A configparser object which holds our configuration.
        config_file (str): The file name of bird configuration
        variable_name (str): The name of the variable set in bird
            configuration
        dummy_ip_prefix (str): The dummy IP prefix, which must be always
            present
        reconfigure_cmd (str): The command to run to trigger a
            reconfiguration on Bird daemon upon successful configuration
            update
        keep_changes (boolean): To enable keeping a history of changes
            applied to bird configuration
        changes_counter (int): The number of configuration changes to keep
        ip_version (int): IP protocol version of Bird configuration

    """
    log = logging.getLogger(PROGRAM_NAME)
    services = config.sections()
    # The daemon section holds no service check, drop it before the scan.
    services.remove('daemon')

    needs_update = False
    try:
        bird_prefixes = get_ip_prefixes_from_bird(config_file)
    except OSError as error:
        log.error("failed to open Bird configuration %s, this is a FATAL "
                  "error, thus exiting main program", error)
        sys.exit(1)

    current_name = get_variable_name_from_bird(config_file)
    if current_name is None:
        log.warning("failed to find variable name in %s, going to add it",
                    config_file)
        needs_update = True
    elif current_name != variable_name:
        log.warning("found incorrect variable name in %s, going to add the "
                    "correct one %s", current_name, variable_name)
        needs_update = True

    if dummy_ip_prefix not in bird_prefixes:
        log.warning("dummy IP prefix %s is missing from bird configuration "
                    "%s, adding it", dummy_ip_prefix, config_file)
        bird_prefixes.insert(0, dummy_ip_prefix)
        needs_update = True

    # Collect the prefixes that have a service check; the dummy prefix
    # never has one by design.
    checked_prefixes = get_ip_prefixes_from_config(config, services,
                                                   ip_version)
    checked_prefixes.add(dummy_ip_prefix)

    unchecked_prefixes = set(bird_prefixes) - checked_prefixes
    if unchecked_prefixes:
        if config.getboolean('daemon', 'purge_ip_prefixes'):
            log.warning("removing IP prefix(es) %s from %s because they "
                        "don't have a service check configured",
                        ','.join(unchecked_prefixes),
                        config_file)
            bird_prefixes[:] = [prefix for prefix in bird_prefixes
                                if prefix not in unchecked_prefixes]
            needs_update = True
        else:
            log.warning("found IP prefixes %s in %s without a service "
                        "check configured",
                        ','.join(unchecked_prefixes),
                        config_file)

    if not needs_update:
        return

    if keep_changes:
        archive_bird_conf(config_file, changes_counter)
    tempname = write_temp_bird_conf(
        dummy_ip_prefix,
        config_file,
        variable_name,
        bird_prefixes
    )
    try:
        # Atomically swap the freshly written configuration into place.
        os.rename(tempname, config_file)
    except OSError as error:
        msg = ("CRITICAL: failed to create Bird configuration {e}, "
               "this is FATAL error, thus exiting main program"
               .format(e=error))
        sys.exit("{m}".format(m=msg))
    else:
        log.info("Bird configuration for IPv%s is updated", ip_version)
    reconfigure_bird(reconfigure_cmd)
python
def modify_ip_prefixes(
        config,
        config_file,
        variable_name,
        dummy_ip_prefix,
        reconfigure_cmd,
        keep_changes,
        changes_counter,
        ip_version):
    """Modify IP prefixes in Bird configuration.

    Depending on the configuration either removes or reports IP prefixes
    found in Bird configuration for which we don't have a service check
    associated with them. Moreover, it adds the dummy IP prefix if it
    isn't present and ensures that the correct variable name is set.

    Arguments:
        config (obj): A configparser object which holds our configuration.
        config_file (str): The file name of bird configuration
        variable_name (str): The name of the variable set in bird
            configuration
        dummy_ip_prefix (str): The dummy IP prefix, which must be always
            present
        reconfigure_cmd (str): The command to run to trigger a
            reconfiguration on Bird daemon upon successful configuration
            update
        keep_changes (boolean): To enable keeping a history of changes
            applied to bird configuration
        changes_counter (int): The number of configuration changes to keep
        ip_version (int): IP protocol version of Bird configuration

    """
    log = logging.getLogger(PROGRAM_NAME)
    services = config.sections()
    # The daemon section holds no service check, drop it before the scan.
    services.remove('daemon')

    needs_update = False
    try:
        bird_prefixes = get_ip_prefixes_from_bird(config_file)
    except OSError as error:
        log.error("failed to open Bird configuration %s, this is a FATAL "
                  "error, thus exiting main program", error)
        sys.exit(1)

    current_name = get_variable_name_from_bird(config_file)
    if current_name is None:
        log.warning("failed to find variable name in %s, going to add it",
                    config_file)
        needs_update = True
    elif current_name != variable_name:
        log.warning("found incorrect variable name in %s, going to add the "
                    "correct one %s", current_name, variable_name)
        needs_update = True

    if dummy_ip_prefix not in bird_prefixes:
        log.warning("dummy IP prefix %s is missing from bird configuration "
                    "%s, adding it", dummy_ip_prefix, config_file)
        bird_prefixes.insert(0, dummy_ip_prefix)
        needs_update = True

    # Collect the prefixes that have a service check; the dummy prefix
    # never has one by design.
    checked_prefixes = get_ip_prefixes_from_config(config, services,
                                                   ip_version)
    checked_prefixes.add(dummy_ip_prefix)

    unchecked_prefixes = set(bird_prefixes) - checked_prefixes
    if unchecked_prefixes:
        if config.getboolean('daemon', 'purge_ip_prefixes'):
            log.warning("removing IP prefix(es) %s from %s because they "
                        "don't have a service check configured",
                        ','.join(unchecked_prefixes),
                        config_file)
            bird_prefixes[:] = [prefix for prefix in bird_prefixes
                                if prefix not in unchecked_prefixes]
            needs_update = True
        else:
            log.warning("found IP prefixes %s in %s without a service "
                        "check configured",
                        ','.join(unchecked_prefixes),
                        config_file)

    if not needs_update:
        return

    if keep_changes:
        archive_bird_conf(config_file, changes_counter)
    tempname = write_temp_bird_conf(
        dummy_ip_prefix,
        config_file,
        variable_name,
        bird_prefixes
    )
    try:
        # Atomically swap the freshly written configuration into place.
        os.rename(tempname, config_file)
    except OSError as error:
        msg = ("CRITICAL: failed to create Bird configuration {e}, "
               "this is FATAL error, thus exiting main program"
               .format(e=error))
        sys.exit("{m}".format(m=msg))
    else:
        log.info("Bird configuration for IPv%s is updated", ip_version)
    reconfigure_bird(reconfigure_cmd)
[ "def", "modify_ip_prefixes", "(", "config", ",", "config_file", ",", "variable_name", ",", "dummy_ip_prefix", ",", "reconfigure_cmd", ",", "keep_changes", ",", "changes_counter", ",", "ip_version", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "PROGRAM_NAME", ")", "services", "=", "config", ".", "sections", "(", ")", "services", ".", "remove", "(", "'daemon'", ")", "# not needed during sanity check for IP-Prefixes", "update_bird_conf", "=", "False", "try", ":", "ip_prefixes_in_bird", "=", "get_ip_prefixes_from_bird", "(", "config_file", ")", "except", "OSError", "as", "error", ":", "log", ".", "error", "(", "\"failed to open Bird configuration %s, this is a FATAL \"", "\"error, thus exiting main program\"", ",", "error", ")", "sys", ".", "exit", "(", "1", ")", "_name", "=", "get_variable_name_from_bird", "(", "config_file", ")", "if", "_name", "is", "None", ":", "log", ".", "warning", "(", "\"failed to find variable name in %s, going to add it\"", ",", "config_file", ")", "update_bird_conf", "=", "True", "elif", "_name", "!=", "variable_name", ":", "log", ".", "warning", "(", "\"found incorrect variable name in %s, going to add the \"", "\"correct one %s\"", ",", "_name", ",", "variable_name", ")", "update_bird_conf", "=", "True", "if", "dummy_ip_prefix", "not", "in", "ip_prefixes_in_bird", ":", "log", ".", "warning", "(", "\"dummy IP prefix %s is missing from bird configuration \"", "\"%s, adding it\"", ",", "dummy_ip_prefix", ",", "config_file", ")", "ip_prefixes_in_bird", ".", "insert", "(", "0", ",", "dummy_ip_prefix", ")", "update_bird_conf", "=", "True", "# Find IP prefixes in Bird configuration without a check.", "ip_prefixes_with_check", "=", "get_ip_prefixes_from_config", "(", "config", ",", "services", ",", "ip_version", ")", "# dummy_ip_prefix doesn't have a config by design", "ip_prefixes_with_check", ".", "add", "(", "dummy_ip_prefix", ")", "ip_prefixes_without_check", "=", "set", "(", "ip_prefixes_in_bird", ")", ".", "difference", "(", 
"ip_prefixes_with_check", ")", "if", "ip_prefixes_without_check", ":", "if", "config", ".", "getboolean", "(", "'daemon'", ",", "'purge_ip_prefixes'", ")", ":", "log", ".", "warning", "(", "\"removing IP prefix(es) %s from %s because they don't \"", "\"have a service check configured\"", ",", "','", ".", "join", "(", "ip_prefixes_without_check", ")", ",", "config_file", ")", "ip_prefixes_in_bird", "[", ":", "]", "=", "(", "ip", "for", "ip", "in", "ip_prefixes_in_bird", "if", "ip", "not", "in", "ip_prefixes_without_check", ")", "update_bird_conf", "=", "True", "else", ":", "log", ".", "warning", "(", "\"found IP prefixes %s in %s without a service \"", "\"check configured\"", ",", "','", ".", "join", "(", "ip_prefixes_without_check", ")", ",", "config_file", ")", "if", "update_bird_conf", ":", "if", "keep_changes", ":", "archive_bird_conf", "(", "config_file", ",", "changes_counter", ")", "tempname", "=", "write_temp_bird_conf", "(", "dummy_ip_prefix", ",", "config_file", ",", "variable_name", ",", "ip_prefixes_in_bird", ")", "try", ":", "os", ".", "rename", "(", "tempname", ",", "config_file", ")", "except", "OSError", "as", "error", ":", "msg", "=", "(", "\"CRITICAL: failed to create Bird configuration {e}, \"", "\"this is FATAL error, thus exiting main program\"", ".", "format", "(", "e", "=", "error", ")", ")", "sys", ".", "exit", "(", "\"{m}\"", ".", "format", "(", "m", "=", "msg", ")", ")", "else", ":", "log", ".", "info", "(", "\"Bird configuration for IPv%s is updated\"", ",", "ip_version", ")", "reconfigure_bird", "(", "reconfigure_cmd", ")" ]
Modify IP prefixes in Bird configuration. Depending on the configuration either removes or reports IP prefixes found in Bird configuration for which we don't have a service check associated with them. Moreover, it adds the dummy IP prefix if it isn't present and ensures that the correct variable name is set. Arguments: config (obj): A configparser object which holds our configuration. config_file (str): The file name of bird configuration variable_name (str): The name of the variable set in bird configuration dummy_ip_prefix (str): The dummy IP prefix, which must be always reconfigure_cmd (str): The command to run to trigger a reconfiguration on Bird daemon upon successful configuration update keep_changes (boolean): To enable keeping a history of changes applied to bird configuration changes_counter (int): The number of configuration changes to keep ip_version (int): IP protocol version of Bird configuration
[ "Modify", "IP", "prefixes", "in", "Bird", "configuration", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L168-L268
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
load_configuration
def load_configuration(config_file, config_dir, service_file):
    """Build configuration objects.

    If all sanity checks against daemon and service check settings are
    passed then it builds a ConfigParser object which holds all our
    configuration and a dictionary data structure which holds Bird
    configuration per IP protocol version.

    Arguments:
        config_file (str): The file name which holds daemon settings
        config_dir (str): The directory name which has configuration files
            for each service check
        service_file (str): A file which contains configuration for a
            single service check

    Returns:
        A tuple with 1st element a ConfigParser object and 2nd element
        a dictionary.

    Raises:
        ValueError if a sanity check fails.

    """
    config = configparser.ConfigParser()
    config.read_dict(DEFAULT_OPTIONS)

    if not os.path.isfile(config_file):
        raise ValueError("{f} configuration file either isn't readable or "
                         "doesn't exist".format(f=config_file))
    config_files = [config_file]

    # A single service file takes precedence over a directory of them.
    if service_file is not None:
        if not os.path.isfile(service_file):
            raise ValueError("{f} configuration file for a service check "
                             "doesn't exist".format(f=service_file))
        config_files.append(service_file)
    elif config_dir is not None:
        if not os.path.isdir(config_dir):
            raise ValueError("{d} directory with configuration files for "
                             "service checks doesn't exist"
                             .format(d=config_dir))
        config_files.extend(glob.glob(os.path.join(config_dir, '*.conf')))

    try:
        config.read(config_files)
    except configparser.Error as exc:
        raise ValueError(exc)

    configuration_check(config)
    bird_configuration = build_bird_configuration(config)
    create_bird_config_files(bird_configuration)

    return config, bird_configuration
python
def load_configuration(config_file, config_dir, service_file):
    """Build configuration objects.

    If all sanity checks against daemon and service check settings are
    passed then it builds a ConfigParser object which holds all our
    configuration and a dictionary data structure which holds Bird
    configuration per IP protocol version.

    Arguments:
        config_file (str): The file name which holds daemon settings
        config_dir (str): The directory name which has configuration files
            for each service check
        service_file (str): A file which contains configuration for a
            single service check

    Returns:
        A tuple with 1st element a ConfigParser object and 2nd element
        a dictionary.

    Raises:
        ValueError if a sanity check fails.

    """
    config = configparser.ConfigParser()
    config.read_dict(DEFAULT_OPTIONS)

    if not os.path.isfile(config_file):
        raise ValueError("{f} configuration file either isn't readable or "
                         "doesn't exist".format(f=config_file))
    config_files = [config_file]

    # A single service file takes precedence over a directory of them.
    if service_file is not None:
        if not os.path.isfile(service_file):
            raise ValueError("{f} configuration file for a service check "
                             "doesn't exist".format(f=service_file))
        config_files.append(service_file)
    elif config_dir is not None:
        if not os.path.isdir(config_dir):
            raise ValueError("{d} directory with configuration files for "
                             "service checks doesn't exist"
                             .format(d=config_dir))
        config_files.extend(glob.glob(os.path.join(config_dir, '*.conf')))

    try:
        config.read(config_files)
    except configparser.Error as exc:
        raise ValueError(exc)

    configuration_check(config)
    bird_configuration = build_bird_configuration(config)
    create_bird_config_files(bird_configuration)

    return config, bird_configuration
[ "def", "load_configuration", "(", "config_file", ",", "config_dir", ",", "service_file", ")", ":", "config_files", "=", "[", "config_file", "]", "config", "=", "configparser", ".", "ConfigParser", "(", ")", "config", ".", "read_dict", "(", "DEFAULT_OPTIONS", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "config_file", ")", ":", "raise", "ValueError", "(", "\"{f} configuration file either isn't readable or \"", "\"doesn't exist\"", ".", "format", "(", "f", "=", "config_file", ")", ")", "if", "service_file", "is", "not", "None", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "service_file", ")", ":", "raise", "ValueError", "(", "\"{f} configuration file for a service check \"", "\"doesn't exist\"", ".", "format", "(", "f", "=", "service_file", ")", ")", "else", ":", "config_files", ".", "append", "(", "service_file", ")", "elif", "config_dir", "is", "not", "None", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "config_dir", ")", ":", "raise", "ValueError", "(", "\"{d} directory with configuration files for \"", "\"service checks doesn't exist\"", ".", "format", "(", "d", "=", "config_dir", ")", ")", "else", ":", "config_files", ".", "extend", "(", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "config_dir", ",", "'*.conf'", ")", ")", ")", "try", ":", "config", ".", "read", "(", "config_files", ")", "except", "configparser", ".", "Error", "as", "exc", ":", "raise", "ValueError", "(", "exc", ")", "configuration_check", "(", "config", ")", "bird_configuration", "=", "build_bird_configuration", "(", "config", ")", "create_bird_config_files", "(", "bird_configuration", ")", "return", "config", ",", "bird_configuration" ]
Build configuration objects. If all sanity checks against daemon and service check settings are passed then it builds a ConfigParser object which holds all our configuration and a dictionary data structure which holds Bird configuration per IP protocol version. Arguments: config_file (str): The file name which holds daemon settings config_dir (str): The directory name which has configuration files for each service check service_file (str): A file which contains configuration for a single service check Returns: A tuple with 1st element a ConfigParser object and 2nd element a dictionary. Raises: ValueError if a sanity check fails.
[ "Build", "configuration", "objects", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L271-L323
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
configuration_check
def configuration_check(config):
    """Perform a sanity check on configuration.

    First it performs a sanity check against settings for daemon
    and then against settings for each service check.

    Arguments:
        config (obj): A configparser object which holds our configuration.

    Returns:
        None if all checks are successfully passed otherwise raises a
        ValueError exception.

    """
    log_level = config.get('daemon', 'loglevel')
    num_level = getattr(logging, log_level.upper(), None)
    pidfile = config.get('daemon', 'pidfile')

    # Catch the case where the directory, under which we store the pid
    # file, is missing.
    if not os.path.isdir(os.path.dirname(pidfile)):
        # BUG FIX: error message used to read "doesn't exit".
        raise ValueError("{d} doesn't exist"
                         .format(d=os.path.dirname(pidfile)))

    if not isinstance(num_level, int):
        raise ValueError('Invalid log level: {}'.format(log_level))

    # Make sure the log destinations are writable before the daemon starts.
    for _file in 'log_file', 'stderr_file':
        if config.has_option('daemon', _file):
            try:
                touch(config.get('daemon', _file))
            except OSError as exc:
                raise ValueError(exc)

    # Type-check every daemon option with its registered getter; missing
    # optional options are tolerated.
    for option, getter in DAEMON_OPTIONS_TYPE.items():
        try:
            getattr(config, getter)('daemon', option)
        except configparser.NoOptionError as error:
            if option not in DAEMON_OPTIONAL_OPTIONS:
                raise ValueError(error)
        except configparser.Error as error:
            raise ValueError(error)
        except ValueError as exc:
            msg = ("invalid data for '{opt}' option in daemon section: {err}"
                   .format(opt=option, err=exc))
            raise ValueError(msg)

    service_configuration_check(config)
python
def configuration_check(config):
    """Perform a sanity check on configuration.

    First it performs a sanity check against settings for daemon
    and then against settings for each service check.

    Arguments:
        config (obj): A configparser object which holds our configuration.

    Returns:
        None if all checks are successfully passed otherwise raises a
        ValueError exception.

    """
    log_level = config.get('daemon', 'loglevel')
    num_level = getattr(logging, log_level.upper(), None)
    pidfile = config.get('daemon', 'pidfile')

    # Catch the case where the directory, under which we store the pid
    # file, is missing.
    if not os.path.isdir(os.path.dirname(pidfile)):
        # BUG FIX: error message used to read "doesn't exit".
        raise ValueError("{d} doesn't exist"
                         .format(d=os.path.dirname(pidfile)))

    if not isinstance(num_level, int):
        raise ValueError('Invalid log level: {}'.format(log_level))

    # Make sure the log destinations are writable before the daemon starts.
    for _file in 'log_file', 'stderr_file':
        if config.has_option('daemon', _file):
            try:
                touch(config.get('daemon', _file))
            except OSError as exc:
                raise ValueError(exc)

    # Type-check every daemon option with its registered getter; missing
    # optional options are tolerated.
    for option, getter in DAEMON_OPTIONS_TYPE.items():
        try:
            getattr(config, getter)('daemon', option)
        except configparser.NoOptionError as error:
            if option not in DAEMON_OPTIONAL_OPTIONS:
                raise ValueError(error)
        except configparser.Error as error:
            raise ValueError(error)
        except ValueError as exc:
            msg = ("invalid data for '{opt}' option in daemon section: {err}"
                   .format(opt=option, err=exc))
            raise ValueError(msg)

    service_configuration_check(config)
[ "def", "configuration_check", "(", "config", ")", ":", "log_level", "=", "config", ".", "get", "(", "'daemon'", ",", "'loglevel'", ")", "num_level", "=", "getattr", "(", "logging", ",", "log_level", ".", "upper", "(", ")", ",", "None", ")", "pidfile", "=", "config", ".", "get", "(", "'daemon'", ",", "'pidfile'", ")", "# Catch the case where the directory, under which we store the pid file, is", "# missing.", "if", "not", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "dirname", "(", "pidfile", ")", ")", ":", "raise", "ValueError", "(", "\"{d} doesn't exit\"", ".", "format", "(", "d", "=", "os", ".", "path", ".", "dirname", "(", "pidfile", ")", ")", ")", "if", "not", "isinstance", "(", "num_level", ",", "int", ")", ":", "raise", "ValueError", "(", "'Invalid log level: {}'", ".", "format", "(", "log_level", ")", ")", "for", "_file", "in", "'log_file'", ",", "'stderr_file'", ":", "if", "config", ".", "has_option", "(", "'daemon'", ",", "_file", ")", ":", "try", ":", "touch", "(", "config", ".", "get", "(", "'daemon'", ",", "_file", ")", ")", "except", "OSError", "as", "exc", ":", "raise", "ValueError", "(", "exc", ")", "for", "option", ",", "getter", "in", "DAEMON_OPTIONS_TYPE", ".", "items", "(", ")", ":", "try", ":", "getattr", "(", "config", ",", "getter", ")", "(", "'daemon'", ",", "option", ")", "except", "configparser", ".", "NoOptionError", "as", "error", ":", "if", "option", "not", "in", "DAEMON_OPTIONAL_OPTIONS", ":", "raise", "ValueError", "(", "error", ")", "except", "configparser", ".", "Error", "as", "error", ":", "raise", "ValueError", "(", "error", ")", "except", "ValueError", "as", "exc", ":", "msg", "=", "(", "\"invalid data for '{opt}' option in daemon section: {err}\"", ".", "format", "(", "opt", "=", "option", ",", "err", "=", "exc", ")", ")", "raise", "ValueError", "(", "msg", ")", "service_configuration_check", "(", "config", ")" ]
Perform a sanity check on configuration. First it performs a sanity check against settings for daemon and then against settings for each service check. Arguments: config (obj): A configparser object which holds our configuration. Returns: None if all checks are successfully passed otherwise raises a ValueError exception.
[ "Perform", "a", "sanity", "check", "on", "configuration", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L326-L372
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
build_bird_configuration
def build_bird_configuration(config): """Build bird configuration structure. First it performs a sanity check against bird settings and then builds a dictionary structure with bird configuration per IP version. Arguments: config (obj): A configparser object which holds our configuration. Returns: A dictionary Raises: ValueError if sanity check fails. """ bird_configuration = {} if config.getboolean('daemon', 'ipv4'): if os.path.islink(config.get('daemon', 'bird_conf')): config_file = os.path.realpath(config.get('daemon', 'bird_conf')) print("'bird_conf' is set to a symbolic link ({s} -> {d}, but we " "will use the canonical path of that link" .format(s=config.get('daemon', 'bird_conf'), d=config_file)) else: config_file = config.get('daemon', 'bird_conf') dummy_ip_prefix = config.get('daemon', 'dummy_ip_prefix') if not valid_ip_prefix(dummy_ip_prefix): raise ValueError("invalid dummy IPv4 prefix: {i}" .format(i=dummy_ip_prefix)) bird_configuration[4] = { 'config_file': config_file, 'variable_name': config.get('daemon', 'bird_variable'), 'dummy_ip_prefix': dummy_ip_prefix, 'reconfigure_cmd': config.get('daemon', 'bird_reconfigure_cmd'), 'keep_changes': config.getboolean('daemon', 'bird_keep_changes'), 'changes_counter': config.getint('daemon', 'bird_changes_counter') } if config.getboolean('daemon', 'ipv6'): if os.path.islink(config.get('daemon', 'bird6_conf')): config_file = os.path.realpath(config.get('daemon', 'bird6_conf')) print("'bird6_conf' is set to a symbolic link ({s} -> {d}, but we " "will use the canonical path of that link" .format(s=config.get('daemon', 'bird6_conf'), d=config_file)) else: config_file = config.get('daemon', 'bird6_conf') dummy_ip_prefix = config.get('daemon', 'dummy_ip6_prefix') if not valid_ip_prefix(dummy_ip_prefix): raise ValueError("invalid dummy IPv6 prefix: {i}" .format(i=dummy_ip_prefix)) bird_configuration[6] = { 'config_file': config_file, 'variable_name': config.get('daemon', 'bird6_variable'), 'dummy_ip_prefix': 
dummy_ip_prefix, 'reconfigure_cmd': config.get('daemon', 'bird6_reconfigure_cmd'), 'keep_changes': config.getboolean('daemon', 'bird6_keep_changes'), 'changes_counter': config.getint('daemon', 'bird6_changes_counter') } return bird_configuration
python
def build_bird_configuration(config): """Build bird configuration structure. First it performs a sanity check against bird settings and then builds a dictionary structure with bird configuration per IP version. Arguments: config (obj): A configparser object which holds our configuration. Returns: A dictionary Raises: ValueError if sanity check fails. """ bird_configuration = {} if config.getboolean('daemon', 'ipv4'): if os.path.islink(config.get('daemon', 'bird_conf')): config_file = os.path.realpath(config.get('daemon', 'bird_conf')) print("'bird_conf' is set to a symbolic link ({s} -> {d}, but we " "will use the canonical path of that link" .format(s=config.get('daemon', 'bird_conf'), d=config_file)) else: config_file = config.get('daemon', 'bird_conf') dummy_ip_prefix = config.get('daemon', 'dummy_ip_prefix') if not valid_ip_prefix(dummy_ip_prefix): raise ValueError("invalid dummy IPv4 prefix: {i}" .format(i=dummy_ip_prefix)) bird_configuration[4] = { 'config_file': config_file, 'variable_name': config.get('daemon', 'bird_variable'), 'dummy_ip_prefix': dummy_ip_prefix, 'reconfigure_cmd': config.get('daemon', 'bird_reconfigure_cmd'), 'keep_changes': config.getboolean('daemon', 'bird_keep_changes'), 'changes_counter': config.getint('daemon', 'bird_changes_counter') } if config.getboolean('daemon', 'ipv6'): if os.path.islink(config.get('daemon', 'bird6_conf')): config_file = os.path.realpath(config.get('daemon', 'bird6_conf')) print("'bird6_conf' is set to a symbolic link ({s} -> {d}, but we " "will use the canonical path of that link" .format(s=config.get('daemon', 'bird6_conf'), d=config_file)) else: config_file = config.get('daemon', 'bird6_conf') dummy_ip_prefix = config.get('daemon', 'dummy_ip6_prefix') if not valid_ip_prefix(dummy_ip_prefix): raise ValueError("invalid dummy IPv6 prefix: {i}" .format(i=dummy_ip_prefix)) bird_configuration[6] = { 'config_file': config_file, 'variable_name': config.get('daemon', 'bird6_variable'), 'dummy_ip_prefix': 
dummy_ip_prefix, 'reconfigure_cmd': config.get('daemon', 'bird6_reconfigure_cmd'), 'keep_changes': config.getboolean('daemon', 'bird6_keep_changes'), 'changes_counter': config.getint('daemon', 'bird6_changes_counter') } return bird_configuration
[ "def", "build_bird_configuration", "(", "config", ")", ":", "bird_configuration", "=", "{", "}", "if", "config", ".", "getboolean", "(", "'daemon'", ",", "'ipv4'", ")", ":", "if", "os", ".", "path", ".", "islink", "(", "config", ".", "get", "(", "'daemon'", ",", "'bird_conf'", ")", ")", ":", "config_file", "=", "os", ".", "path", ".", "realpath", "(", "config", ".", "get", "(", "'daemon'", ",", "'bird_conf'", ")", ")", "print", "(", "\"'bird_conf' is set to a symbolic link ({s} -> {d}, but we \"", "\"will use the canonical path of that link\"", ".", "format", "(", "s", "=", "config", ".", "get", "(", "'daemon'", ",", "'bird_conf'", ")", ",", "d", "=", "config_file", ")", ")", "else", ":", "config_file", "=", "config", ".", "get", "(", "'daemon'", ",", "'bird_conf'", ")", "dummy_ip_prefix", "=", "config", ".", "get", "(", "'daemon'", ",", "'dummy_ip_prefix'", ")", "if", "not", "valid_ip_prefix", "(", "dummy_ip_prefix", ")", ":", "raise", "ValueError", "(", "\"invalid dummy IPv4 prefix: {i}\"", ".", "format", "(", "i", "=", "dummy_ip_prefix", ")", ")", "bird_configuration", "[", "4", "]", "=", "{", "'config_file'", ":", "config_file", ",", "'variable_name'", ":", "config", ".", "get", "(", "'daemon'", ",", "'bird_variable'", ")", ",", "'dummy_ip_prefix'", ":", "dummy_ip_prefix", ",", "'reconfigure_cmd'", ":", "config", ".", "get", "(", "'daemon'", ",", "'bird_reconfigure_cmd'", ")", ",", "'keep_changes'", ":", "config", ".", "getboolean", "(", "'daemon'", ",", "'bird_keep_changes'", ")", ",", "'changes_counter'", ":", "config", ".", "getint", "(", "'daemon'", ",", "'bird_changes_counter'", ")", "}", "if", "config", ".", "getboolean", "(", "'daemon'", ",", "'ipv6'", ")", ":", "if", "os", ".", "path", ".", "islink", "(", "config", ".", "get", "(", "'daemon'", ",", "'bird6_conf'", ")", ")", ":", "config_file", "=", "os", ".", "path", ".", "realpath", "(", "config", ".", "get", "(", "'daemon'", ",", "'bird6_conf'", ")", ")", "print", "(", "\"'bird6_conf' is set to a 
symbolic link ({s} -> {d}, but we \"", "\"will use the canonical path of that link\"", ".", "format", "(", "s", "=", "config", ".", "get", "(", "'daemon'", ",", "'bird6_conf'", ")", ",", "d", "=", "config_file", ")", ")", "else", ":", "config_file", "=", "config", ".", "get", "(", "'daemon'", ",", "'bird6_conf'", ")", "dummy_ip_prefix", "=", "config", ".", "get", "(", "'daemon'", ",", "'dummy_ip6_prefix'", ")", "if", "not", "valid_ip_prefix", "(", "dummy_ip_prefix", ")", ":", "raise", "ValueError", "(", "\"invalid dummy IPv6 prefix: {i}\"", ".", "format", "(", "i", "=", "dummy_ip_prefix", ")", ")", "bird_configuration", "[", "6", "]", "=", "{", "'config_file'", ":", "config_file", ",", "'variable_name'", ":", "config", ".", "get", "(", "'daemon'", ",", "'bird6_variable'", ")", ",", "'dummy_ip_prefix'", ":", "dummy_ip_prefix", ",", "'reconfigure_cmd'", ":", "config", ".", "get", "(", "'daemon'", ",", "'bird6_reconfigure_cmd'", ")", ",", "'keep_changes'", ":", "config", ".", "getboolean", "(", "'daemon'", ",", "'bird6_keep_changes'", ")", ",", "'changes_counter'", ":", "config", ".", "getint", "(", "'daemon'", ",", "'bird6_changes_counter'", ")", "}", "return", "bird_configuration" ]
Build bird configuration structure. First it performs a sanity check against bird settings and then builds a dictionary structure with bird configuration per IP version. Arguments: config (obj): A configparser object which holds our configuration. Returns: A dictionary Raises: ValueError if sanity check fails.
[ "Build", "bird", "configuration", "structure", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L457-L519
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
get_variable_name_from_bird
def get_variable_name_from_bird(bird_conf): """Return the variable name set in Bird configuration. The variable name in Bird configuration is set with the keyword 'define', here is an example: define ACAST_PS_ADVERTISE = and we exract the string between the word 'define' and the equals sign. Arguments: bird_conf (str): The absolute file name path of Bird configuration. Returns: The variable name as a string or None if it isn't found. """ bird_variable_pattern = re.compile( r''' ^\s* define\s+ (?P<name>\S+\b) \s+ = ''', re.VERBOSE ) with open(bird_conf, 'r') as content: for line in content.readlines(): variable_match = bird_variable_pattern.search(line) if variable_match: return variable_match.group('name') return None
python
def get_variable_name_from_bird(bird_conf): """Return the variable name set in Bird configuration. The variable name in Bird configuration is set with the keyword 'define', here is an example: define ACAST_PS_ADVERTISE = and we exract the string between the word 'define' and the equals sign. Arguments: bird_conf (str): The absolute file name path of Bird configuration. Returns: The variable name as a string or None if it isn't found. """ bird_variable_pattern = re.compile( r''' ^\s* define\s+ (?P<name>\S+\b) \s+ = ''', re.VERBOSE ) with open(bird_conf, 'r') as content: for line in content.readlines(): variable_match = bird_variable_pattern.search(line) if variable_match: return variable_match.group('name') return None
[ "def", "get_variable_name_from_bird", "(", "bird_conf", ")", ":", "bird_variable_pattern", "=", "re", ".", "compile", "(", "r'''\n ^\\s*\n define\\s+\n (?P<name>\\S+\\b)\n \\s+\n =\n '''", ",", "re", ".", "VERBOSE", ")", "with", "open", "(", "bird_conf", ",", "'r'", ")", "as", "content", ":", "for", "line", "in", "content", ".", "readlines", "(", ")", ":", "variable_match", "=", "bird_variable_pattern", ".", "search", "(", "line", ")", "if", "variable_match", ":", "return", "variable_match", ".", "group", "(", "'name'", ")", "return", "None" ]
Return the variable name set in Bird configuration. The variable name in Bird configuration is set with the keyword 'define', here is an example: define ACAST_PS_ADVERTISE = and we exract the string between the word 'define' and the equals sign. Arguments: bird_conf (str): The absolute file name path of Bird configuration. Returns: The variable name as a string or None if it isn't found.
[ "Return", "the", "variable", "name", "set", "in", "Bird", "configuration", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L522-L555
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
create_bird_config_files
def create_bird_config_files(bird_configuration): """Create bird configuration files per IP version. Creates bird configuration files if they don't exist. It also creates the directories where we store the history of changes, if this functionality is enabled. Arguments: bird_configuration (dict): A dictionary with settings for bird. Returns: None Raises: ValueError if we can't create bird configuration files and the directory to store the history of changes in bird configuration file. """ for ip_version in bird_configuration: # This creates the file if it doesn't exist. config_file = bird_configuration[ip_version]['config_file'] try: touch(config_file) except OSError as exc: raise ValueError("failed to create {f}:{e}" .format(f=config_file, e=exc)) if bird_configuration[ip_version]['keep_changes']: history_dir = os.path.join(os.path.dirname(config_file), 'history') try: os.mkdir(history_dir) except FileExistsError: pass except OSError as exc: raise ValueError("failed to make directory {d} for keeping a " "history of changes for {b}:{e}" .format(d=history_dir, b=config_file, e=exc)) else: print("{d} is created".format(d=history_dir))
python
def create_bird_config_files(bird_configuration): """Create bird configuration files per IP version. Creates bird configuration files if they don't exist. It also creates the directories where we store the history of changes, if this functionality is enabled. Arguments: bird_configuration (dict): A dictionary with settings for bird. Returns: None Raises: ValueError if we can't create bird configuration files and the directory to store the history of changes in bird configuration file. """ for ip_version in bird_configuration: # This creates the file if it doesn't exist. config_file = bird_configuration[ip_version]['config_file'] try: touch(config_file) except OSError as exc: raise ValueError("failed to create {f}:{e}" .format(f=config_file, e=exc)) if bird_configuration[ip_version]['keep_changes']: history_dir = os.path.join(os.path.dirname(config_file), 'history') try: os.mkdir(history_dir) except FileExistsError: pass except OSError as exc: raise ValueError("failed to make directory {d} for keeping a " "history of changes for {b}:{e}" .format(d=history_dir, b=config_file, e=exc)) else: print("{d} is created".format(d=history_dir))
[ "def", "create_bird_config_files", "(", "bird_configuration", ")", ":", "for", "ip_version", "in", "bird_configuration", ":", "# This creates the file if it doesn't exist.", "config_file", "=", "bird_configuration", "[", "ip_version", "]", "[", "'config_file'", "]", "try", ":", "touch", "(", "config_file", ")", "except", "OSError", "as", "exc", ":", "raise", "ValueError", "(", "\"failed to create {f}:{e}\"", ".", "format", "(", "f", "=", "config_file", ",", "e", "=", "exc", ")", ")", "if", "bird_configuration", "[", "ip_version", "]", "[", "'keep_changes'", "]", ":", "history_dir", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "config_file", ")", ",", "'history'", ")", "try", ":", "os", ".", "mkdir", "(", "history_dir", ")", "except", "FileExistsError", ":", "pass", "except", "OSError", "as", "exc", ":", "raise", "ValueError", "(", "\"failed to make directory {d} for keeping a \"", "\"history of changes for {b}:{e}\"", ".", "format", "(", "d", "=", "history_dir", ",", "b", "=", "config_file", ",", "e", "=", "exc", ")", ")", "else", ":", "print", "(", "\"{d} is created\"", ".", "format", "(", "d", "=", "history_dir", ")", ")" ]
Create bird configuration files per IP version. Creates bird configuration files if they don't exist. It also creates the directories where we store the history of changes, if this functionality is enabled. Arguments: bird_configuration (dict): A dictionary with settings for bird. Returns: None Raises: ValueError if we can't create bird configuration files and the directory to store the history of changes in bird configuration file.
[ "Create", "bird", "configuration", "files", "per", "IP", "version", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L558-L595
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
running
def running(processid): """Check the validity of a process ID. Arguments: processid (int): Process ID number. Returns: True if process ID is found otherwise False. """ try: # From kill(2) # If sig is 0 (the null signal), error checking is performed but no # signal is actually sent. The null signal can be used to check the # validity of pid os.kill(processid, 0) except OverflowError as exc: print("checking validity of pid ({p}) failed with: {e}" .format(p=processid, e=exc)) sys.exit(1) except OSError: return False else: return True
python
def running(processid): """Check the validity of a process ID. Arguments: processid (int): Process ID number. Returns: True if process ID is found otherwise False. """ try: # From kill(2) # If sig is 0 (the null signal), error checking is performed but no # signal is actually sent. The null signal can be used to check the # validity of pid os.kill(processid, 0) except OverflowError as exc: print("checking validity of pid ({p}) failed with: {e}" .format(p=processid, e=exc)) sys.exit(1) except OSError: return False else: return True
[ "def", "running", "(", "processid", ")", ":", "try", ":", "# From kill(2)", "# If sig is 0 (the null signal), error checking is performed but no", "# signal is actually sent. The null signal can be used to check the", "# validity of pid", "os", ".", "kill", "(", "processid", ",", "0", ")", "except", "OverflowError", "as", "exc", ":", "print", "(", "\"checking validity of pid ({p}) failed with: {e}\"", ".", "format", "(", "p", "=", "processid", ",", "e", "=", "exc", ")", ")", "sys", ".", "exit", "(", "1", ")", "except", "OSError", ":", "return", "False", "else", ":", "return", "True" ]
Check the validity of a process ID. Arguments: processid (int): Process ID number. Returns: True if process ID is found otherwise False.
[ "Check", "the", "validity", "of", "a", "process", "ID", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L598-L621
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
get_ip_prefixes_from_bird
def get_ip_prefixes_from_bird(filename): """Build a list of IP prefixes found in Bird configuration. Arguments: filename (str): The absolute path of the Bird configuration file. Notes: It can only parse a file with the following format define ACAST_PS_ADVERTISE = [ 10.189.200.155/32, 10.189.200.255/32 ]; Returns: A list of IP prefixes. """ prefixes = [] with open(filename, 'r') as bird_conf: lines = bird_conf.read() for line in lines.splitlines(): line = line.strip(', ') if valid_ip_prefix(line): prefixes.append(line) return prefixes
python
def get_ip_prefixes_from_bird(filename): """Build a list of IP prefixes found in Bird configuration. Arguments: filename (str): The absolute path of the Bird configuration file. Notes: It can only parse a file with the following format define ACAST_PS_ADVERTISE = [ 10.189.200.155/32, 10.189.200.255/32 ]; Returns: A list of IP prefixes. """ prefixes = [] with open(filename, 'r') as bird_conf: lines = bird_conf.read() for line in lines.splitlines(): line = line.strip(', ') if valid_ip_prefix(line): prefixes.append(line) return prefixes
[ "def", "get_ip_prefixes_from_bird", "(", "filename", ")", ":", "prefixes", "=", "[", "]", "with", "open", "(", "filename", ",", "'r'", ")", "as", "bird_conf", ":", "lines", "=", "bird_conf", ".", "read", "(", ")", "for", "line", "in", "lines", ".", "splitlines", "(", ")", ":", "line", "=", "line", ".", "strip", "(", "', '", ")", "if", "valid_ip_prefix", "(", "line", ")", ":", "prefixes", ".", "append", "(", "line", ")", "return", "prefixes" ]
Build a list of IP prefixes found in Bird configuration. Arguments: filename (str): The absolute path of the Bird configuration file. Notes: It can only parse a file with the following format define ACAST_PS_ADVERTISE = [ 10.189.200.155/32, 10.189.200.255/32 ]; Returns: A list of IP prefixes.
[ "Build", "a", "list", "of", "IP", "prefixes", "found", "in", "Bird", "configuration", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L624-L652
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
reconfigure_bird
def reconfigure_bird(cmd): """Reconfigure BIRD daemon. Arguments: cmd (string): A command to trigger a reconfiguration of Bird daemon Notes: Runs 'birdc configure' to reconfigure BIRD. Some useful information on how birdc tool works: -- Returns a non-zero exit code only when it can't access BIRD daemon via the control socket (/var/run/bird.ctl). This happens when BIRD daemon is either down or when the caller of birdc doesn't have access to the control socket. -- Returns zero exit code when reconfigure fails due to invalid configuration. Thus, we catch this case by looking at the output and not at the exit code. -- Returns zero exit code when reconfigure was successful. -- Should never timeout, if it does then it is a bug. """ log = logging.getLogger(PROGRAM_NAME) cmd = shlex.split(cmd) log.info("reconfiguring BIRD by running %s", ' '.join(cmd)) try: output = subprocess.check_output( cmd, timeout=2, stderr=subprocess.STDOUT, universal_newlines=True, ) except subprocess.TimeoutExpired: log.error("reconfiguring bird timed out") return except subprocess.CalledProcessError as error: # birdc returns 0 even when it fails due to invalid config, # but it returns 1 when BIRD is down. log.error("reconfiguring BIRD failed, either BIRD daemon is down or " "we don't have privileges to reconfigure it (sudo problems?)" ":%s", error.output.strip()) return except FileNotFoundError as error: log.error("reconfiguring BIRD failed with: %s", error) return # 'Reconfigured' string will be in the output if and only if conf is valid. pattern = re.compile('^Reconfigured$', re.MULTILINE) if pattern.search(str(output)): log.info('reconfigured BIRD daemon') else: # We will end up here only if we generated an invalid conf # or someone broke bird.conf. log.error("reconfiguring BIRD returned error, most likely we generated" " an invalid configuration file or Bird configuration in is " "broken:%s", output)
python
def reconfigure_bird(cmd): """Reconfigure BIRD daemon. Arguments: cmd (string): A command to trigger a reconfiguration of Bird daemon Notes: Runs 'birdc configure' to reconfigure BIRD. Some useful information on how birdc tool works: -- Returns a non-zero exit code only when it can't access BIRD daemon via the control socket (/var/run/bird.ctl). This happens when BIRD daemon is either down or when the caller of birdc doesn't have access to the control socket. -- Returns zero exit code when reconfigure fails due to invalid configuration. Thus, we catch this case by looking at the output and not at the exit code. -- Returns zero exit code when reconfigure was successful. -- Should never timeout, if it does then it is a bug. """ log = logging.getLogger(PROGRAM_NAME) cmd = shlex.split(cmd) log.info("reconfiguring BIRD by running %s", ' '.join(cmd)) try: output = subprocess.check_output( cmd, timeout=2, stderr=subprocess.STDOUT, universal_newlines=True, ) except subprocess.TimeoutExpired: log.error("reconfiguring bird timed out") return except subprocess.CalledProcessError as error: # birdc returns 0 even when it fails due to invalid config, # but it returns 1 when BIRD is down. log.error("reconfiguring BIRD failed, either BIRD daemon is down or " "we don't have privileges to reconfigure it (sudo problems?)" ":%s", error.output.strip()) return except FileNotFoundError as error: log.error("reconfiguring BIRD failed with: %s", error) return # 'Reconfigured' string will be in the output if and only if conf is valid. pattern = re.compile('^Reconfigured$', re.MULTILINE) if pattern.search(str(output)): log.info('reconfigured BIRD daemon') else: # We will end up here only if we generated an invalid conf # or someone broke bird.conf. log.error("reconfiguring BIRD returned error, most likely we generated" " an invalid configuration file or Bird configuration in is " "broken:%s", output)
[ "def", "reconfigure_bird", "(", "cmd", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "PROGRAM_NAME", ")", "cmd", "=", "shlex", ".", "split", "(", "cmd", ")", "log", ".", "info", "(", "\"reconfiguring BIRD by running %s\"", ",", "' '", ".", "join", "(", "cmd", ")", ")", "try", ":", "output", "=", "subprocess", ".", "check_output", "(", "cmd", ",", "timeout", "=", "2", ",", "stderr", "=", "subprocess", ".", "STDOUT", ",", "universal_newlines", "=", "True", ",", ")", "except", "subprocess", ".", "TimeoutExpired", ":", "log", ".", "error", "(", "\"reconfiguring bird timed out\"", ")", "return", "except", "subprocess", ".", "CalledProcessError", "as", "error", ":", "# birdc returns 0 even when it fails due to invalid config,", "# but it returns 1 when BIRD is down.", "log", ".", "error", "(", "\"reconfiguring BIRD failed, either BIRD daemon is down or \"", "\"we don't have privileges to reconfigure it (sudo problems?)\"", "\":%s\"", ",", "error", ".", "output", ".", "strip", "(", ")", ")", "return", "except", "FileNotFoundError", "as", "error", ":", "log", ".", "error", "(", "\"reconfiguring BIRD failed with: %s\"", ",", "error", ")", "return", "# 'Reconfigured' string will be in the output if and only if conf is valid.", "pattern", "=", "re", ".", "compile", "(", "'^Reconfigured$'", ",", "re", ".", "MULTILINE", ")", "if", "pattern", ".", "search", "(", "str", "(", "output", ")", ")", ":", "log", ".", "info", "(", "'reconfigured BIRD daemon'", ")", "else", ":", "# We will end up here only if we generated an invalid conf", "# or someone broke bird.conf.", "log", ".", "error", "(", "\"reconfiguring BIRD returned error, most likely we generated\"", "\" an invalid configuration file or Bird configuration in is \"", "\"broken:%s\"", ",", "output", ")" ]
Reconfigure BIRD daemon. Arguments: cmd (string): A command to trigger a reconfiguration of Bird daemon Notes: Runs 'birdc configure' to reconfigure BIRD. Some useful information on how birdc tool works: -- Returns a non-zero exit code only when it can't access BIRD daemon via the control socket (/var/run/bird.ctl). This happens when BIRD daemon is either down or when the caller of birdc doesn't have access to the control socket. -- Returns zero exit code when reconfigure fails due to invalid configuration. Thus, we catch this case by looking at the output and not at the exit code. -- Returns zero exit code when reconfigure was successful. -- Should never timeout, if it does then it is a bug.
[ "Reconfigure", "BIRD", "daemon", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L725-L778
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
write_temp_bird_conf
def write_temp_bird_conf(dummy_ip_prefix, config_file, variable_name, prefixes): """Write in a temporary file the list of IP-Prefixes. A failure to create and write the temporary file will exit main program. Arguments: dummy_ip_prefix (str): The dummy IP prefix, which must be always config_file (str): The file name of bird configuration variable_name (str): The name of the variable set in bird configuration prefixes (list): The list of IP-Prefixes to write Returns: The filename of the temporary file """ log = logging.getLogger(PROGRAM_NAME) comment = ("# {i} is a dummy IP Prefix. It should NOT be used and " "REMOVED from the constant.".format(i=dummy_ip_prefix)) # the temporary file must be on the same filesystem as the bird config # as we use os.rename to perform an atomic update on the bird config. # Thus, we create it in the same directory that bird config is stored. tm_file = os.path.join(os.path.dirname(config_file), str(time.time())) log.debug("going to write to %s", tm_file) try: with open(tm_file, 'w') as tmpf: tmpf.write("# Generated {t} by {n} (pid={p})\n" .format(t=datetime.datetime.now(), n=PROGRAM_NAME, p=os.getpid())) tmpf.write("{c}\n".format(c=comment)) tmpf.write("define {n} =\n".format(n=variable_name)) tmpf.write("{s}[\n".format(s=4 * ' ')) # all entries of the array need a trailing comma except the last # one. A single element array doesn't need a trailing comma. tmpf.write(',\n'.join([' '*8 + n for n in prefixes])) tmpf.write("\n{s}];\n".format(s=4 * ' ')) except OSError as error: log.critical("failed to write temporary file %s: %s. This is a FATAL " "error, this exiting main program", tm_file, error) sys.exit(1) else: return tm_file
python
def write_temp_bird_conf(dummy_ip_prefix, config_file, variable_name, prefixes): """Write in a temporary file the list of IP-Prefixes. A failure to create and write the temporary file will exit main program. Arguments: dummy_ip_prefix (str): The dummy IP prefix, which must be always config_file (str): The file name of bird configuration variable_name (str): The name of the variable set in bird configuration prefixes (list): The list of IP-Prefixes to write Returns: The filename of the temporary file """ log = logging.getLogger(PROGRAM_NAME) comment = ("# {i} is a dummy IP Prefix. It should NOT be used and " "REMOVED from the constant.".format(i=dummy_ip_prefix)) # the temporary file must be on the same filesystem as the bird config # as we use os.rename to perform an atomic update on the bird config. # Thus, we create it in the same directory that bird config is stored. tm_file = os.path.join(os.path.dirname(config_file), str(time.time())) log.debug("going to write to %s", tm_file) try: with open(tm_file, 'w') as tmpf: tmpf.write("# Generated {t} by {n} (pid={p})\n" .format(t=datetime.datetime.now(), n=PROGRAM_NAME, p=os.getpid())) tmpf.write("{c}\n".format(c=comment)) tmpf.write("define {n} =\n".format(n=variable_name)) tmpf.write("{s}[\n".format(s=4 * ' ')) # all entries of the array need a trailing comma except the last # one. A single element array doesn't need a trailing comma. tmpf.write(',\n'.join([' '*8 + n for n in prefixes])) tmpf.write("\n{s}];\n".format(s=4 * ' ')) except OSError as error: log.critical("failed to write temporary file %s: %s. This is a FATAL " "error, this exiting main program", tm_file, error) sys.exit(1) else: return tm_file
[ "def", "write_temp_bird_conf", "(", "dummy_ip_prefix", ",", "config_file", ",", "variable_name", ",", "prefixes", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "PROGRAM_NAME", ")", "comment", "=", "(", "\"# {i} is a dummy IP Prefix. It should NOT be used and \"", "\"REMOVED from the constant.\"", ".", "format", "(", "i", "=", "dummy_ip_prefix", ")", ")", "# the temporary file must be on the same filesystem as the bird config", "# as we use os.rename to perform an atomic update on the bird config.", "# Thus, we create it in the same directory that bird config is stored.", "tm_file", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "config_file", ")", ",", "str", "(", "time", ".", "time", "(", ")", ")", ")", "log", ".", "debug", "(", "\"going to write to %s\"", ",", "tm_file", ")", "try", ":", "with", "open", "(", "tm_file", ",", "'w'", ")", "as", "tmpf", ":", "tmpf", ".", "write", "(", "\"# Generated {t} by {n} (pid={p})\\n\"", ".", "format", "(", "t", "=", "datetime", ".", "datetime", ".", "now", "(", ")", ",", "n", "=", "PROGRAM_NAME", ",", "p", "=", "os", ".", "getpid", "(", ")", ")", ")", "tmpf", ".", "write", "(", "\"{c}\\n\"", ".", "format", "(", "c", "=", "comment", ")", ")", "tmpf", ".", "write", "(", "\"define {n} =\\n\"", ".", "format", "(", "n", "=", "variable_name", ")", ")", "tmpf", ".", "write", "(", "\"{s}[\\n\"", ".", "format", "(", "s", "=", "4", "*", "' '", ")", ")", "# all entries of the array need a trailing comma except the last", "# one. A single element array doesn't need a trailing comma.", "tmpf", ".", "write", "(", "',\\n'", ".", "join", "(", "[", "' '", "*", "8", "+", "n", "for", "n", "in", "prefixes", "]", ")", ")", "tmpf", ".", "write", "(", "\"\\n{s}];\\n\"", ".", "format", "(", "s", "=", "4", "*", "' '", ")", ")", "except", "OSError", "as", "error", ":", "log", ".", "critical", "(", "\"failed to write temporary file %s: %s. 
This is a FATAL \"", "\"error, this exiting main program\"", ",", "tm_file", ",", "error", ")", "sys", ".", "exit", "(", "1", ")", "else", ":", "return", "tm_file" ]
Write in a temporary file the list of IP-Prefixes. A failure to create and write the temporary file will exit main program. Arguments: dummy_ip_prefix (str): The dummy IP prefix, which must be always config_file (str): The file name of bird configuration variable_name (str): The name of the variable set in bird configuration prefixes (list): The list of IP-Prefixes to write Returns: The filename of the temporary file
[ "Write", "in", "a", "temporary", "file", "the", "list", "of", "IP", "-", "Prefixes", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L781-L827
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
archive_bird_conf
def archive_bird_conf(config_file, changes_counter): """Keep a history of Bird configuration files. Arguments: config_file (str): file name of bird configuration changes_counter (int): number of configuration files to keep in the history """ log = logging.getLogger(PROGRAM_NAME) history_dir = os.path.join(os.path.dirname(config_file), 'history') dst = os.path.join(history_dir, str(time.time())) log.debug("coping %s to %s", config_file, dst) history = [x for x in os.listdir(history_dir) if os.path.isfile(os.path.join(history_dir, x))] if len(history) > changes_counter: log.info("threshold of %s is reached, removing old files", changes_counter) for _file in sorted(history, reverse=True)[changes_counter - 1:]: _path = os.path.join(history_dir, _file) try: os.remove(_path) except OSError as exc: log.warning("failed to remove %s: %s", _file, exc) else: log.info("removed %s", _path) try: shutil.copy2(config_file, dst) except OSError as exc: log.warning("failed to copy %s to %s: %s", config_file, dst, exc)
python
def archive_bird_conf(config_file, changes_counter): """Keep a history of Bird configuration files. Arguments: config_file (str): file name of bird configuration changes_counter (int): number of configuration files to keep in the history """ log = logging.getLogger(PROGRAM_NAME) history_dir = os.path.join(os.path.dirname(config_file), 'history') dst = os.path.join(history_dir, str(time.time())) log.debug("coping %s to %s", config_file, dst) history = [x for x in os.listdir(history_dir) if os.path.isfile(os.path.join(history_dir, x))] if len(history) > changes_counter: log.info("threshold of %s is reached, removing old files", changes_counter) for _file in sorted(history, reverse=True)[changes_counter - 1:]: _path = os.path.join(history_dir, _file) try: os.remove(_path) except OSError as exc: log.warning("failed to remove %s: %s", _file, exc) else: log.info("removed %s", _path) try: shutil.copy2(config_file, dst) except OSError as exc: log.warning("failed to copy %s to %s: %s", config_file, dst, exc)
[ "def", "archive_bird_conf", "(", "config_file", ",", "changes_counter", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "PROGRAM_NAME", ")", "history_dir", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "config_file", ")", ",", "'history'", ")", "dst", "=", "os", ".", "path", ".", "join", "(", "history_dir", ",", "str", "(", "time", ".", "time", "(", ")", ")", ")", "log", ".", "debug", "(", "\"coping %s to %s\"", ",", "config_file", ",", "dst", ")", "history", "=", "[", "x", "for", "x", "in", "os", ".", "listdir", "(", "history_dir", ")", "if", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "history_dir", ",", "x", ")", ")", "]", "if", "len", "(", "history", ")", ">", "changes_counter", ":", "log", ".", "info", "(", "\"threshold of %s is reached, removing old files\"", ",", "changes_counter", ")", "for", "_file", "in", "sorted", "(", "history", ",", "reverse", "=", "True", ")", "[", "changes_counter", "-", "1", ":", "]", ":", "_path", "=", "os", ".", "path", ".", "join", "(", "history_dir", ",", "_file", ")", "try", ":", "os", ".", "remove", "(", "_path", ")", "except", "OSError", "as", "exc", ":", "log", ".", "warning", "(", "\"failed to remove %s: %s\"", ",", "_file", ",", "exc", ")", "else", ":", "log", ".", "info", "(", "\"removed %s\"", ",", "_path", ")", "try", ":", "shutil", ".", "copy2", "(", "config_file", ",", "dst", ")", "except", "OSError", "as", "exc", ":", "log", ".", "warning", "(", "\"failed to copy %s to %s: %s\"", ",", "config_file", ",", "dst", ",", "exc", ")" ]
Keep a history of Bird configuration files. Arguments: config_file (str): file name of bird configuration changes_counter (int): number of configuration files to keep in the history
[ "Keep", "a", "history", "of", "Bird", "configuration", "files", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L830-L861
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
update_pidfile
def update_pidfile(pidfile): """Update pidfile. Notice: We should call this function only after we have successfully acquired a lock and never before. It exits main program if it fails to parse and/or write pidfile. Arguments: pidfile (str): pidfile to update """ try: with open(pidfile, mode='r') as _file: pid = _file.read(1024).rstrip() try: pid = int(pid) except ValueError: print("cleaning stale pidfile with invalid data:'{}'".format(pid)) write_pid(pidfile) else: if running(pid): # This is to catch migration issues from 0.7.x to 0.8.x # version, where old process is still around as it failed to # be stopped. Since newer version has a different locking # mechanism, we can end up with both versions running. # In order to avoid this situation we refuse to startup. sys.exit("process {} is already running".format(pid)) else: # pidfile exists with a PID for a process that is not running. # Let's update PID. print("updating stale processID({}) in pidfile".format(pid)) write_pid(pidfile) except FileNotFoundError: # Either it's 1st time we run or previous run was terminated # successfully. print("creating pidfile {f}".format(f=pidfile)) write_pid(pidfile) except OSError as exc: sys.exit("failed to update pidfile:{e}".format(e=exc))
python
def update_pidfile(pidfile): """Update pidfile. Notice: We should call this function only after we have successfully acquired a lock and never before. It exits main program if it fails to parse and/or write pidfile. Arguments: pidfile (str): pidfile to update """ try: with open(pidfile, mode='r') as _file: pid = _file.read(1024).rstrip() try: pid = int(pid) except ValueError: print("cleaning stale pidfile with invalid data:'{}'".format(pid)) write_pid(pidfile) else: if running(pid): # This is to catch migration issues from 0.7.x to 0.8.x # version, where old process is still around as it failed to # be stopped. Since newer version has a different locking # mechanism, we can end up with both versions running. # In order to avoid this situation we refuse to startup. sys.exit("process {} is already running".format(pid)) else: # pidfile exists with a PID for a process that is not running. # Let's update PID. print("updating stale processID({}) in pidfile".format(pid)) write_pid(pidfile) except FileNotFoundError: # Either it's 1st time we run or previous run was terminated # successfully. print("creating pidfile {f}".format(f=pidfile)) write_pid(pidfile) except OSError as exc: sys.exit("failed to update pidfile:{e}".format(e=exc))
[ "def", "update_pidfile", "(", "pidfile", ")", ":", "try", ":", "with", "open", "(", "pidfile", ",", "mode", "=", "'r'", ")", "as", "_file", ":", "pid", "=", "_file", ".", "read", "(", "1024", ")", ".", "rstrip", "(", ")", "try", ":", "pid", "=", "int", "(", "pid", ")", "except", "ValueError", ":", "print", "(", "\"cleaning stale pidfile with invalid data:'{}'\"", ".", "format", "(", "pid", ")", ")", "write_pid", "(", "pidfile", ")", "else", ":", "if", "running", "(", "pid", ")", ":", "# This is to catch migration issues from 0.7.x to 0.8.x", "# version, where old process is still around as it failed to", "# be stopped. Since newer version has a different locking", "# mechanism, we can end up with both versions running.", "# In order to avoid this situation we refuse to startup.", "sys", ".", "exit", "(", "\"process {} is already running\"", ".", "format", "(", "pid", ")", ")", "else", ":", "# pidfile exists with a PID for a process that is not running.", "# Let's update PID.", "print", "(", "\"updating stale processID({}) in pidfile\"", ".", "format", "(", "pid", ")", ")", "write_pid", "(", "pidfile", ")", "except", "FileNotFoundError", ":", "# Either it's 1st time we run or previous run was terminated", "# successfully.", "print", "(", "\"creating pidfile {f}\"", ".", "format", "(", "f", "=", "pidfile", ")", ")", "write_pid", "(", "pidfile", ")", "except", "OSError", "as", "exc", ":", "sys", ".", "exit", "(", "\"failed to update pidfile:{e}\"", ".", "format", "(", "e", "=", "exc", ")", ")" ]
Update pidfile. Notice: We should call this function only after we have successfully acquired a lock and never before. It exits main program if it fails to parse and/or write pidfile. Arguments: pidfile (str): pidfile to update
[ "Update", "pidfile", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L864-L904
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
write_pid
def write_pid(pidfile): """Write processID to the pidfile. Notice: It exits main program if it fails to write pidfile. Arguments: pidfile (str): pidfile to update """ pid = str(os.getpid()) try: with open(pidfile, mode='w') as _file: print("writing processID {p} to pidfile".format(p=pid)) _file.write(pid) except OSError as exc: sys.exit("failed to write pidfile:{e}".format(e=exc))
python
def write_pid(pidfile): """Write processID to the pidfile. Notice: It exits main program if it fails to write pidfile. Arguments: pidfile (str): pidfile to update """ pid = str(os.getpid()) try: with open(pidfile, mode='w') as _file: print("writing processID {p} to pidfile".format(p=pid)) _file.write(pid) except OSError as exc: sys.exit("failed to write pidfile:{e}".format(e=exc))
[ "def", "write_pid", "(", "pidfile", ")", ":", "pid", "=", "str", "(", "os", ".", "getpid", "(", ")", ")", "try", ":", "with", "open", "(", "pidfile", ",", "mode", "=", "'w'", ")", "as", "_file", ":", "print", "(", "\"writing processID {p} to pidfile\"", ".", "format", "(", "p", "=", "pid", ")", ")", "_file", ".", "write", "(", "pid", ")", "except", "OSError", "as", "exc", ":", "sys", ".", "exit", "(", "\"failed to write pidfile:{e}\"", ".", "format", "(", "e", "=", "exc", ")", ")" ]
Write processID to the pidfile. Notice: It exits main program if it fails to write pidfile. Arguments: pidfile (str): pidfile to update
[ "Write", "processID", "to", "the", "pidfile", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L907-L923
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
shutdown
def shutdown(pidfile, signalnb=None, frame=None): """Clean up pidfile upon shutdown. Notice: We should register this function as signal handler for the following termination signals: SIGHUP SIGTERM SIGABRT SIGINT Arguments: pidfile (str): pidfile to remove signalnb (int): The ID of signal frame (obj): Frame object at the time of receiving the signal """ log = logging.getLogger(PROGRAM_NAME) log.info("received %s at %s", signalnb, frame) log.info("going to remove pidfile %s", pidfile) # no point to catch possible errors when we delete the pid file os.unlink(pidfile) log.info('shutdown is complete') sys.exit(0)
python
def shutdown(pidfile, signalnb=None, frame=None): """Clean up pidfile upon shutdown. Notice: We should register this function as signal handler for the following termination signals: SIGHUP SIGTERM SIGABRT SIGINT Arguments: pidfile (str): pidfile to remove signalnb (int): The ID of signal frame (obj): Frame object at the time of receiving the signal """ log = logging.getLogger(PROGRAM_NAME) log.info("received %s at %s", signalnb, frame) log.info("going to remove pidfile %s", pidfile) # no point to catch possible errors when we delete the pid file os.unlink(pidfile) log.info('shutdown is complete') sys.exit(0)
[ "def", "shutdown", "(", "pidfile", ",", "signalnb", "=", "None", ",", "frame", "=", "None", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "PROGRAM_NAME", ")", "log", ".", "info", "(", "\"received %s at %s\"", ",", "signalnb", ",", "frame", ")", "log", ".", "info", "(", "\"going to remove pidfile %s\"", ",", "pidfile", ")", "# no point to catch possible errors when we delete the pid file", "os", ".", "unlink", "(", "pidfile", ")", "log", ".", "info", "(", "'shutdown is complete'", ")", "sys", ".", "exit", "(", "0", ")" ]
Clean up pidfile upon shutdown. Notice: We should register this function as signal handler for the following termination signals: SIGHUP SIGTERM SIGABRT SIGINT Arguments: pidfile (str): pidfile to remove signalnb (int): The ID of signal frame (obj): Frame object at the time of receiving the signal
[ "Clean", "up", "pidfile", "upon", "shutdown", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L926-L949
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
setup_logger
def setup_logger(config): """Configure the logging environment. Notice: By default logging will go to STDOUT and messages for unhandled exceptions or crashes will go to STDERR. If log_file and/or log_server is set then we don't log to STDOUT. Messages for unhandled exceptions or crashes can only go to either STDERR or to stderr_file or to stderr_log_server. Arguments: config (obj): A configparser object which holds our configuration. Returns: A logger with all possible handlers configured. """ logger = logging.getLogger(PROGRAM_NAME) num_level = getattr( logging, config.get('daemon', 'loglevel').upper(), # pylint: disable=no-member None ) logger.setLevel(num_level) lengths = [] for section in config: lengths.append(len(section)) width = sorted(lengths)[-1] + 1 def log_format(): """Produce a log format line.""" supported_keys = [ 'asctime', 'levelname', 'process', # 'funcName', # 'lineno', 'threadName', 'message', ] return ' '.join(['%({0:s})'.format(i) for i in supported_keys]) custom_format = log_format() json_formatter = CustomJsonFormatter(custom_format, prefix=PROGRAM_NAME + ': ') formatter = logging.Formatter( '%(asctime)s {program}[%(process)d] %(levelname)-8s ' '%(threadName)-{width}s %(message)s' .format(program=PROGRAM_NAME, width=width) ) # Register logging handlers based on configuration. 
if config.has_option('daemon', 'log_file'): file_handler = logging.handlers.RotatingFileHandler( config.get('daemon', 'log_file'), maxBytes=config.getint('daemon', 'log_maxbytes'), backupCount=config.getint('daemon', 'log_backups') ) if config.getboolean('daemon', 'json_log_file'): file_handler.setFormatter(json_formatter) else: file_handler.setFormatter(formatter) logger.addHandler(file_handler) if config.has_option('daemon', 'log_server'): udp_handler = logging.handlers.SysLogHandler( ( config.get('daemon', 'log_server'), config.getint('daemon', 'log_server_port') ) ) if config.getboolean('daemon', 'json_log_server'): udp_handler.setFormatter(json_formatter) else: udp_handler.setFormatter(formatter) logger.addHandler(udp_handler) # Log to STDOUT if and only if log_file and log_server aren't enabled if (not config.has_option('daemon', 'log_file') and not config.has_option('daemon', 'log_server')): stream_handler = logging.StreamHandler() if config.getboolean('daemon', 'json_stdout'): stream_handler.setFormatter(json_formatter) else: stream_handler.setFormatter(formatter) logger.addHandler(stream_handler) # We can redirect STDERR only to one destination. if config.has_option('daemon', 'stderr_file'): sys.stderr = CustomRotatingFileLogger( filepath=config.get('daemon', 'stderr_file'), maxbytes=config.getint('daemon', 'log_maxbytes'), backupcount=config.getint('daemon', 'log_backups') ) elif (config.has_option('daemon', 'stderr_log_server') and not config.has_option('daemon', 'stderr_file')): sys.stderr = CustomUdpLogger( server=config.get('daemon', 'log_server'), port=config.getint('daemon', 'log_server_port') ) else: print('messages for unhandled exceptions will go to STDERR') return logger
python
def setup_logger(config): """Configure the logging environment. Notice: By default logging will go to STDOUT and messages for unhandled exceptions or crashes will go to STDERR. If log_file and/or log_server is set then we don't log to STDOUT. Messages for unhandled exceptions or crashes can only go to either STDERR or to stderr_file or to stderr_log_server. Arguments: config (obj): A configparser object which holds our configuration. Returns: A logger with all possible handlers configured. """ logger = logging.getLogger(PROGRAM_NAME) num_level = getattr( logging, config.get('daemon', 'loglevel').upper(), # pylint: disable=no-member None ) logger.setLevel(num_level) lengths = [] for section in config: lengths.append(len(section)) width = sorted(lengths)[-1] + 1 def log_format(): """Produce a log format line.""" supported_keys = [ 'asctime', 'levelname', 'process', # 'funcName', # 'lineno', 'threadName', 'message', ] return ' '.join(['%({0:s})'.format(i) for i in supported_keys]) custom_format = log_format() json_formatter = CustomJsonFormatter(custom_format, prefix=PROGRAM_NAME + ': ') formatter = logging.Formatter( '%(asctime)s {program}[%(process)d] %(levelname)-8s ' '%(threadName)-{width}s %(message)s' .format(program=PROGRAM_NAME, width=width) ) # Register logging handlers based on configuration. 
if config.has_option('daemon', 'log_file'): file_handler = logging.handlers.RotatingFileHandler( config.get('daemon', 'log_file'), maxBytes=config.getint('daemon', 'log_maxbytes'), backupCount=config.getint('daemon', 'log_backups') ) if config.getboolean('daemon', 'json_log_file'): file_handler.setFormatter(json_formatter) else: file_handler.setFormatter(formatter) logger.addHandler(file_handler) if config.has_option('daemon', 'log_server'): udp_handler = logging.handlers.SysLogHandler( ( config.get('daemon', 'log_server'), config.getint('daemon', 'log_server_port') ) ) if config.getboolean('daemon', 'json_log_server'): udp_handler.setFormatter(json_formatter) else: udp_handler.setFormatter(formatter) logger.addHandler(udp_handler) # Log to STDOUT if and only if log_file and log_server aren't enabled if (not config.has_option('daemon', 'log_file') and not config.has_option('daemon', 'log_server')): stream_handler = logging.StreamHandler() if config.getboolean('daemon', 'json_stdout'): stream_handler.setFormatter(json_formatter) else: stream_handler.setFormatter(formatter) logger.addHandler(stream_handler) # We can redirect STDERR only to one destination. if config.has_option('daemon', 'stderr_file'): sys.stderr = CustomRotatingFileLogger( filepath=config.get('daemon', 'stderr_file'), maxbytes=config.getint('daemon', 'log_maxbytes'), backupcount=config.getint('daemon', 'log_backups') ) elif (config.has_option('daemon', 'stderr_log_server') and not config.has_option('daemon', 'stderr_file')): sys.stderr = CustomUdpLogger( server=config.get('daemon', 'log_server'), port=config.getint('daemon', 'log_server_port') ) else: print('messages for unhandled exceptions will go to STDERR') return logger
[ "def", "setup_logger", "(", "config", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "PROGRAM_NAME", ")", "num_level", "=", "getattr", "(", "logging", ",", "config", ".", "get", "(", "'daemon'", ",", "'loglevel'", ")", ".", "upper", "(", ")", ",", "# pylint: disable=no-member", "None", ")", "logger", ".", "setLevel", "(", "num_level", ")", "lengths", "=", "[", "]", "for", "section", "in", "config", ":", "lengths", ".", "append", "(", "len", "(", "section", ")", ")", "width", "=", "sorted", "(", "lengths", ")", "[", "-", "1", "]", "+", "1", "def", "log_format", "(", ")", ":", "\"\"\"Produce a log format line.\"\"\"", "supported_keys", "=", "[", "'asctime'", ",", "'levelname'", ",", "'process'", ",", "# 'funcName',", "# 'lineno',", "'threadName'", ",", "'message'", ",", "]", "return", "' '", ".", "join", "(", "[", "'%({0:s})'", ".", "format", "(", "i", ")", "for", "i", "in", "supported_keys", "]", ")", "custom_format", "=", "log_format", "(", ")", "json_formatter", "=", "CustomJsonFormatter", "(", "custom_format", ",", "prefix", "=", "PROGRAM_NAME", "+", "': '", ")", "formatter", "=", "logging", ".", "Formatter", "(", "'%(asctime)s {program}[%(process)d] %(levelname)-8s '", "'%(threadName)-{width}s %(message)s'", ".", "format", "(", "program", "=", "PROGRAM_NAME", ",", "width", "=", "width", ")", ")", "# Register logging handlers based on configuration.", "if", "config", ".", "has_option", "(", "'daemon'", ",", "'log_file'", ")", ":", "file_handler", "=", "logging", ".", "handlers", ".", "RotatingFileHandler", "(", "config", ".", "get", "(", "'daemon'", ",", "'log_file'", ")", ",", "maxBytes", "=", "config", ".", "getint", "(", "'daemon'", ",", "'log_maxbytes'", ")", ",", "backupCount", "=", "config", ".", "getint", "(", "'daemon'", ",", "'log_backups'", ")", ")", "if", "config", ".", "getboolean", "(", "'daemon'", ",", "'json_log_file'", ")", ":", "file_handler", ".", "setFormatter", "(", "json_formatter", ")", "else", ":", "file_handler", ".", 
"setFormatter", "(", "formatter", ")", "logger", ".", "addHandler", "(", "file_handler", ")", "if", "config", ".", "has_option", "(", "'daemon'", ",", "'log_server'", ")", ":", "udp_handler", "=", "logging", ".", "handlers", ".", "SysLogHandler", "(", "(", "config", ".", "get", "(", "'daemon'", ",", "'log_server'", ")", ",", "config", ".", "getint", "(", "'daemon'", ",", "'log_server_port'", ")", ")", ")", "if", "config", ".", "getboolean", "(", "'daemon'", ",", "'json_log_server'", ")", ":", "udp_handler", ".", "setFormatter", "(", "json_formatter", ")", "else", ":", "udp_handler", ".", "setFormatter", "(", "formatter", ")", "logger", ".", "addHandler", "(", "udp_handler", ")", "# Log to STDOUT if and only if log_file and log_server aren't enabled", "if", "(", "not", "config", ".", "has_option", "(", "'daemon'", ",", "'log_file'", ")", "and", "not", "config", ".", "has_option", "(", "'daemon'", ",", "'log_server'", ")", ")", ":", "stream_handler", "=", "logging", ".", "StreamHandler", "(", ")", "if", "config", ".", "getboolean", "(", "'daemon'", ",", "'json_stdout'", ")", ":", "stream_handler", ".", "setFormatter", "(", "json_formatter", ")", "else", ":", "stream_handler", ".", "setFormatter", "(", "formatter", ")", "logger", ".", "addHandler", "(", "stream_handler", ")", "# We can redirect STDERR only to one destination.", "if", "config", ".", "has_option", "(", "'daemon'", ",", "'stderr_file'", ")", ":", "sys", ".", "stderr", "=", "CustomRotatingFileLogger", "(", "filepath", "=", "config", ".", "get", "(", "'daemon'", ",", "'stderr_file'", ")", ",", "maxbytes", "=", "config", ".", "getint", "(", "'daemon'", ",", "'log_maxbytes'", ")", ",", "backupcount", "=", "config", ".", "getint", "(", "'daemon'", ",", "'log_backups'", ")", ")", "elif", "(", "config", ".", "has_option", "(", "'daemon'", ",", "'stderr_log_server'", ")", "and", "not", "config", ".", "has_option", "(", "'daemon'", ",", "'stderr_file'", ")", ")", ":", "sys", ".", "stderr", "=", "CustomUdpLogger", 
"(", "server", "=", "config", ".", "get", "(", "'daemon'", ",", "'log_server'", ")", ",", "port", "=", "config", ".", "getint", "(", "'daemon'", ",", "'log_server_port'", ")", ")", "else", ":", "print", "(", "'messages for unhandled exceptions will go to STDERR'", ")", "return", "logger" ]
Configure the logging environment. Notice: By default logging will go to STDOUT and messages for unhandled exceptions or crashes will go to STDERR. If log_file and/or log_server is set then we don't log to STDOUT. Messages for unhandled exceptions or crashes can only go to either STDERR or to stderr_file or to stderr_log_server. Arguments: config (obj): A configparser object which holds our configuration. Returns: A logger with all possible handlers configured.
[ "Configure", "the", "logging", "environment", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L952-L1059
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
run_custom_bird_reconfigure
def run_custom_bird_reconfigure(operation): """Reconfigure BIRD daemon by running a custom command. It adds one argument to the command, either "up" or "down". If command times out then we kill it. In order to avoid leaving any orphan processes, that may have been started by the command, we start a new session when we invoke the command and then we kill process group of that session. Arguments: operation (obj): Either a AddOperation or DeleteOperation object. """ log = logging.getLogger(PROGRAM_NAME) if isinstance(operation, AddOperation): status = 'up' else: status = 'down' cmd = shlex.split(operation.bird_reconfigure_cmd + " " + status) log.info("reconfiguring BIRD by running custom command %s", ' '.join(cmd)) try: proc = subprocess.Popen(cmd, start_new_session=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) _, errs = proc.communicate( timeout=operation.bird_reconfigure_timeout ) except OSError as exc: log.error("reconfiguring BIRD failed with: %s", exc) except subprocess.TimeoutExpired as exc: log.error("reconfiguring bird timed out") if proc.poll() is None: # if process is still alive try: os.killpg(os.getpgid(proc.pid), signal.SIGTERM) except PermissionError as exc: log.error("failed to terminate custom bird command: %s", exc) else: if proc.returncode != 0: log.error("reconfiguring BIRD failed with return code: %s and " "stderr: %s", proc.returncode, errs) else: log.info("custom command successfully reconfigured Bird")
python
def run_custom_bird_reconfigure(operation): """Reconfigure BIRD daemon by running a custom command. It adds one argument to the command, either "up" or "down". If command times out then we kill it. In order to avoid leaving any orphan processes, that may have been started by the command, we start a new session when we invoke the command and then we kill process group of that session. Arguments: operation (obj): Either a AddOperation or DeleteOperation object. """ log = logging.getLogger(PROGRAM_NAME) if isinstance(operation, AddOperation): status = 'up' else: status = 'down' cmd = shlex.split(operation.bird_reconfigure_cmd + " " + status) log.info("reconfiguring BIRD by running custom command %s", ' '.join(cmd)) try: proc = subprocess.Popen(cmd, start_new_session=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) _, errs = proc.communicate( timeout=operation.bird_reconfigure_timeout ) except OSError as exc: log.error("reconfiguring BIRD failed with: %s", exc) except subprocess.TimeoutExpired as exc: log.error("reconfiguring bird timed out") if proc.poll() is None: # if process is still alive try: os.killpg(os.getpgid(proc.pid), signal.SIGTERM) except PermissionError as exc: log.error("failed to terminate custom bird command: %s", exc) else: if proc.returncode != 0: log.error("reconfiguring BIRD failed with return code: %s and " "stderr: %s", proc.returncode, errs) else: log.info("custom command successfully reconfigured Bird")
[ "def", "run_custom_bird_reconfigure", "(", "operation", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "PROGRAM_NAME", ")", "if", "isinstance", "(", "operation", ",", "AddOperation", ")", ":", "status", "=", "'up'", "else", ":", "status", "=", "'down'", "cmd", "=", "shlex", ".", "split", "(", "operation", ".", "bird_reconfigure_cmd", "+", "\" \"", "+", "status", ")", "log", ".", "info", "(", "\"reconfiguring BIRD by running custom command %s\"", ",", "' '", ".", "join", "(", "cmd", ")", ")", "try", ":", "proc", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "start_new_session", "=", "True", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "_", ",", "errs", "=", "proc", ".", "communicate", "(", "timeout", "=", "operation", ".", "bird_reconfigure_timeout", ")", "except", "OSError", "as", "exc", ":", "log", ".", "error", "(", "\"reconfiguring BIRD failed with: %s\"", ",", "exc", ")", "except", "subprocess", ".", "TimeoutExpired", "as", "exc", ":", "log", ".", "error", "(", "\"reconfiguring bird timed out\"", ")", "if", "proc", ".", "poll", "(", ")", "is", "None", ":", "# if process is still alive", "try", ":", "os", ".", "killpg", "(", "os", ".", "getpgid", "(", "proc", ".", "pid", ")", ",", "signal", ".", "SIGTERM", ")", "except", "PermissionError", "as", "exc", ":", "log", ".", "error", "(", "\"failed to terminate custom bird command: %s\"", ",", "exc", ")", "else", ":", "if", "proc", ".", "returncode", "!=", "0", ":", "log", ".", "error", "(", "\"reconfiguring BIRD failed with return code: %s and \"", "\"stderr: %s\"", ",", "proc", ".", "returncode", ",", "errs", ")", "else", ":", "log", ".", "info", "(", "\"custom command successfully reconfigured Bird\"", ")" ]
Reconfigure BIRD daemon by running a custom command. It adds one argument to the command, either "up" or "down". If command times out then we kill it. In order to avoid leaving any orphan processes, that may have been started by the command, we start a new session when we invoke the command and then we kill process group of that session. Arguments: operation (obj): Either a AddOperation or DeleteOperation object.
[ "Reconfigure", "BIRD", "daemon", "by", "running", "a", "custom", "command", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L1206-L1248
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
AddOperation.update
def update(self, prefixes): """Add a value to the list. Arguments: prefixes(list): A list to add the value """ if self.ip_prefix not in prefixes: prefixes.append(self.ip_prefix) self.log.info("announcing %s for %s", self.ip_prefix, self.name) return True return False
python
def update(self, prefixes): """Add a value to the list. Arguments: prefixes(list): A list to add the value """ if self.ip_prefix not in prefixes: prefixes.append(self.ip_prefix) self.log.info("announcing %s for %s", self.ip_prefix, self.name) return True return False
[ "def", "update", "(", "self", ",", "prefixes", ")", ":", "if", "self", ".", "ip_prefix", "not", "in", "prefixes", ":", "prefixes", ".", "append", "(", "self", ".", "ip_prefix", ")", "self", ".", "log", ".", "info", "(", "\"announcing %s for %s\"", ",", "self", ".", "ip_prefix", ",", "self", ".", "name", ")", "return", "True", "return", "False" ]
Add a value to the list. Arguments: prefixes(list): A list to add the value
[ "Add", "a", "value", "to", "the", "list", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L690-L701
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
CustomLogger.write
def write(self, string): """Erase newline from a string and write to the logger.""" string = string.rstrip() if string: # Don't log empty lines self.logger.critical(string)
python
def write(self, string): """Erase newline from a string and write to the logger.""" string = string.rstrip() if string: # Don't log empty lines self.logger.critical(string)
[ "def", "write", "(", "self", ",", "string", ")", ":", "string", "=", "string", ".", "rstrip", "(", ")", "if", "string", ":", "# Don't log empty lines", "self", ".", "logger", ".", "critical", "(", "string", ")" ]
Erase newline from a string and write to the logger.
[ "Erase", "newline", "from", "a", "string", "and", "write", "to", "the", "logger", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L1097-L1101
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/utils.py
CustomJsonFormatter.process_log_record
def process_log_record(self, log_record): """Add customer record keys and rename threadName key.""" log_record["version"] = __version__ log_record["program"] = PROGRAM_NAME log_record["service_name"] = log_record.pop('threadName', None) # return jsonlogger.JsonFormatter.process_log_record(self, log_record) return log_record
python
def process_log_record(self, log_record): """Add customer record keys and rename threadName key.""" log_record["version"] = __version__ log_record["program"] = PROGRAM_NAME log_record["service_name"] = log_record.pop('threadName', None) # return jsonlogger.JsonFormatter.process_log_record(self, log_record) return log_record
[ "def", "process_log_record", "(", "self", ",", "log_record", ")", ":", "log_record", "[", "\"version\"", "]", "=", "__version__", "log_record", "[", "\"program\"", "]", "=", "PROGRAM_NAME", "log_record", "[", "\"service_name\"", "]", "=", "log_record", ".", "pop", "(", "'threadName'", ",", "None", ")", "# return jsonlogger.JsonFormatter.process_log_record(self, log_record)", "return", "log_record" ]
Add customer record keys and rename threadName key.
[ "Add", "customer", "record", "keys", "and", "rename", "threadName", "key", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/utils.py#L1176-L1183
train
sashahart/vex
vex/main.py
get_vexrc
def get_vexrc(options, environ): """Get a representation of the contents of the config file. :returns: a Vexrc instance. """ # Complain if user specified nonexistent file with --config. # But we don't want to complain just because ~/.vexrc doesn't exist. if options.config and not os.path.exists(options.config): raise exceptions.InvalidVexrc("nonexistent config: {0!r}".format(options.config)) filename = options.config or os.path.expanduser('~/.vexrc') vexrc = config.Vexrc.from_file(filename, environ) return vexrc
python
def get_vexrc(options, environ): """Get a representation of the contents of the config file. :returns: a Vexrc instance. """ # Complain if user specified nonexistent file with --config. # But we don't want to complain just because ~/.vexrc doesn't exist. if options.config and not os.path.exists(options.config): raise exceptions.InvalidVexrc("nonexistent config: {0!r}".format(options.config)) filename = options.config or os.path.expanduser('~/.vexrc') vexrc = config.Vexrc.from_file(filename, environ) return vexrc
[ "def", "get_vexrc", "(", "options", ",", "environ", ")", ":", "# Complain if user specified nonexistent file with --config.", "# But we don't want to complain just because ~/.vexrc doesn't exist.", "if", "options", ".", "config", "and", "not", "os", ".", "path", ".", "exists", "(", "options", ".", "config", ")", ":", "raise", "exceptions", ".", "InvalidVexrc", "(", "\"nonexistent config: {0!r}\"", ".", "format", "(", "options", ".", "config", ")", ")", "filename", "=", "options", ".", "config", "or", "os", ".", "path", ".", "expanduser", "(", "'~/.vexrc'", ")", "vexrc", "=", "config", ".", "Vexrc", ".", "from_file", "(", "filename", ",", "environ", ")", "return", "vexrc" ]
Get a representation of the contents of the config file. :returns: a Vexrc instance.
[ "Get", "a", "representation", "of", "the", "contents", "of", "the", "config", "file", "." ]
b7680c40897b8cbe6aae55ec9812b4fb11738192
https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/main.py#L15-L27
train
sashahart/vex
vex/main.py
get_cwd
def get_cwd(options): """Discover what directory the command should run in. """ if not options.cwd: return None if not os.path.exists(options.cwd): raise exceptions.InvalidCwd( "can't --cwd to invalid path {0!r}".format(options.cwd)) return options.cwd
python
def get_cwd(options): """Discover what directory the command should run in. """ if not options.cwd: return None if not os.path.exists(options.cwd): raise exceptions.InvalidCwd( "can't --cwd to invalid path {0!r}".format(options.cwd)) return options.cwd
[ "def", "get_cwd", "(", "options", ")", ":", "if", "not", "options", ".", "cwd", ":", "return", "None", "if", "not", "os", ".", "path", ".", "exists", "(", "options", ".", "cwd", ")", ":", "raise", "exceptions", ".", "InvalidCwd", "(", "\"can't --cwd to invalid path {0!r}\"", ".", "format", "(", "options", ".", "cwd", ")", ")", "return", "options", ".", "cwd" ]
Discover what directory the command should run in.
[ "Discover", "what", "directory", "the", "command", "should", "run", "in", "." ]
b7680c40897b8cbe6aae55ec9812b4fb11738192
https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/main.py#L30-L38
train
sashahart/vex
vex/main.py
get_virtualenv_path
def get_virtualenv_path(ve_base, ve_name): """Check a virtualenv path, raising exceptions to explain problems. """ if not ve_base: raise exceptions.NoVirtualenvsDirectory( "could not figure out a virtualenvs directory. " "make sure $HOME is set, or $WORKON_HOME," " or set virtualenvs=something in your .vexrc") # Using this requires get_ve_base to pass through nonexistent dirs if not os.path.exists(ve_base): message = ( "virtualenvs directory {0!r} not found. " "Create it or use vex --make to get started." ).format(ve_base) raise exceptions.NoVirtualenvsDirectory(message) if not ve_name: raise exceptions.InvalidVirtualenv("no virtualenv name") # n.b.: if ve_name is absolute, ve_base is discarded by os.path.join, # and an absolute path will be accepted as first arg. # So we check if they gave an absolute path as ve_name. # But we don't want this error if $PWD == $WORKON_HOME, # in which case 'foo' is a valid relative path to virtualenv foo. ve_path = os.path.join(ve_base, ve_name) if ve_path == ve_name and os.path.basename(ve_name) != ve_name: raise exceptions.InvalidVirtualenv( 'To run in a virtualenv by its path, ' 'use "vex --path {0}"'.format(ve_path)) ve_path = os.path.abspath(ve_path) if not os.path.exists(ve_path): raise exceptions.InvalidVirtualenv( "no virtualenv found at {0!r}.".format(ve_path)) return ve_path
python
def get_virtualenv_path(ve_base, ve_name): """Check a virtualenv path, raising exceptions to explain problems. """ if not ve_base: raise exceptions.NoVirtualenvsDirectory( "could not figure out a virtualenvs directory. " "make sure $HOME is set, or $WORKON_HOME," " or set virtualenvs=something in your .vexrc") # Using this requires get_ve_base to pass through nonexistent dirs if not os.path.exists(ve_base): message = ( "virtualenvs directory {0!r} not found. " "Create it or use vex --make to get started." ).format(ve_base) raise exceptions.NoVirtualenvsDirectory(message) if not ve_name: raise exceptions.InvalidVirtualenv("no virtualenv name") # n.b.: if ve_name is absolute, ve_base is discarded by os.path.join, # and an absolute path will be accepted as first arg. # So we check if they gave an absolute path as ve_name. # But we don't want this error if $PWD == $WORKON_HOME, # in which case 'foo' is a valid relative path to virtualenv foo. ve_path = os.path.join(ve_base, ve_name) if ve_path == ve_name and os.path.basename(ve_name) != ve_name: raise exceptions.InvalidVirtualenv( 'To run in a virtualenv by its path, ' 'use "vex --path {0}"'.format(ve_path)) ve_path = os.path.abspath(ve_path) if not os.path.exists(ve_path): raise exceptions.InvalidVirtualenv( "no virtualenv found at {0!r}.".format(ve_path)) return ve_path
[ "def", "get_virtualenv_path", "(", "ve_base", ",", "ve_name", ")", ":", "if", "not", "ve_base", ":", "raise", "exceptions", ".", "NoVirtualenvsDirectory", "(", "\"could not figure out a virtualenvs directory. \"", "\"make sure $HOME is set, or $WORKON_HOME,\"", "\" or set virtualenvs=something in your .vexrc\"", ")", "# Using this requires get_ve_base to pass through nonexistent dirs", "if", "not", "os", ".", "path", ".", "exists", "(", "ve_base", ")", ":", "message", "=", "(", "\"virtualenvs directory {0!r} not found. \"", "\"Create it or use vex --make to get started.\"", ")", ".", "format", "(", "ve_base", ")", "raise", "exceptions", ".", "NoVirtualenvsDirectory", "(", "message", ")", "if", "not", "ve_name", ":", "raise", "exceptions", ".", "InvalidVirtualenv", "(", "\"no virtualenv name\"", ")", "# n.b.: if ve_name is absolute, ve_base is discarded by os.path.join,", "# and an absolute path will be accepted as first arg.", "# So we check if they gave an absolute path as ve_name.", "# But we don't want this error if $PWD == $WORKON_HOME,", "# in which case 'foo' is a valid relative path to virtualenv foo.", "ve_path", "=", "os", ".", "path", ".", "join", "(", "ve_base", ",", "ve_name", ")", "if", "ve_path", "==", "ve_name", "and", "os", ".", "path", ".", "basename", "(", "ve_name", ")", "!=", "ve_name", ":", "raise", "exceptions", ".", "InvalidVirtualenv", "(", "'To run in a virtualenv by its path, '", "'use \"vex --path {0}\"'", ".", "format", "(", "ve_path", ")", ")", "ve_path", "=", "os", ".", "path", ".", "abspath", "(", "ve_path", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "ve_path", ")", ":", "raise", "exceptions", ".", "InvalidVirtualenv", "(", "\"no virtualenv found at {0!r}.\"", ".", "format", "(", "ve_path", ")", ")", "return", "ve_path" ]
Check a virtualenv path, raising exceptions to explain problems.
[ "Check", "a", "virtualenv", "path", "raising", "exceptions", "to", "explain", "problems", "." ]
b7680c40897b8cbe6aae55ec9812b4fb11738192
https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/main.py#L53-L88
train
sashahart/vex
vex/main.py
get_command
def get_command(options, vexrc, environ): """Get a command to run. :returns: a list of strings representing a command to be passed to Popen. """ command = options.rest if not command: command = vexrc.get_shell(environ) if command and command[0].startswith('--'): raise exceptions.InvalidCommand( "don't put flags like '%s' after the virtualenv name." % command[0]) if not command: raise exceptions.InvalidCommand("no command given") return command
python
def get_command(options, vexrc, environ): """Get a command to run. :returns: a list of strings representing a command to be passed to Popen. """ command = options.rest if not command: command = vexrc.get_shell(environ) if command and command[0].startswith('--'): raise exceptions.InvalidCommand( "don't put flags like '%s' after the virtualenv name." % command[0]) if not command: raise exceptions.InvalidCommand("no command given") return command
[ "def", "get_command", "(", "options", ",", "vexrc", ",", "environ", ")", ":", "command", "=", "options", ".", "rest", "if", "not", "command", ":", "command", "=", "vexrc", ".", "get_shell", "(", "environ", ")", "if", "command", "and", "command", "[", "0", "]", ".", "startswith", "(", "'--'", ")", ":", "raise", "exceptions", ".", "InvalidCommand", "(", "\"don't put flags like '%s' after the virtualenv name.\"", "%", "command", "[", "0", "]", ")", "if", "not", "command", ":", "raise", "exceptions", ".", "InvalidCommand", "(", "\"no command given\"", ")", "return", "command" ]
Get a command to run. :returns: a list of strings representing a command to be passed to Popen.
[ "Get", "a", "command", "to", "run", "." ]
b7680c40897b8cbe6aae55ec9812b4fb11738192
https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/main.py#L91-L106
train
sashahart/vex
vex/main.py
main
def main(): """The main command-line entry point, with system interactions. """ argv = sys.argv[1:] returncode = 1 try: returncode = _main(os.environ, argv) except exceptions.InvalidArgument as error: if error.message: sys.stderr.write("Error: " + error.message + '\n') else: raise sys.exit(returncode)
python
def main(): """The main command-line entry point, with system interactions. """ argv = sys.argv[1:] returncode = 1 try: returncode = _main(os.environ, argv) except exceptions.InvalidArgument as error: if error.message: sys.stderr.write("Error: " + error.message + '\n') else: raise sys.exit(returncode)
[ "def", "main", "(", ")", ":", "argv", "=", "sys", ".", "argv", "[", "1", ":", "]", "returncode", "=", "1", "try", ":", "returncode", "=", "_main", "(", "os", ".", "environ", ",", "argv", ")", "except", "exceptions", ".", "InvalidArgument", "as", "error", ":", "if", "error", ".", "message", ":", "sys", ".", "stderr", ".", "write", "(", "\"Error: \"", "+", "error", ".", "message", "+", "'\\n'", ")", "else", ":", "raise", "sys", ".", "exit", "(", "returncode", ")" ]
The main command-line entry point, with system interactions.
[ "The", "main", "command", "-", "line", "entry", "point", "with", "system", "interactions", "." ]
b7680c40897b8cbe6aae55ec9812b4fb11738192
https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/main.py#L185-L197
train
unixsurfer/anycast_healthchecker
contrib/nagios/check_anycast_healthchecker.py
get_processid
def get_processid(config): """Return process id of anycast-healthchecker. Arguments: config (obj): A configparser object with the configuration of anycast-healthchecker. Returns: The process id found in the pid file Raises: ValueError in the following cases - pidfile option is missing from the configuration - pid is either -1 or 1 - stale pidfile, either with no data or invalid data - failure to read pidfile """ pidfile = config.get('daemon', 'pidfile', fallback=None) if pidfile is None: raise ValueError("Configuration doesn't have pidfile option!") try: with open(pidfile, 'r') as _file: pid = _file.read().rstrip() try: pid = int(pid) except ValueError: raise ValueError("stale pid file with invalid data:{}" .format(pid)) else: if pid in [-1, 1]: raise ValueError("invalid PID ({})".format(pid)) else: return pid except OSError as exc: if exc.errno == 2: print("CRITICAL: anycast-healthchecker could be down as pid file " "{} doesn't exist".format(pidfile)) sys.exit(2) else: raise ValueError("error while reading pid file:{}".format(exc))
python
def get_processid(config): """Return process id of anycast-healthchecker. Arguments: config (obj): A configparser object with the configuration of anycast-healthchecker. Returns: The process id found in the pid file Raises: ValueError in the following cases - pidfile option is missing from the configuration - pid is either -1 or 1 - stale pidfile, either with no data or invalid data - failure to read pidfile """ pidfile = config.get('daemon', 'pidfile', fallback=None) if pidfile is None: raise ValueError("Configuration doesn't have pidfile option!") try: with open(pidfile, 'r') as _file: pid = _file.read().rstrip() try: pid = int(pid) except ValueError: raise ValueError("stale pid file with invalid data:{}" .format(pid)) else: if pid in [-1, 1]: raise ValueError("invalid PID ({})".format(pid)) else: return pid except OSError as exc: if exc.errno == 2: print("CRITICAL: anycast-healthchecker could be down as pid file " "{} doesn't exist".format(pidfile)) sys.exit(2) else: raise ValueError("error while reading pid file:{}".format(exc))
[ "def", "get_processid", "(", "config", ")", ":", "pidfile", "=", "config", ".", "get", "(", "'daemon'", ",", "'pidfile'", ",", "fallback", "=", "None", ")", "if", "pidfile", "is", "None", ":", "raise", "ValueError", "(", "\"Configuration doesn't have pidfile option!\"", ")", "try", ":", "with", "open", "(", "pidfile", ",", "'r'", ")", "as", "_file", ":", "pid", "=", "_file", ".", "read", "(", ")", ".", "rstrip", "(", ")", "try", ":", "pid", "=", "int", "(", "pid", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "\"stale pid file with invalid data:{}\"", ".", "format", "(", "pid", ")", ")", "else", ":", "if", "pid", "in", "[", "-", "1", ",", "1", "]", ":", "raise", "ValueError", "(", "\"invalid PID ({})\"", ".", "format", "(", "pid", ")", ")", "else", ":", "return", "pid", "except", "OSError", "as", "exc", ":", "if", "exc", ".", "errno", "==", "2", ":", "print", "(", "\"CRITICAL: anycast-healthchecker could be down as pid file \"", "\"{} doesn't exist\"", ".", "format", "(", "pidfile", ")", ")", "sys", ".", "exit", "(", "2", ")", "else", ":", "raise", "ValueError", "(", "\"error while reading pid file:{}\"", ".", "format", "(", "exc", ")", ")" ]
Return process id of anycast-healthchecker. Arguments: config (obj): A configparser object with the configuration of anycast-healthchecker. Returns: The process id found in the pid file Raises: ValueError in the following cases - pidfile option is missing from the configuration - pid is either -1 or 1 - stale pidfile, either with no data or invalid data - failure to read pidfile
[ "Return", "process", "id", "of", "anycast", "-", "healthchecker", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/contrib/nagios/check_anycast_healthchecker.py#L22-L63
train
unixsurfer/anycast_healthchecker
contrib/nagios/check_anycast_healthchecker.py
parse_services
def parse_services(config, services): """Parse configuration to return number of enabled service checks. Arguments: config (obj): A configparser object with the configuration of anycast-healthchecker. services (list): A list of section names which holds configuration for each service check Returns: A number (int) of enabled service checks. """ enabled = 0 for service in services: check_disabled = config.getboolean(service, 'check_disabled') if not check_disabled: enabled += 1 return enabled
python
def parse_services(config, services): """Parse configuration to return number of enabled service checks. Arguments: config (obj): A configparser object with the configuration of anycast-healthchecker. services (list): A list of section names which holds configuration for each service check Returns: A number (int) of enabled service checks. """ enabled = 0 for service in services: check_disabled = config.getboolean(service, 'check_disabled') if not check_disabled: enabled += 1 return enabled
[ "def", "parse_services", "(", "config", ",", "services", ")", ":", "enabled", "=", "0", "for", "service", "in", "services", ":", "check_disabled", "=", "config", ".", "getboolean", "(", "service", ",", "'check_disabled'", ")", "if", "not", "check_disabled", ":", "enabled", "+=", "1", "return", "enabled" ]
Parse configuration to return number of enabled service checks. Arguments: config (obj): A configparser object with the configuration of anycast-healthchecker. services (list): A list of section names which holds configuration for each service check Returns: A number (int) of enabled service checks.
[ "Parse", "configuration", "to", "return", "number", "of", "enabled", "service", "checks", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/contrib/nagios/check_anycast_healthchecker.py#L90-L109
train
unixsurfer/anycast_healthchecker
contrib/nagios/check_anycast_healthchecker.py
main
def main(): """Run check. anycast-healthchecker is a multi-threaded software and for each service check it holds a thread. If a thread dies then the service is not monitored anymore and the route for the IP associated with service it wont be withdrawn in case service goes down in the meantime. """ arguments = docopt(__doc__) config_file = '/etc/anycast-healthchecker.conf' config_dir = '/etc/anycast-healthchecker.d' config = configparser.ConfigParser() config_files = [config_file] config_files.extend(glob.glob(os.path.join(config_dir, '*.conf'))) config.read(config_files) try: pid = get_processid(config) except ValueError as exc: print("UNKNOWN: {e}".format(e=exc)) sys.exit(3) else: process_up = running(pid) if not process_up: print("CRITICAL: anycast-healthchecker with pid ({p}) isn't running" .format(p=pid)) sys.exit(3) services = config.sections() services.remove('daemon') if not services: print("UNKNOWN: No service checks are configured") sys.exit(3) enabled_service_checks = parse_services(config, services) if enabled_service_checks == 0: print("OK: Number of service checks is zero, no threads are running") sys.exit(0) else: # parent process plus nummber of threads for each service check configured_threads = enabled_service_checks + 1 cmd = ['/bin/ps', 'h', '-T', '-p', '{n}'.format(n=pid)] try: if arguments['-v']: print("running {}".format(' '.join(cmd))) out = subprocess.check_output(cmd, timeout=1) except subprocess.CalledProcessError as exc: print("UNKNOWN: running '{c}' failed with return code: {r}" .format(c=' '.join(cmd), r=exc.returncode)) sys.exit(3) except subprocess.TimeoutExpired: print("UNKNOWN: running '{}' timed out".format(' '.join(cmd))) sys.exit(3) else: output_lines = out.splitlines() if arguments['-v']: for line in output_lines: print(line) running_threads = len(output_lines) if running_threads == configured_threads: print("OK: UP (pid={p}) and all threads ({t}) are running" .format(p=pid, t=configured_threads - 1)) sys.exit(0) elif 
running_threads - 1 == 0: # minus parent process print("CRITICAL: No threads are running OpDocs ANYCAST-03") sys.exit(2) else: print("CRITICAL: Found {n} running threads while configured " "number of threads is {c} OpDocs ANYCAST-03" .format(n=running_threads - 1, c=configured_threads - 1)) sys.exit(2)
python
def main(): """Run check. anycast-healthchecker is a multi-threaded software and for each service check it holds a thread. If a thread dies then the service is not monitored anymore and the route for the IP associated with service it wont be withdrawn in case service goes down in the meantime. """ arguments = docopt(__doc__) config_file = '/etc/anycast-healthchecker.conf' config_dir = '/etc/anycast-healthchecker.d' config = configparser.ConfigParser() config_files = [config_file] config_files.extend(glob.glob(os.path.join(config_dir, '*.conf'))) config.read(config_files) try: pid = get_processid(config) except ValueError as exc: print("UNKNOWN: {e}".format(e=exc)) sys.exit(3) else: process_up = running(pid) if not process_up: print("CRITICAL: anycast-healthchecker with pid ({p}) isn't running" .format(p=pid)) sys.exit(3) services = config.sections() services.remove('daemon') if not services: print("UNKNOWN: No service checks are configured") sys.exit(3) enabled_service_checks = parse_services(config, services) if enabled_service_checks == 0: print("OK: Number of service checks is zero, no threads are running") sys.exit(0) else: # parent process plus nummber of threads for each service check configured_threads = enabled_service_checks + 1 cmd = ['/bin/ps', 'h', '-T', '-p', '{n}'.format(n=pid)] try: if arguments['-v']: print("running {}".format(' '.join(cmd))) out = subprocess.check_output(cmd, timeout=1) except subprocess.CalledProcessError as exc: print("UNKNOWN: running '{c}' failed with return code: {r}" .format(c=' '.join(cmd), r=exc.returncode)) sys.exit(3) except subprocess.TimeoutExpired: print("UNKNOWN: running '{}' timed out".format(' '.join(cmd))) sys.exit(3) else: output_lines = out.splitlines() if arguments['-v']: for line in output_lines: print(line) running_threads = len(output_lines) if running_threads == configured_threads: print("OK: UP (pid={p}) and all threads ({t}) are running" .format(p=pid, t=configured_threads - 1)) sys.exit(0) elif 
running_threads - 1 == 0: # minus parent process print("CRITICAL: No threads are running OpDocs ANYCAST-03") sys.exit(2) else: print("CRITICAL: Found {n} running threads while configured " "number of threads is {c} OpDocs ANYCAST-03" .format(n=running_threads - 1, c=configured_threads - 1)) sys.exit(2)
[ "def", "main", "(", ")", ":", "arguments", "=", "docopt", "(", "__doc__", ")", "config_file", "=", "'/etc/anycast-healthchecker.conf'", "config_dir", "=", "'/etc/anycast-healthchecker.d'", "config", "=", "configparser", ".", "ConfigParser", "(", ")", "config_files", "=", "[", "config_file", "]", "config_files", ".", "extend", "(", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "config_dir", ",", "'*.conf'", ")", ")", ")", "config", ".", "read", "(", "config_files", ")", "try", ":", "pid", "=", "get_processid", "(", "config", ")", "except", "ValueError", "as", "exc", ":", "print", "(", "\"UNKNOWN: {e}\"", ".", "format", "(", "e", "=", "exc", ")", ")", "sys", ".", "exit", "(", "3", ")", "else", ":", "process_up", "=", "running", "(", "pid", ")", "if", "not", "process_up", ":", "print", "(", "\"CRITICAL: anycast-healthchecker with pid ({p}) isn't running\"", ".", "format", "(", "p", "=", "pid", ")", ")", "sys", ".", "exit", "(", "3", ")", "services", "=", "config", ".", "sections", "(", ")", "services", ".", "remove", "(", "'daemon'", ")", "if", "not", "services", ":", "print", "(", "\"UNKNOWN: No service checks are configured\"", ")", "sys", ".", "exit", "(", "3", ")", "enabled_service_checks", "=", "parse_services", "(", "config", ",", "services", ")", "if", "enabled_service_checks", "==", "0", ":", "print", "(", "\"OK: Number of service checks is zero, no threads are running\"", ")", "sys", ".", "exit", "(", "0", ")", "else", ":", "# parent process plus nummber of threads for each service check", "configured_threads", "=", "enabled_service_checks", "+", "1", "cmd", "=", "[", "'/bin/ps'", ",", "'h'", ",", "'-T'", ",", "'-p'", ",", "'{n}'", ".", "format", "(", "n", "=", "pid", ")", "]", "try", ":", "if", "arguments", "[", "'-v'", "]", ":", "print", "(", "\"running {}\"", ".", "format", "(", "' '", ".", "join", "(", "cmd", ")", ")", ")", "out", "=", "subprocess", ".", "check_output", "(", "cmd", ",", "timeout", "=", "1", ")", "except", "subprocess", 
".", "CalledProcessError", "as", "exc", ":", "print", "(", "\"UNKNOWN: running '{c}' failed with return code: {r}\"", ".", "format", "(", "c", "=", "' '", ".", "join", "(", "cmd", ")", ",", "r", "=", "exc", ".", "returncode", ")", ")", "sys", ".", "exit", "(", "3", ")", "except", "subprocess", ".", "TimeoutExpired", ":", "print", "(", "\"UNKNOWN: running '{}' timed out\"", ".", "format", "(", "' '", ".", "join", "(", "cmd", ")", ")", ")", "sys", ".", "exit", "(", "3", ")", "else", ":", "output_lines", "=", "out", ".", "splitlines", "(", ")", "if", "arguments", "[", "'-v'", "]", ":", "for", "line", "in", "output_lines", ":", "print", "(", "line", ")", "running_threads", "=", "len", "(", "output_lines", ")", "if", "running_threads", "==", "configured_threads", ":", "print", "(", "\"OK: UP (pid={p}) and all threads ({t}) are running\"", ".", "format", "(", "p", "=", "pid", ",", "t", "=", "configured_threads", "-", "1", ")", ")", "sys", ".", "exit", "(", "0", ")", "elif", "running_threads", "-", "1", "==", "0", ":", "# minus parent process", "print", "(", "\"CRITICAL: No threads are running OpDocs ANYCAST-03\"", ")", "sys", ".", "exit", "(", "2", ")", "else", ":", "print", "(", "\"CRITICAL: Found {n} running threads while configured \"", "\"number of threads is {c} OpDocs ANYCAST-03\"", ".", "format", "(", "n", "=", "running_threads", "-", "1", ",", "c", "=", "configured_threads", "-", "1", ")", ")", "sys", ".", "exit", "(", "2", ")" ]
Run check. anycast-healthchecker is a multi-threaded software and for each service check it holds a thread. If a thread dies then the service is not monitored anymore and the route for the IP associated with service it wont be withdrawn in case service goes down in the meantime.
[ "Run", "check", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/contrib/nagios/check_anycast_healthchecker.py#L112-L184
train
sashahart/vex
vex/shell_config.py
scary_path
def scary_path(path): """Whitelist the WORKON_HOME strings we're willing to substitute in to strings that we provide for user's shell to evaluate. If it smells at all bad, return True. """ if not path: return True assert isinstance(path, bytes) return not NOT_SCARY.match(path)
python
def scary_path(path): """Whitelist the WORKON_HOME strings we're willing to substitute in to strings that we provide for user's shell to evaluate. If it smells at all bad, return True. """ if not path: return True assert isinstance(path, bytes) return not NOT_SCARY.match(path)
[ "def", "scary_path", "(", "path", ")", ":", "if", "not", "path", ":", "return", "True", "assert", "isinstance", "(", "path", ",", "bytes", ")", "return", "not", "NOT_SCARY", ".", "match", "(", "path", ")" ]
Whitelist the WORKON_HOME strings we're willing to substitute in to strings that we provide for user's shell to evaluate. If it smells at all bad, return True.
[ "Whitelist", "the", "WORKON_HOME", "strings", "we", "re", "willing", "to", "substitute", "in", "to", "strings", "that", "we", "provide", "for", "user", "s", "shell", "to", "evaluate", "." ]
b7680c40897b8cbe6aae55ec9812b4fb11738192
https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/shell_config.py#L22-L31
train
sashahart/vex
vex/shell_config.py
shell_config_for
def shell_config_for(shell, vexrc, environ): """return completion config for the named shell. """ here = os.path.dirname(os.path.abspath(__file__)) path = os.path.join(here, 'shell_configs', shell) try: with open(path, 'rb') as inp: data = inp.read() except FileNotFoundError as error: if error.errno != 2: raise return b'' ve_base = vexrc.get_ve_base(environ).encode('ascii') if ve_base and not scary_path(ve_base) and os.path.exists(ve_base): data = data.replace(b'$WORKON_HOME', ve_base) return data
python
def shell_config_for(shell, vexrc, environ): """return completion config for the named shell. """ here = os.path.dirname(os.path.abspath(__file__)) path = os.path.join(here, 'shell_configs', shell) try: with open(path, 'rb') as inp: data = inp.read() except FileNotFoundError as error: if error.errno != 2: raise return b'' ve_base = vexrc.get_ve_base(environ).encode('ascii') if ve_base and not scary_path(ve_base) and os.path.exists(ve_base): data = data.replace(b'$WORKON_HOME', ve_base) return data
[ "def", "shell_config_for", "(", "shell", ",", "vexrc", ",", "environ", ")", ":", "here", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", "path", "=", "os", ".", "path", ".", "join", "(", "here", ",", "'shell_configs'", ",", "shell", ")", "try", ":", "with", "open", "(", "path", ",", "'rb'", ")", "as", "inp", ":", "data", "=", "inp", ".", "read", "(", ")", "except", "FileNotFoundError", "as", "error", ":", "if", "error", ".", "errno", "!=", "2", ":", "raise", "return", "b''", "ve_base", "=", "vexrc", ".", "get_ve_base", "(", "environ", ")", ".", "encode", "(", "'ascii'", ")", "if", "ve_base", "and", "not", "scary_path", "(", "ve_base", ")", "and", "os", ".", "path", ".", "exists", "(", "ve_base", ")", ":", "data", "=", "data", ".", "replace", "(", "b'$WORKON_HOME'", ",", "ve_base", ")", "return", "data" ]
return completion config for the named shell.
[ "return", "completion", "config", "for", "the", "named", "shell", "." ]
b7680c40897b8cbe6aae55ec9812b4fb11738192
https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/shell_config.py#L34-L49
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/servicecheck.py
ServiceCheck._run_check
def _run_check(self): """Execute a check command. Returns: True if the exit code of the command is 0 otherwise False. """ cmd = shlex.split(self.config['check_cmd']) self.log.info("running %s", ' '.join(cmd)) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) start_time = time.time() try: outs, errs = proc.communicate(timeout=self.config['check_timeout']) except subprocess.TimeoutExpired: self.log.error("check timed out") if proc.poll() is None: try: proc.kill() except PermissionError: self.log.warning("failed to kill check due to adequate " "access rights, check could be running " "under another user(root) via sudo") return False else: msg = "check duration {t:.3f}ms".format( t=(time.time() - start_time) * 1000) self.log.info(msg) if proc.returncode != 0: self.log.info("stderr from the check %s", errs) self.log.info("stdout from the check %s", outs) return proc.returncode == 0
python
def _run_check(self): """Execute a check command. Returns: True if the exit code of the command is 0 otherwise False. """ cmd = shlex.split(self.config['check_cmd']) self.log.info("running %s", ' '.join(cmd)) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) start_time = time.time() try: outs, errs = proc.communicate(timeout=self.config['check_timeout']) except subprocess.TimeoutExpired: self.log.error("check timed out") if proc.poll() is None: try: proc.kill() except PermissionError: self.log.warning("failed to kill check due to adequate " "access rights, check could be running " "under another user(root) via sudo") return False else: msg = "check duration {t:.3f}ms".format( t=(time.time() - start_time) * 1000) self.log.info(msg) if proc.returncode != 0: self.log.info("stderr from the check %s", errs) self.log.info("stdout from the check %s", outs) return proc.returncode == 0
[ "def", "_run_check", "(", "self", ")", ":", "cmd", "=", "shlex", ".", "split", "(", "self", ".", "config", "[", "'check_cmd'", "]", ")", "self", ".", "log", ".", "info", "(", "\"running %s\"", ",", "' '", ".", "join", "(", "cmd", ")", ")", "proc", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "start_time", "=", "time", ".", "time", "(", ")", "try", ":", "outs", ",", "errs", "=", "proc", ".", "communicate", "(", "timeout", "=", "self", ".", "config", "[", "'check_timeout'", "]", ")", "except", "subprocess", ".", "TimeoutExpired", ":", "self", ".", "log", ".", "error", "(", "\"check timed out\"", ")", "if", "proc", ".", "poll", "(", ")", "is", "None", ":", "try", ":", "proc", ".", "kill", "(", ")", "except", "PermissionError", ":", "self", ".", "log", ".", "warning", "(", "\"failed to kill check due to adequate \"", "\"access rights, check could be running \"", "\"under another user(root) via sudo\"", ")", "return", "False", "else", ":", "msg", "=", "\"check duration {t:.3f}ms\"", ".", "format", "(", "t", "=", "(", "time", ".", "time", "(", ")", "-", "start_time", ")", "*", "1000", ")", "self", ".", "log", ".", "info", "(", "msg", ")", "if", "proc", ".", "returncode", "!=", "0", ":", "self", ".", "log", ".", "info", "(", "\"stderr from the check %s\"", ",", "errs", ")", "self", ".", "log", ".", "info", "(", "\"stdout from the check %s\"", ",", "outs", ")", "return", "proc", ".", "returncode", "==", "0" ]
Execute a check command. Returns: True if the exit code of the command is 0 otherwise False.
[ "Execute", "a", "check", "command", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/servicecheck.py#L88-L123
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/servicecheck.py
ServiceCheck._ip_assigned
def _ip_assigned(self): """Check if IP prefix is assigned to loopback interface. Returns: True if IP prefix found assigned otherwise False. """ output = [] cmd = [ '/sbin/ip', 'address', 'show', 'dev', self.config['interface'], 'to', self.ip_with_prefixlen, ] if self.ip_check_disabled: self.log.info("checking for IP assignment on interface %s is " "disabled", self.config['interface']) return True self.log.debug("running %s", ' '.join(cmd)) try: output = subprocess.check_output( cmd, universal_newlines=True, timeout=1) except subprocess.CalledProcessError as error: self.log.error("error checking IP-PREFIX %s: %s", cmd, error.output) # Because it is unlikely to ever get an error we return True return True except subprocess.TimeoutExpired: self.log.error("timeout running %s", ' '.join(cmd)) # Because it is unlikely to ever get a timeout we return True return True except ValueError as error: # We have been getting intermittent ValueErrors, see here # gist.github.com/unixsurfer/67db620d87f667423f6f6e3a04e0bff5 # It has happened ~5 times and this code is executed from multiple # threads and every ~10secs on several (~40) production servers for # more than 18months. # It could be a bug in Python or system returns corrupted data. # As a consequence of the raised exception thread dies and the # service isn't monitored anymore!. So, we now catch the exception. # While checking if an IP is assigned, we get an error unrelated to # that prevents us from knowing if it's assigned. We simply don't # know. A retry logic could be a more proper solution. 
self.log.error("running %s raised ValueError exception:%s", ' '.join(cmd), error) return True else: if self.ip_with_prefixlen in output: # pylint: disable=E1135,R1705 msg = "{i} assigned to loopback interface".format( i=self.ip_with_prefixlen) self.log.debug(msg) return True else: msg = ("{i} isn't assigned to {d} interface" .format(i=self.ip_with_prefixlen, d=self.config['interface'])) self.log.warning(msg) return False self.log.debug("I shouldn't land here!, it is a BUG") return False
python
def _ip_assigned(self): """Check if IP prefix is assigned to loopback interface. Returns: True if IP prefix found assigned otherwise False. """ output = [] cmd = [ '/sbin/ip', 'address', 'show', 'dev', self.config['interface'], 'to', self.ip_with_prefixlen, ] if self.ip_check_disabled: self.log.info("checking for IP assignment on interface %s is " "disabled", self.config['interface']) return True self.log.debug("running %s", ' '.join(cmd)) try: output = subprocess.check_output( cmd, universal_newlines=True, timeout=1) except subprocess.CalledProcessError as error: self.log.error("error checking IP-PREFIX %s: %s", cmd, error.output) # Because it is unlikely to ever get an error we return True return True except subprocess.TimeoutExpired: self.log.error("timeout running %s", ' '.join(cmd)) # Because it is unlikely to ever get a timeout we return True return True except ValueError as error: # We have been getting intermittent ValueErrors, see here # gist.github.com/unixsurfer/67db620d87f667423f6f6e3a04e0bff5 # It has happened ~5 times and this code is executed from multiple # threads and every ~10secs on several (~40) production servers for # more than 18months. # It could be a bug in Python or system returns corrupted data. # As a consequence of the raised exception thread dies and the # service isn't monitored anymore!. So, we now catch the exception. # While checking if an IP is assigned, we get an error unrelated to # that prevents us from knowing if it's assigned. We simply don't # know. A retry logic could be a more proper solution. 
self.log.error("running %s raised ValueError exception:%s", ' '.join(cmd), error) return True else: if self.ip_with_prefixlen in output: # pylint: disable=E1135,R1705 msg = "{i} assigned to loopback interface".format( i=self.ip_with_prefixlen) self.log.debug(msg) return True else: msg = ("{i} isn't assigned to {d} interface" .format(i=self.ip_with_prefixlen, d=self.config['interface'])) self.log.warning(msg) return False self.log.debug("I shouldn't land here!, it is a BUG") return False
[ "def", "_ip_assigned", "(", "self", ")", ":", "output", "=", "[", "]", "cmd", "=", "[", "'/sbin/ip'", ",", "'address'", ",", "'show'", ",", "'dev'", ",", "self", ".", "config", "[", "'interface'", "]", ",", "'to'", ",", "self", ".", "ip_with_prefixlen", ",", "]", "if", "self", ".", "ip_check_disabled", ":", "self", ".", "log", ".", "info", "(", "\"checking for IP assignment on interface %s is \"", "\"disabled\"", ",", "self", ".", "config", "[", "'interface'", "]", ")", "return", "True", "self", ".", "log", ".", "debug", "(", "\"running %s\"", ",", "' '", ".", "join", "(", "cmd", ")", ")", "try", ":", "output", "=", "subprocess", ".", "check_output", "(", "cmd", ",", "universal_newlines", "=", "True", ",", "timeout", "=", "1", ")", "except", "subprocess", ".", "CalledProcessError", "as", "error", ":", "self", ".", "log", ".", "error", "(", "\"error checking IP-PREFIX %s: %s\"", ",", "cmd", ",", "error", ".", "output", ")", "# Because it is unlikely to ever get an error we return True", "return", "True", "except", "subprocess", ".", "TimeoutExpired", ":", "self", ".", "log", ".", "error", "(", "\"timeout running %s\"", ",", "' '", ".", "join", "(", "cmd", ")", ")", "# Because it is unlikely to ever get a timeout we return True", "return", "True", "except", "ValueError", "as", "error", ":", "# We have been getting intermittent ValueErrors, see here", "# gist.github.com/unixsurfer/67db620d87f667423f6f6e3a04e0bff5", "# It has happened ~5 times and this code is executed from multiple", "# threads and every ~10secs on several (~40) production servers for", "# more than 18months.", "# It could be a bug in Python or system returns corrupted data.", "# As a consequence of the raised exception thread dies and the", "# service isn't monitored anymore!. So, we now catch the exception.", "# While checking if an IP is assigned, we get an error unrelated to", "# that prevents us from knowing if it's assigned. We simply don't", "# know. 
A retry logic could be a more proper solution.", "self", ".", "log", ".", "error", "(", "\"running %s raised ValueError exception:%s\"", ",", "' '", ".", "join", "(", "cmd", ")", ",", "error", ")", "return", "True", "else", ":", "if", "self", ".", "ip_with_prefixlen", "in", "output", ":", "# pylint: disable=E1135,R1705", "msg", "=", "\"{i} assigned to loopback interface\"", ".", "format", "(", "i", "=", "self", ".", "ip_with_prefixlen", ")", "self", ".", "log", ".", "debug", "(", "msg", ")", "return", "True", "else", ":", "msg", "=", "(", "\"{i} isn't assigned to {d} interface\"", ".", "format", "(", "i", "=", "self", ".", "ip_with_prefixlen", ",", "d", "=", "self", ".", "config", "[", "'interface'", "]", ")", ")", "self", ".", "log", ".", "warning", "(", "msg", ")", "return", "False", "self", ".", "log", ".", "debug", "(", "\"I shouldn't land here!, it is a BUG\"", ")", "return", "False" ]
Check if IP prefix is assigned to loopback interface. Returns: True if IP prefix found assigned otherwise False.
[ "Check", "if", "IP", "prefix", "is", "assigned", "to", "loopback", "interface", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/servicecheck.py#L125-L193
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/servicecheck.py
ServiceCheck._check_disabled
def _check_disabled(self): """Check if health check is disabled. It logs a message if health check is disabled and it also adds an item to the action queue based on 'on_disabled' setting. Returns: True if check is disabled otherwise False. """ if self.config['check_disabled']: if self.config['on_disabled'] == 'withdraw': self.log.info("Check is disabled and ip_prefix will be " "withdrawn") self.log.info("adding %s in the queue", self.ip_with_prefixlen) self.action.put(self.del_operation) self.log.info("Check is now permanently disabled") elif self.config['on_disabled'] == 'advertise': self.log.info("check is disabled, ip_prefix wont be withdrawn") self.log.info("adding %s in the queue", self.ip_with_prefixlen) self.action.put(self.add_operation) self.log.info('check is now permanently disabled') return True return False
python
def _check_disabled(self): """Check if health check is disabled. It logs a message if health check is disabled and it also adds an item to the action queue based on 'on_disabled' setting. Returns: True if check is disabled otherwise False. """ if self.config['check_disabled']: if self.config['on_disabled'] == 'withdraw': self.log.info("Check is disabled and ip_prefix will be " "withdrawn") self.log.info("adding %s in the queue", self.ip_with_prefixlen) self.action.put(self.del_operation) self.log.info("Check is now permanently disabled") elif self.config['on_disabled'] == 'advertise': self.log.info("check is disabled, ip_prefix wont be withdrawn") self.log.info("adding %s in the queue", self.ip_with_prefixlen) self.action.put(self.add_operation) self.log.info('check is now permanently disabled') return True return False
[ "def", "_check_disabled", "(", "self", ")", ":", "if", "self", ".", "config", "[", "'check_disabled'", "]", ":", "if", "self", ".", "config", "[", "'on_disabled'", "]", "==", "'withdraw'", ":", "self", ".", "log", ".", "info", "(", "\"Check is disabled and ip_prefix will be \"", "\"withdrawn\"", ")", "self", ".", "log", ".", "info", "(", "\"adding %s in the queue\"", ",", "self", ".", "ip_with_prefixlen", ")", "self", ".", "action", ".", "put", "(", "self", ".", "del_operation", ")", "self", ".", "log", ".", "info", "(", "\"Check is now permanently disabled\"", ")", "elif", "self", ".", "config", "[", "'on_disabled'", "]", "==", "'advertise'", ":", "self", ".", "log", ".", "info", "(", "\"check is disabled, ip_prefix wont be withdrawn\"", ")", "self", ".", "log", ".", "info", "(", "\"adding %s in the queue\"", ",", "self", ".", "ip_with_prefixlen", ")", "self", ".", "action", ".", "put", "(", "self", ".", "add_operation", ")", "self", ".", "log", ".", "info", "(", "'check is now permanently disabled'", ")", "return", "True", "return", "False" ]
Check if health check is disabled. It logs a message if health check is disabled and it also adds an item to the action queue based on 'on_disabled' setting. Returns: True if check is disabled otherwise False.
[ "Check", "if", "health", "check", "is", "disabled", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/servicecheck.py#L195-L220
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/servicecheck.py
ServiceCheck.run
def run(self): """Wrap _run method.""" # Catch all possible exceptions raised by the running thread # and let parent process know about it. try: self._run() except Exception: # pylint: disable=broad-except self.action.put( ServiceCheckDiedError(self.name, traceback.format_exc()) )
python
def run(self): """Wrap _run method.""" # Catch all possible exceptions raised by the running thread # and let parent process know about it. try: self._run() except Exception: # pylint: disable=broad-except self.action.put( ServiceCheckDiedError(self.name, traceback.format_exc()) )
[ "def", "run", "(", "self", ")", ":", "# Catch all possible exceptions raised by the running thread", "# and let parent process know about it.", "try", ":", "self", ".", "_run", "(", ")", "except", "Exception", ":", "# pylint: disable=broad-except", "self", ".", "action", ".", "put", "(", "ServiceCheckDiedError", "(", "self", ".", "name", ",", "traceback", ".", "format_exc", "(", ")", ")", ")" ]
Wrap _run method.
[ "Wrap", "_run", "method", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/servicecheck.py#L222-L231
train
unixsurfer/anycast_healthchecker
anycast_healthchecker/main.py
main
def main(): """Parse CLI and starts main program.""" args = docopt(__doc__, version=__version__) if args['--print']: for section in DEFAULT_OPTIONS: print("[{}]".format(section)) for key, value in DEFAULT_OPTIONS[section].items(): print("{k} = {v}".format(k=key, v=value)) print() sys.exit(0) try: config, bird_configuration = load_configuration(args['--file'], args['--dir'], args['--service-file']) except ValueError as exc: sys.exit('Invalid configuration: ' + str(exc)) if args['--check']: print("OK") sys.exit(0) if args['--print-conf']: for section in config: print("[{}]".format(section)) for key, value in config[section].items(): print("{k} = {v}".format(k=key, v=value)) print() sys.exit(0) try: lock_socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) lock_socket.bind('\0' + "{}".format(PROGRAM_NAME)) except socket.error as exc: sys.exit("failed to acquire a lock by creating an abstract namespace" " socket: {}".format(exc)) else: print("acquired a lock by creating an abstract namespace socket: {}" .format(lock_socket)) # Clean old pidfile, if it exists, and write PID to it. pidfile = config.get('daemon', 'pidfile') update_pidfile(pidfile) # Register our shutdown handler to various termination signals. shutdown_handler = partial(shutdown, pidfile) signal.signal(signal.SIGHUP, shutdown_handler) signal.signal(signal.SIGTERM, shutdown_handler) signal.signal(signal.SIGABRT, shutdown_handler) signal.signal(signal.SIGINT, shutdown_handler) # Set up loggers. logger = setup_logger(config) # Perform a sanity check on IP-Prefixes ip_prefixes_sanity_check(config, bird_configuration) # Create our master process. checker = healthchecker.HealthChecker(config, bird_configuration) logger.info("starting %s version %s", PROGRAM_NAME, __version__) checker.run()
python
def main(): """Parse CLI and starts main program.""" args = docopt(__doc__, version=__version__) if args['--print']: for section in DEFAULT_OPTIONS: print("[{}]".format(section)) for key, value in DEFAULT_OPTIONS[section].items(): print("{k} = {v}".format(k=key, v=value)) print() sys.exit(0) try: config, bird_configuration = load_configuration(args['--file'], args['--dir'], args['--service-file']) except ValueError as exc: sys.exit('Invalid configuration: ' + str(exc)) if args['--check']: print("OK") sys.exit(0) if args['--print-conf']: for section in config: print("[{}]".format(section)) for key, value in config[section].items(): print("{k} = {v}".format(k=key, v=value)) print() sys.exit(0) try: lock_socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) lock_socket.bind('\0' + "{}".format(PROGRAM_NAME)) except socket.error as exc: sys.exit("failed to acquire a lock by creating an abstract namespace" " socket: {}".format(exc)) else: print("acquired a lock by creating an abstract namespace socket: {}" .format(lock_socket)) # Clean old pidfile, if it exists, and write PID to it. pidfile = config.get('daemon', 'pidfile') update_pidfile(pidfile) # Register our shutdown handler to various termination signals. shutdown_handler = partial(shutdown, pidfile) signal.signal(signal.SIGHUP, shutdown_handler) signal.signal(signal.SIGTERM, shutdown_handler) signal.signal(signal.SIGABRT, shutdown_handler) signal.signal(signal.SIGINT, shutdown_handler) # Set up loggers. logger = setup_logger(config) # Perform a sanity check on IP-Prefixes ip_prefixes_sanity_check(config, bird_configuration) # Create our master process. checker = healthchecker.HealthChecker(config, bird_configuration) logger.info("starting %s version %s", PROGRAM_NAME, __version__) checker.run()
[ "def", "main", "(", ")", ":", "args", "=", "docopt", "(", "__doc__", ",", "version", "=", "__version__", ")", "if", "args", "[", "'--print'", "]", ":", "for", "section", "in", "DEFAULT_OPTIONS", ":", "print", "(", "\"[{}]\"", ".", "format", "(", "section", ")", ")", "for", "key", ",", "value", "in", "DEFAULT_OPTIONS", "[", "section", "]", ".", "items", "(", ")", ":", "print", "(", "\"{k} = {v}\"", ".", "format", "(", "k", "=", "key", ",", "v", "=", "value", ")", ")", "print", "(", ")", "sys", ".", "exit", "(", "0", ")", "try", ":", "config", ",", "bird_configuration", "=", "load_configuration", "(", "args", "[", "'--file'", "]", ",", "args", "[", "'--dir'", "]", ",", "args", "[", "'--service-file'", "]", ")", "except", "ValueError", "as", "exc", ":", "sys", ".", "exit", "(", "'Invalid configuration: '", "+", "str", "(", "exc", ")", ")", "if", "args", "[", "'--check'", "]", ":", "print", "(", "\"OK\"", ")", "sys", ".", "exit", "(", "0", ")", "if", "args", "[", "'--print-conf'", "]", ":", "for", "section", "in", "config", ":", "print", "(", "\"[{}]\"", ".", "format", "(", "section", ")", ")", "for", "key", ",", "value", "in", "config", "[", "section", "]", ".", "items", "(", ")", ":", "print", "(", "\"{k} = {v}\"", ".", "format", "(", "k", "=", "key", ",", "v", "=", "value", ")", ")", "print", "(", ")", "sys", ".", "exit", "(", "0", ")", "try", ":", "lock_socket", "=", "socket", ".", "socket", "(", "socket", ".", "AF_UNIX", ",", "socket", ".", "SOCK_DGRAM", ")", "lock_socket", ".", "bind", "(", "'\\0'", "+", "\"{}\"", ".", "format", "(", "PROGRAM_NAME", ")", ")", "except", "socket", ".", "error", "as", "exc", ":", "sys", ".", "exit", "(", "\"failed to acquire a lock by creating an abstract namespace\"", "\" socket: {}\"", ".", "format", "(", "exc", ")", ")", "else", ":", "print", "(", "\"acquired a lock by creating an abstract namespace socket: {}\"", ".", "format", "(", "lock_socket", ")", ")", "# Clean old pidfile, if it exists, and write PID to it.", "pidfile", 
"=", "config", ".", "get", "(", "'daemon'", ",", "'pidfile'", ")", "update_pidfile", "(", "pidfile", ")", "# Register our shutdown handler to various termination signals.", "shutdown_handler", "=", "partial", "(", "shutdown", ",", "pidfile", ")", "signal", ".", "signal", "(", "signal", ".", "SIGHUP", ",", "shutdown_handler", ")", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "shutdown_handler", ")", "signal", ".", "signal", "(", "signal", ".", "SIGABRT", ",", "shutdown_handler", ")", "signal", ".", "signal", "(", "signal", ".", "SIGINT", ",", "shutdown_handler", ")", "# Set up loggers.", "logger", "=", "setup_logger", "(", "config", ")", "# Perform a sanity check on IP-Prefixes", "ip_prefixes_sanity_check", "(", "config", ",", "bird_configuration", ")", "# Create our master process.", "checker", "=", "healthchecker", ".", "HealthChecker", "(", "config", ",", "bird_configuration", ")", "logger", ".", "info", "(", "\"starting %s version %s\"", ",", "PROGRAM_NAME", ",", "__version__", ")", "checker", ".", "run", "(", ")" ]
Parse CLI and starts main program.
[ "Parse", "CLI", "and", "starts", "main", "program", "." ]
3ab9c1d65d550eb30621ced2434252f61d1fdd33
https://github.com/unixsurfer/anycast_healthchecker/blob/3ab9c1d65d550eb30621ced2434252f61d1fdd33/anycast_healthchecker/main.py#L38-L98
train
sashahart/vex
vex/run.py
get_environ
def get_environ(environ, defaults, ve_path): """Make an environment to run with. """ # Copy the parent environment, add in defaults from .vexrc. env = environ.copy() env.update(defaults) # Leaving in existing PYTHONHOME can cause some errors if 'PYTHONHOME' in env: del env['PYTHONHOME'] # Now we have to adjust PATH to find scripts for the virtualenv... # PATH being unset/empty is OK, but ve_path must be set # or there is nothing for us to do here and it's bad. if not ve_path: raise exceptions.BadConfig('ve_path must be set') if platform.system() == 'Windows': ve_bin = os.path.join(ve_path, 'Scripts') else: ve_bin = os.path.join(ve_path, 'bin') # If user is currently in a virtualenv, DON'T just prepend # to its path (vex foo; echo $PATH -> " /foo/bin:/bar/bin") # but don't incur this cost unless we're already in one. # activate handles this by running 'deactivate' first, we don't # have that so we have to use other ways. # This would not be necessary and things would be simpler if vex # did not have to interoperate with a ubiquitous existing tool. # virtualenv doesn't... current_ve = env.get('VIRTUAL_ENV', '') system_path = environ.get('PATH', '') segments = system_path.split(os.pathsep) if current_ve: # Since activate doesn't export _OLD_VIRTUAL_PATH, we are going to # manually remove the virtualenv's bin. # A virtualenv's bin should not normally be on PATH except # via activate or similar, so I'm OK with this solution. current_ve_bin = os.path.join(current_ve, 'bin') try: segments.remove(current_ve_bin) except ValueError: raise exceptions.BadConfig( "something set VIRTUAL_ENV prior to this vex execution, " "implying that a virtualenv is already activated " "and PATH should contain the virtualenv's bin directory. " "Unfortunately, it doesn't: it's {0!r}. " "You might want to check that PATH is not " "getting clobbered somewhere, e.g. in your shell's configs." 
.format(system_path) ) segments.insert(0, ve_bin) env['PATH'] = os.pathsep.join(segments) env['VIRTUAL_ENV'] = ve_path return env
python
def get_environ(environ, defaults, ve_path): """Make an environment to run with. """ # Copy the parent environment, add in defaults from .vexrc. env = environ.copy() env.update(defaults) # Leaving in existing PYTHONHOME can cause some errors if 'PYTHONHOME' in env: del env['PYTHONHOME'] # Now we have to adjust PATH to find scripts for the virtualenv... # PATH being unset/empty is OK, but ve_path must be set # or there is nothing for us to do here and it's bad. if not ve_path: raise exceptions.BadConfig('ve_path must be set') if platform.system() == 'Windows': ve_bin = os.path.join(ve_path, 'Scripts') else: ve_bin = os.path.join(ve_path, 'bin') # If user is currently in a virtualenv, DON'T just prepend # to its path (vex foo; echo $PATH -> " /foo/bin:/bar/bin") # but don't incur this cost unless we're already in one. # activate handles this by running 'deactivate' first, we don't # have that so we have to use other ways. # This would not be necessary and things would be simpler if vex # did not have to interoperate with a ubiquitous existing tool. # virtualenv doesn't... current_ve = env.get('VIRTUAL_ENV', '') system_path = environ.get('PATH', '') segments = system_path.split(os.pathsep) if current_ve: # Since activate doesn't export _OLD_VIRTUAL_PATH, we are going to # manually remove the virtualenv's bin. # A virtualenv's bin should not normally be on PATH except # via activate or similar, so I'm OK with this solution. current_ve_bin = os.path.join(current_ve, 'bin') try: segments.remove(current_ve_bin) except ValueError: raise exceptions.BadConfig( "something set VIRTUAL_ENV prior to this vex execution, " "implying that a virtualenv is already activated " "and PATH should contain the virtualenv's bin directory. " "Unfortunately, it doesn't: it's {0!r}. " "You might want to check that PATH is not " "getting clobbered somewhere, e.g. in your shell's configs." 
.format(system_path) ) segments.insert(0, ve_bin) env['PATH'] = os.pathsep.join(segments) env['VIRTUAL_ENV'] = ve_path return env
[ "def", "get_environ", "(", "environ", ",", "defaults", ",", "ve_path", ")", ":", "# Copy the parent environment, add in defaults from .vexrc.", "env", "=", "environ", ".", "copy", "(", ")", "env", ".", "update", "(", "defaults", ")", "# Leaving in existing PYTHONHOME can cause some errors", "if", "'PYTHONHOME'", "in", "env", ":", "del", "env", "[", "'PYTHONHOME'", "]", "# Now we have to adjust PATH to find scripts for the virtualenv...", "# PATH being unset/empty is OK, but ve_path must be set", "# or there is nothing for us to do here and it's bad.", "if", "not", "ve_path", ":", "raise", "exceptions", ".", "BadConfig", "(", "'ve_path must be set'", ")", "if", "platform", ".", "system", "(", ")", "==", "'Windows'", ":", "ve_bin", "=", "os", ".", "path", ".", "join", "(", "ve_path", ",", "'Scripts'", ")", "else", ":", "ve_bin", "=", "os", ".", "path", ".", "join", "(", "ve_path", ",", "'bin'", ")", "# If user is currently in a virtualenv, DON'T just prepend", "# to its path (vex foo; echo $PATH -> \" /foo/bin:/bar/bin\")", "# but don't incur this cost unless we're already in one.", "# activate handles this by running 'deactivate' first, we don't", "# have that so we have to use other ways.", "# This would not be necessary and things would be simpler if vex", "# did not have to interoperate with a ubiquitous existing tool.", "# virtualenv doesn't...", "current_ve", "=", "env", ".", "get", "(", "'VIRTUAL_ENV'", ",", "''", ")", "system_path", "=", "environ", ".", "get", "(", "'PATH'", ",", "''", ")", "segments", "=", "system_path", ".", "split", "(", "os", ".", "pathsep", ")", "if", "current_ve", ":", "# Since activate doesn't export _OLD_VIRTUAL_PATH, we are going to", "# manually remove the virtualenv's bin.", "# A virtualenv's bin should not normally be on PATH except", "# via activate or similar, so I'm OK with this solution.", "current_ve_bin", "=", "os", ".", "path", ".", "join", "(", "current_ve", ",", "'bin'", ")", "try", ":", "segments", ".", "remove", "(", 
"current_ve_bin", ")", "except", "ValueError", ":", "raise", "exceptions", ".", "BadConfig", "(", "\"something set VIRTUAL_ENV prior to this vex execution, \"", "\"implying that a virtualenv is already activated \"", "\"and PATH should contain the virtualenv's bin directory. \"", "\"Unfortunately, it doesn't: it's {0!r}. \"", "\"You might want to check that PATH is not \"", "\"getting clobbered somewhere, e.g. in your shell's configs.\"", ".", "format", "(", "system_path", ")", ")", "segments", ".", "insert", "(", "0", ",", "ve_bin", ")", "env", "[", "'PATH'", "]", "=", "os", ".", "pathsep", ".", "join", "(", "segments", ")", "env", "[", "'VIRTUAL_ENV'", "]", "=", "ve_path", "return", "env" ]
Make an environment to run with.
[ "Make", "an", "environment", "to", "run", "with", "." ]
b7680c40897b8cbe6aae55ec9812b4fb11738192
https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/run.py#L10-L64
train
sashahart/vex
vex/config.py
extract_key_value
def extract_key_value(line, environ): """Return key, value from given line if present, else return None. """ segments = line.split("=", 1) if len(segments) < 2: return None key, value = segments # foo passes through as-is (with spaces stripped) # '{foo}' passes through literally # "{foo}" substitutes from environ's foo value = value.strip() if value[0] == "'" and _SQUOTE_RE.match(value): value = value[1:-1] elif value[0] == '"' and _DQUOTE_RE.match(value): template = value[1:-1] value = template.format(**environ) key = key.strip() value = value.strip() return key, value
python
def extract_key_value(line, environ): """Return key, value from given line if present, else return None. """ segments = line.split("=", 1) if len(segments) < 2: return None key, value = segments # foo passes through as-is (with spaces stripped) # '{foo}' passes through literally # "{foo}" substitutes from environ's foo value = value.strip() if value[0] == "'" and _SQUOTE_RE.match(value): value = value[1:-1] elif value[0] == '"' and _DQUOTE_RE.match(value): template = value[1:-1] value = template.format(**environ) key = key.strip() value = value.strip() return key, value
[ "def", "extract_key_value", "(", "line", ",", "environ", ")", ":", "segments", "=", "line", ".", "split", "(", "\"=\"", ",", "1", ")", "if", "len", "(", "segments", ")", "<", "2", ":", "return", "None", "key", ",", "value", "=", "segments", "# foo passes through as-is (with spaces stripped)", "# '{foo}' passes through literally", "# \"{foo}\" substitutes from environ's foo", "value", "=", "value", ".", "strip", "(", ")", "if", "value", "[", "0", "]", "==", "\"'\"", "and", "_SQUOTE_RE", ".", "match", "(", "value", ")", ":", "value", "=", "value", "[", "1", ":", "-", "1", "]", "elif", "value", "[", "0", "]", "==", "'\"'", "and", "_DQUOTE_RE", ".", "match", "(", "value", ")", ":", "template", "=", "value", "[", "1", ":", "-", "1", "]", "value", "=", "template", ".", "format", "(", "*", "*", "environ", ")", "key", "=", "key", ".", "strip", "(", ")", "value", "=", "value", ".", "strip", "(", ")", "return", "key", ",", "value" ]
Return key, value from given line if present, else return None.
[ "Return", "key", "value", "from", "given", "line", "if", "present", "else", "return", "None", "." ]
b7680c40897b8cbe6aae55ec9812b4fb11738192
https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/config.py#L129-L147
train
sashahart/vex
vex/config.py
Vexrc.from_file
def from_file(cls, path, environ): """Make a Vexrc instance from given file in given environ. """ instance = cls() instance.read(path, environ) return instance
python
def from_file(cls, path, environ): """Make a Vexrc instance from given file in given environ. """ instance = cls() instance.read(path, environ) return instance
[ "def", "from_file", "(", "cls", ",", "path", ",", "environ", ")", ":", "instance", "=", "cls", "(", ")", "instance", ".", "read", "(", "path", ",", "environ", ")", "return", "instance" ]
Make a Vexrc instance from given file in given environ.
[ "Make", "a", "Vexrc", "instance", "from", "given", "file", "in", "given", "environ", "." ]
b7680c40897b8cbe6aae55ec9812b4fb11738192
https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/config.py#L54-L59
train
sashahart/vex
vex/config.py
Vexrc.read
def read(self, path, environ): """Read data from file into this vexrc instance. """ try: inp = open(path, 'rb') except FileNotFoundError as error: if error.errno != 2: raise return None parsing = parse_vexrc(inp, environ) for heading, key, value in parsing: heading = self.default_heading if heading is None else heading if heading not in self.headings: self.headings[heading] = OrderedDict() self.headings[heading][key] = value parsing.close()
python
def read(self, path, environ): """Read data from file into this vexrc instance. """ try: inp = open(path, 'rb') except FileNotFoundError as error: if error.errno != 2: raise return None parsing = parse_vexrc(inp, environ) for heading, key, value in parsing: heading = self.default_heading if heading is None else heading if heading not in self.headings: self.headings[heading] = OrderedDict() self.headings[heading][key] = value parsing.close()
[ "def", "read", "(", "self", ",", "path", ",", "environ", ")", ":", "try", ":", "inp", "=", "open", "(", "path", ",", "'rb'", ")", "except", "FileNotFoundError", "as", "error", ":", "if", "error", ".", "errno", "!=", "2", ":", "raise", "return", "None", "parsing", "=", "parse_vexrc", "(", "inp", ",", "environ", ")", "for", "heading", ",", "key", ",", "value", "in", "parsing", ":", "heading", "=", "self", ".", "default_heading", "if", "heading", "is", "None", "else", "heading", "if", "heading", "not", "in", "self", ".", "headings", ":", "self", ".", "headings", "[", "heading", "]", "=", "OrderedDict", "(", ")", "self", ".", "headings", "[", "heading", "]", "[", "key", "]", "=", "value", "parsing", ".", "close", "(", ")" ]
Read data from file into this vexrc instance.
[ "Read", "data", "from", "file", "into", "this", "vexrc", "instance", "." ]
b7680c40897b8cbe6aae55ec9812b4fb11738192
https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/config.py#L61-L76
train
sashahart/vex
vex/config.py
Vexrc.get_ve_base
def get_ve_base(self, environ): """Find a directory to look for virtualenvs in. """ # set ve_base to a path we can look for virtualenvs: # 1. .vexrc # 2. WORKON_HOME (as defined for virtualenvwrapper's benefit) # 3. $HOME/.virtualenvs # (unless we got --path, then we don't need it) ve_base_value = self.headings[self.default_heading].get('virtualenvs') if ve_base_value: ve_base = os.path.expanduser(ve_base_value) else: ve_base = environ.get('WORKON_HOME', '') if not ve_base: # On Cygwin os.name == 'posix' and we want $HOME. if platform.system() == 'Windows' and os.name == 'nt': _win_drive = environ.get('HOMEDRIVE') home = environ.get('HOMEPATH', '') if home: home = os.path.join(_win_drive, home) else: home = environ.get('HOME', '') if not home: home = os.path.expanduser('~') if not home: return '' ve_base = os.path.join(home, '.virtualenvs') # pass through invalid paths so messages can be generated # if not os.path.exists(ve_base) or os.path.isfile(ve_base): # return '' return ve_base or ''
python
def get_ve_base(self, environ): """Find a directory to look for virtualenvs in. """ # set ve_base to a path we can look for virtualenvs: # 1. .vexrc # 2. WORKON_HOME (as defined for virtualenvwrapper's benefit) # 3. $HOME/.virtualenvs # (unless we got --path, then we don't need it) ve_base_value = self.headings[self.default_heading].get('virtualenvs') if ve_base_value: ve_base = os.path.expanduser(ve_base_value) else: ve_base = environ.get('WORKON_HOME', '') if not ve_base: # On Cygwin os.name == 'posix' and we want $HOME. if platform.system() == 'Windows' and os.name == 'nt': _win_drive = environ.get('HOMEDRIVE') home = environ.get('HOMEPATH', '') if home: home = os.path.join(_win_drive, home) else: home = environ.get('HOME', '') if not home: home = os.path.expanduser('~') if not home: return '' ve_base = os.path.join(home, '.virtualenvs') # pass through invalid paths so messages can be generated # if not os.path.exists(ve_base) or os.path.isfile(ve_base): # return '' return ve_base or ''
[ "def", "get_ve_base", "(", "self", ",", "environ", ")", ":", "# set ve_base to a path we can look for virtualenvs:", "# 1. .vexrc", "# 2. WORKON_HOME (as defined for virtualenvwrapper's benefit)", "# 3. $HOME/.virtualenvs", "# (unless we got --path, then we don't need it)", "ve_base_value", "=", "self", ".", "headings", "[", "self", ".", "default_heading", "]", ".", "get", "(", "'virtualenvs'", ")", "if", "ve_base_value", ":", "ve_base", "=", "os", ".", "path", ".", "expanduser", "(", "ve_base_value", ")", "else", ":", "ve_base", "=", "environ", ".", "get", "(", "'WORKON_HOME'", ",", "''", ")", "if", "not", "ve_base", ":", "# On Cygwin os.name == 'posix' and we want $HOME.", "if", "platform", ".", "system", "(", ")", "==", "'Windows'", "and", "os", ".", "name", "==", "'nt'", ":", "_win_drive", "=", "environ", ".", "get", "(", "'HOMEDRIVE'", ")", "home", "=", "environ", ".", "get", "(", "'HOMEPATH'", ",", "''", ")", "if", "home", ":", "home", "=", "os", ".", "path", ".", "join", "(", "_win_drive", ",", "home", ")", "else", ":", "home", "=", "environ", ".", "get", "(", "'HOME'", ",", "''", ")", "if", "not", "home", ":", "home", "=", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", "if", "not", "home", ":", "return", "''", "ve_base", "=", "os", ".", "path", ".", "join", "(", "home", ",", "'.virtualenvs'", ")", "# pass through invalid paths so messages can be generated", "# if not os.path.exists(ve_base) or os.path.isfile(ve_base):", "# return ''", "return", "ve_base", "or", "''" ]
Find a directory to look for virtualenvs in.
[ "Find", "a", "directory", "to", "look", "for", "virtualenvs", "in", "." ]
b7680c40897b8cbe6aae55ec9812b4fb11738192
https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/config.py#L78-L108
train
sashahart/vex
vex/config.py
Vexrc.get_shell
def get_shell(self, environ): """Find a command to run. """ command = self.headings[self.default_heading].get('shell') if not command and os.name != 'nt': command = environ.get('SHELL', '') command = shlex.split(command) if command else None return command
python
def get_shell(self, environ): """Find a command to run. """ command = self.headings[self.default_heading].get('shell') if not command and os.name != 'nt': command = environ.get('SHELL', '') command = shlex.split(command) if command else None return command
[ "def", "get_shell", "(", "self", ",", "environ", ")", ":", "command", "=", "self", ".", "headings", "[", "self", ".", "default_heading", "]", ".", "get", "(", "'shell'", ")", "if", "not", "command", "and", "os", ".", "name", "!=", "'nt'", ":", "command", "=", "environ", ".", "get", "(", "'SHELL'", ",", "''", ")", "command", "=", "shlex", ".", "split", "(", "command", ")", "if", "command", "else", "None", "return", "command" ]
Find a command to run.
[ "Find", "a", "command", "to", "run", "." ]
b7680c40897b8cbe6aae55ec9812b4fb11738192
https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/config.py#L110-L117
train
rndusr/torf
torf/_torrent.py
Torrent.name
def name(self): """ Name of the torrent Default to last item in :attr:`path` or ``None`` if :attr:`path` is ``None``. Setting this property sets or removes ``name`` in :attr:`metainfo`\ ``['info']``. """ if 'name' not in self.metainfo['info'] and self.path is not None: self.metainfo['info']['name'] = os.path.basename(self.path) return self.metainfo['info'].get('name', None)
python
def name(self): """ Name of the torrent Default to last item in :attr:`path` or ``None`` if :attr:`path` is ``None``. Setting this property sets or removes ``name`` in :attr:`metainfo`\ ``['info']``. """ if 'name' not in self.metainfo['info'] and self.path is not None: self.metainfo['info']['name'] = os.path.basename(self.path) return self.metainfo['info'].get('name', None)
[ "def", "name", "(", "self", ")", ":", "if", "'name'", "not", "in", "self", ".", "metainfo", "[", "'info'", "]", "and", "self", ".", "path", "is", "not", "None", ":", "self", ".", "metainfo", "[", "'info'", "]", "[", "'name'", "]", "=", "os", ".", "path", ".", "basename", "(", "self", ".", "path", ")", "return", "self", ".", "metainfo", "[", "'info'", "]", ".", "get", "(", "'name'", ",", "None", ")" ]
Name of the torrent Default to last item in :attr:`path` or ``None`` if :attr:`path` is ``None``. Setting this property sets or removes ``name`` in :attr:`metainfo`\ ``['info']``.
[ "Name", "of", "the", "torrent" ]
df0363232daacd3f8c91aafddaa0623b8c28cbd2
https://github.com/rndusr/torf/blob/df0363232daacd3f8c91aafddaa0623b8c28cbd2/torf/_torrent.py#L323-L335
train
rndusr/torf
torf/_torrent.py
Torrent.trackers
def trackers(self): """ List of tiers of announce URLs or ``None`` for no trackers A tier is either a single announce URL (:class:`str`) or an :class:`~collections.abc.Iterable` (e.g. :class:`list`) of announce URLs. Setting this property sets or removes ``announce`` and ``announce-list`` in :attr:`metainfo`. ``announce`` is set to the first tracker of the first tier. :raises URLError: if any of the announce URLs is invalid """ announce_list = self.metainfo.get('announce-list', None) if not announce_list: announce = self.metainfo.get('announce', None) if announce: return [[announce]] else: return announce_list
python
def trackers(self): """ List of tiers of announce URLs or ``None`` for no trackers A tier is either a single announce URL (:class:`str`) or an :class:`~collections.abc.Iterable` (e.g. :class:`list`) of announce URLs. Setting this property sets or removes ``announce`` and ``announce-list`` in :attr:`metainfo`. ``announce`` is set to the first tracker of the first tier. :raises URLError: if any of the announce URLs is invalid """ announce_list = self.metainfo.get('announce-list', None) if not announce_list: announce = self.metainfo.get('announce', None) if announce: return [[announce]] else: return announce_list
[ "def", "trackers", "(", "self", ")", ":", "announce_list", "=", "self", ".", "metainfo", ".", "get", "(", "'announce-list'", ",", "None", ")", "if", "not", "announce_list", ":", "announce", "=", "self", ".", "metainfo", ".", "get", "(", "'announce'", ",", "None", ")", "if", "announce", ":", "return", "[", "[", "announce", "]", "]", "else", ":", "return", "announce_list" ]
List of tiers of announce URLs or ``None`` for no trackers A tier is either a single announce URL (:class:`str`) or an :class:`~collections.abc.Iterable` (e.g. :class:`list`) of announce URLs. Setting this property sets or removes ``announce`` and ``announce-list`` in :attr:`metainfo`. ``announce`` is set to the first tracker of the first tier. :raises URLError: if any of the announce URLs is invalid
[ "List", "of", "tiers", "of", "announce", "URLs", "or", "None", "for", "no", "trackers" ]
df0363232daacd3f8c91aafddaa0623b8c28cbd2
https://github.com/rndusr/torf/blob/df0363232daacd3f8c91aafddaa0623b8c28cbd2/torf/_torrent.py#L345-L365
train
rndusr/torf
torf/_torrent.py
Torrent.infohash
def infohash(self): """SHA1 info hash""" self.validate() info = self.convert()[b'info'] return sha1(bencode(info)).hexdigest()
python
def infohash(self): """SHA1 info hash""" self.validate() info = self.convert()[b'info'] return sha1(bencode(info)).hexdigest()
[ "def", "infohash", "(", "self", ")", ":", "self", ".", "validate", "(", ")", "info", "=", "self", ".", "convert", "(", ")", "[", "b'info'", "]", "return", "sha1", "(", "bencode", "(", "info", ")", ")", ".", "hexdigest", "(", ")" ]
SHA1 info hash
[ "SHA1", "info", "hash" ]
df0363232daacd3f8c91aafddaa0623b8c28cbd2
https://github.com/rndusr/torf/blob/df0363232daacd3f8c91aafddaa0623b8c28cbd2/torf/_torrent.py#L548-L552
train
rndusr/torf
torf/_torrent.py
Torrent.infohash_base32
def infohash_base32(self): """Base32 encoded SHA1 info hash""" self.validate() info = self.convert()[b'info'] return b32encode(sha1(bencode(info)).digest())
python
def infohash_base32(self): """Base32 encoded SHA1 info hash""" self.validate() info = self.convert()[b'info'] return b32encode(sha1(bencode(info)).digest())
[ "def", "infohash_base32", "(", "self", ")", ":", "self", ".", "validate", "(", ")", "info", "=", "self", ".", "convert", "(", ")", "[", "b'info'", "]", "return", "b32encode", "(", "sha1", "(", "bencode", "(", "info", ")", ")", ".", "digest", "(", ")", ")" ]
Base32 encoded SHA1 info hash
[ "Base32", "encoded", "SHA1", "info", "hash" ]
df0363232daacd3f8c91aafddaa0623b8c28cbd2
https://github.com/rndusr/torf/blob/df0363232daacd3f8c91aafddaa0623b8c28cbd2/torf/_torrent.py#L555-L559
train
rndusr/torf
torf/_torrent.py
Torrent.generate
def generate(self, callback=None, interval=0): """ Hash pieces and report progress to `callback` This method sets ``pieces`` in :attr:`metainfo`\ ``['info']`` when all pieces are hashed successfully. :param callable callback: Callable with signature ``(torrent, filepath, pieces_done, pieces_total)``; if `callback` returns anything else than None, hashing is canceled :param float interval: Minimum number of seconds between calls to `callback` (if 0, `callback` is called once per piece) :raises PathEmptyError: if :attr:`path` contains only empty files/directories :raises PathNotFoundError: if :attr:`path` does not exist :raises ReadError: if :attr:`path` or any file beneath it is not readable :return: ``True`` if all pieces were successfully hashed, ``False`` otherwise """ if self.path is None: raise RuntimeError('generate() called with no path specified') elif self.size <= 0: raise error.PathEmptyError(self.path) elif not os.path.exists(self.path): raise error.PathNotFoundError(self.path) if callback is not None: cancel = lambda *status: callback(*status) is not None else: cancel = lambda *status: False if os.path.isfile(self.path): pieces = self._set_pieces_singlefile() elif os.path.isdir(self.path): pieces = self._set_pieces_multifile() # Iterate over hashed pieces and send status information last_cb_call = 0 for filepath,pieces_done,pieces_total in pieces: now = time.time() if now - last_cb_call >= interval or \ pieces_done >= pieces_total: last_cb_call = now if cancel(self, filepath, pieces_done, pieces_total): return False return True
python
def generate(self, callback=None, interval=0): """ Hash pieces and report progress to `callback` This method sets ``pieces`` in :attr:`metainfo`\ ``['info']`` when all pieces are hashed successfully. :param callable callback: Callable with signature ``(torrent, filepath, pieces_done, pieces_total)``; if `callback` returns anything else than None, hashing is canceled :param float interval: Minimum number of seconds between calls to `callback` (if 0, `callback` is called once per piece) :raises PathEmptyError: if :attr:`path` contains only empty files/directories :raises PathNotFoundError: if :attr:`path` does not exist :raises ReadError: if :attr:`path` or any file beneath it is not readable :return: ``True`` if all pieces were successfully hashed, ``False`` otherwise """ if self.path is None: raise RuntimeError('generate() called with no path specified') elif self.size <= 0: raise error.PathEmptyError(self.path) elif not os.path.exists(self.path): raise error.PathNotFoundError(self.path) if callback is not None: cancel = lambda *status: callback(*status) is not None else: cancel = lambda *status: False if os.path.isfile(self.path): pieces = self._set_pieces_singlefile() elif os.path.isdir(self.path): pieces = self._set_pieces_multifile() # Iterate over hashed pieces and send status information last_cb_call = 0 for filepath,pieces_done,pieces_total in pieces: now = time.time() if now - last_cb_call >= interval or \ pieces_done >= pieces_total: last_cb_call = now if cancel(self, filepath, pieces_done, pieces_total): return False return True
[ "def", "generate", "(", "self", ",", "callback", "=", "None", ",", "interval", "=", "0", ")", ":", "if", "self", ".", "path", "is", "None", ":", "raise", "RuntimeError", "(", "'generate() called with no path specified'", ")", "elif", "self", ".", "size", "<=", "0", ":", "raise", "error", ".", "PathEmptyError", "(", "self", ".", "path", ")", "elif", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "path", ")", ":", "raise", "error", ".", "PathNotFoundError", "(", "self", ".", "path", ")", "if", "callback", "is", "not", "None", ":", "cancel", "=", "lambda", "*", "status", ":", "callback", "(", "*", "status", ")", "is", "not", "None", "else", ":", "cancel", "=", "lambda", "*", "status", ":", "False", "if", "os", ".", "path", ".", "isfile", "(", "self", ".", "path", ")", ":", "pieces", "=", "self", ".", "_set_pieces_singlefile", "(", ")", "elif", "os", ".", "path", ".", "isdir", "(", "self", ".", "path", ")", ":", "pieces", "=", "self", ".", "_set_pieces_multifile", "(", ")", "# Iterate over hashed pieces and send status information", "last_cb_call", "=", "0", "for", "filepath", ",", "pieces_done", ",", "pieces_total", "in", "pieces", ":", "now", "=", "time", ".", "time", "(", ")", "if", "now", "-", "last_cb_call", ">=", "interval", "or", "pieces_done", ">=", "pieces_total", ":", "last_cb_call", "=", "now", "if", "cancel", "(", "self", ",", "filepath", ",", "pieces_done", ",", "pieces_total", ")", ":", "return", "False", "return", "True" ]
Hash pieces and report progress to `callback` This method sets ``pieces`` in :attr:`metainfo`\ ``['info']`` when all pieces are hashed successfully. :param callable callback: Callable with signature ``(torrent, filepath, pieces_done, pieces_total)``; if `callback` returns anything else than None, hashing is canceled :param float interval: Minimum number of seconds between calls to `callback` (if 0, `callback` is called once per piece) :raises PathEmptyError: if :attr:`path` contains only empty files/directories :raises PathNotFoundError: if :attr:`path` does not exist :raises ReadError: if :attr:`path` or any file beneath it is not readable :return: ``True`` if all pieces were successfully hashed, ``False`` otherwise
[ "Hash", "pieces", "and", "report", "progress", "to", "callback" ]
df0363232daacd3f8c91aafddaa0623b8c28cbd2
https://github.com/rndusr/torf/blob/df0363232daacd3f8c91aafddaa0623b8c28cbd2/torf/_torrent.py#L593-L641
train
rndusr/torf
torf/_torrent.py
Torrent.magnet
def magnet(self, name=True, size=True, trackers=True, tracker=False, validate=True): """ BTIH Magnet URI :param bool name: Whether to include the name :param bool size: Whether to include the size :param bool trackers: Whether to include all trackers :param bool tracker: Whether to include only the first tracker of the first tier (overrides `trackers`) :param bool validate: Whether to run :meth:`validate` first """ if validate: self.validate() parts = [f'xt=urn:btih:{self.infohash}'] if name: parts.append(f'dn={utils.urlquote(self.name)}') if size: parts.append(f'xl={self.size}') if self.trackers is not None: if tracker: parts.append(f'tr={utils.urlquote(self.trackers[0][0])}') elif trackers: for tier in self.trackers: for url in tier: parts.append(f'tr={utils.urlquote(url)}') return 'magnet:?' + '&'.join(parts)
python
def magnet(self, name=True, size=True, trackers=True, tracker=False, validate=True): """ BTIH Magnet URI :param bool name: Whether to include the name :param bool size: Whether to include the size :param bool trackers: Whether to include all trackers :param bool tracker: Whether to include only the first tracker of the first tier (overrides `trackers`) :param bool validate: Whether to run :meth:`validate` first """ if validate: self.validate() parts = [f'xt=urn:btih:{self.infohash}'] if name: parts.append(f'dn={utils.urlquote(self.name)}') if size: parts.append(f'xl={self.size}') if self.trackers is not None: if tracker: parts.append(f'tr={utils.urlquote(self.trackers[0][0])}') elif trackers: for tier in self.trackers: for url in tier: parts.append(f'tr={utils.urlquote(url)}') return 'magnet:?' + '&'.join(parts)
[ "def", "magnet", "(", "self", ",", "name", "=", "True", ",", "size", "=", "True", ",", "trackers", "=", "True", ",", "tracker", "=", "False", ",", "validate", "=", "True", ")", ":", "if", "validate", ":", "self", ".", "validate", "(", ")", "parts", "=", "[", "f'xt=urn:btih:{self.infohash}'", "]", "if", "name", ":", "parts", ".", "append", "(", "f'dn={utils.urlquote(self.name)}'", ")", "if", "size", ":", "parts", ".", "append", "(", "f'xl={self.size}'", ")", "if", "self", ".", "trackers", "is", "not", "None", ":", "if", "tracker", ":", "parts", ".", "append", "(", "f'tr={utils.urlquote(self.trackers[0][0])}'", ")", "elif", "trackers", ":", "for", "tier", "in", "self", ".", "trackers", ":", "for", "url", "in", "tier", ":", "parts", ".", "append", "(", "f'tr={utils.urlquote(url)}'", ")", "return", "'magnet:?'", "+", "'&'", ".", "join", "(", "parts", ")" ]
BTIH Magnet URI :param bool name: Whether to include the name :param bool size: Whether to include the size :param bool trackers: Whether to include all trackers :param bool tracker: Whether to include only the first tracker of the first tier (overrides `trackers`) :param bool validate: Whether to run :meth:`validate` first
[ "BTIH", "Magnet", "URI" ]
df0363232daacd3f8c91aafddaa0623b8c28cbd2
https://github.com/rndusr/torf/blob/df0363232daacd3f8c91aafddaa0623b8c28cbd2/torf/_torrent.py#L867-L895
train
rndusr/torf
torf/_torrent.py
Torrent.read_stream
def read_stream(cls, stream, validate=True): """ Read torrent metainfo from file-like object :param stream: Readable file-like object (e.g. :class:`io.BytesIO`) :param bool validate: Whether to run :meth:`validate` on the new Torrent object :raises ReadError: if reading from `stream` fails :raises ParseError: if `stream` does not produce a valid bencoded byte string :raises MetainfoError: if `validate` is `True` and the read metainfo is invalid :return: New Torrent object """ try: content = stream.read(cls.MAX_TORRENT_FILE_SIZE) except OSError as e: raise error.ReadError(e.errno) else: try: metainfo_enc = bdecode(content) except BTFailure as e: raise error.ParseError() if validate: if b'info' not in metainfo_enc: raise error.MetainfoError("Missing 'info'") elif not isinstance(metainfo_enc[b'info'], abc.Mapping): raise error.MetainfoError("'info' is not a dictionary") elif b'pieces' not in metainfo_enc[b'info']: raise error.MetainfoError("Missing 'pieces' in ['info']") # Extract 'pieces' from metainfo because it's the only byte string # that isn't supposed to be decoded to unicode. if b'info' in metainfo_enc and b'pieces' in metainfo_enc[b'info']: pieces = metainfo_enc[b'info'].pop(b'pieces') metainfo = utils.decode_dict(metainfo_enc) metainfo['info']['pieces'] = pieces else: metainfo = utils.decode_dict(metainfo_enc) torrent = cls() torrent._metainfo = metainfo # Convert some values from official types to something nicer # (e.g. int -> datetime) for attr in ('creation_date', 'private'): setattr(torrent, attr, getattr(torrent, attr)) # Auto-set 'include_md5' info = torrent.metainfo['info'] torrent.include_md5 = ('length' in info and 'md5sum' in info) or \ ('files' in info and all('md5sum' in fileinfo for fileinfo in info['files'])) if validate: torrent.validate() return torrent
python
def read_stream(cls, stream, validate=True): """ Read torrent metainfo from file-like object :param stream: Readable file-like object (e.g. :class:`io.BytesIO`) :param bool validate: Whether to run :meth:`validate` on the new Torrent object :raises ReadError: if reading from `stream` fails :raises ParseError: if `stream` does not produce a valid bencoded byte string :raises MetainfoError: if `validate` is `True` and the read metainfo is invalid :return: New Torrent object """ try: content = stream.read(cls.MAX_TORRENT_FILE_SIZE) except OSError as e: raise error.ReadError(e.errno) else: try: metainfo_enc = bdecode(content) except BTFailure as e: raise error.ParseError() if validate: if b'info' not in metainfo_enc: raise error.MetainfoError("Missing 'info'") elif not isinstance(metainfo_enc[b'info'], abc.Mapping): raise error.MetainfoError("'info' is not a dictionary") elif b'pieces' not in metainfo_enc[b'info']: raise error.MetainfoError("Missing 'pieces' in ['info']") # Extract 'pieces' from metainfo because it's the only byte string # that isn't supposed to be decoded to unicode. if b'info' in metainfo_enc and b'pieces' in metainfo_enc[b'info']: pieces = metainfo_enc[b'info'].pop(b'pieces') metainfo = utils.decode_dict(metainfo_enc) metainfo['info']['pieces'] = pieces else: metainfo = utils.decode_dict(metainfo_enc) torrent = cls() torrent._metainfo = metainfo # Convert some values from official types to something nicer # (e.g. int -> datetime) for attr in ('creation_date', 'private'): setattr(torrent, attr, getattr(torrent, attr)) # Auto-set 'include_md5' info = torrent.metainfo['info'] torrent.include_md5 = ('length' in info and 'md5sum' in info) or \ ('files' in info and all('md5sum' in fileinfo for fileinfo in info['files'])) if validate: torrent.validate() return torrent
[ "def", "read_stream", "(", "cls", ",", "stream", ",", "validate", "=", "True", ")", ":", "try", ":", "content", "=", "stream", ".", "read", "(", "cls", ".", "MAX_TORRENT_FILE_SIZE", ")", "except", "OSError", "as", "e", ":", "raise", "error", ".", "ReadError", "(", "e", ".", "errno", ")", "else", ":", "try", ":", "metainfo_enc", "=", "bdecode", "(", "content", ")", "except", "BTFailure", "as", "e", ":", "raise", "error", ".", "ParseError", "(", ")", "if", "validate", ":", "if", "b'info'", "not", "in", "metainfo_enc", ":", "raise", "error", ".", "MetainfoError", "(", "\"Missing 'info'\"", ")", "elif", "not", "isinstance", "(", "metainfo_enc", "[", "b'info'", "]", ",", "abc", ".", "Mapping", ")", ":", "raise", "error", ".", "MetainfoError", "(", "\"'info' is not a dictionary\"", ")", "elif", "b'pieces'", "not", "in", "metainfo_enc", "[", "b'info'", "]", ":", "raise", "error", ".", "MetainfoError", "(", "\"Missing 'pieces' in ['info']\"", ")", "# Extract 'pieces' from metainfo because it's the only byte string", "# that isn't supposed to be decoded to unicode.", "if", "b'info'", "in", "metainfo_enc", "and", "b'pieces'", "in", "metainfo_enc", "[", "b'info'", "]", ":", "pieces", "=", "metainfo_enc", "[", "b'info'", "]", ".", "pop", "(", "b'pieces'", ")", "metainfo", "=", "utils", ".", "decode_dict", "(", "metainfo_enc", ")", "metainfo", "[", "'info'", "]", "[", "'pieces'", "]", "=", "pieces", "else", ":", "metainfo", "=", "utils", ".", "decode_dict", "(", "metainfo_enc", ")", "torrent", "=", "cls", "(", ")", "torrent", ".", "_metainfo", "=", "metainfo", "# Convert some values from official types to something nicer", "# (e.g. 
int -> datetime)", "for", "attr", "in", "(", "'creation_date'", ",", "'private'", ")", ":", "setattr", "(", "torrent", ",", "attr", ",", "getattr", "(", "torrent", ",", "attr", ")", ")", "# Auto-set 'include_md5'", "info", "=", "torrent", ".", "metainfo", "[", "'info'", "]", "torrent", ".", "include_md5", "=", "(", "'length'", "in", "info", "and", "'md5sum'", "in", "info", ")", "or", "(", "'files'", "in", "info", "and", "all", "(", "'md5sum'", "in", "fileinfo", "for", "fileinfo", "in", "info", "[", "'files'", "]", ")", ")", "if", "validate", ":", "torrent", ".", "validate", "(", ")", "return", "torrent" ]
Read torrent metainfo from file-like object :param stream: Readable file-like object (e.g. :class:`io.BytesIO`) :param bool validate: Whether to run :meth:`validate` on the new Torrent object :raises ReadError: if reading from `stream` fails :raises ParseError: if `stream` does not produce a valid bencoded byte string :raises MetainfoError: if `validate` is `True` and the read metainfo is invalid :return: New Torrent object
[ "Read", "torrent", "metainfo", "from", "file", "-", "like", "object" ]
df0363232daacd3f8c91aafddaa0623b8c28cbd2
https://github.com/rndusr/torf/blob/df0363232daacd3f8c91aafddaa0623b8c28cbd2/torf/_torrent.py#L903-L963
train
rndusr/torf
torf/_torrent.py
Torrent.read
def read(cls, filepath, validate=True): """ Read torrent metainfo from file :param filepath: Path of the torrent file :param bool validate: Whether to run :meth:`validate` on the new Torrent object :raises ReadError: if reading from `filepath` fails :raises ParseError: if `filepath` does not contain a valid bencoded byte string :raises MetainfoError: if `validate` is `True` and the read metainfo is invalid :return: New Torrent object """ try: with open(filepath, 'rb') as fh: return cls.read_stream(fh) except (OSError, error.ReadError) as e: raise error.ReadError(e.errno, filepath) except error.ParseError: raise error.ParseError(filepath)
python
def read(cls, filepath, validate=True): """ Read torrent metainfo from file :param filepath: Path of the torrent file :param bool validate: Whether to run :meth:`validate` on the new Torrent object :raises ReadError: if reading from `filepath` fails :raises ParseError: if `filepath` does not contain a valid bencoded byte string :raises MetainfoError: if `validate` is `True` and the read metainfo is invalid :return: New Torrent object """ try: with open(filepath, 'rb') as fh: return cls.read_stream(fh) except (OSError, error.ReadError) as e: raise error.ReadError(e.errno, filepath) except error.ParseError: raise error.ParseError(filepath)
[ "def", "read", "(", "cls", ",", "filepath", ",", "validate", "=", "True", ")", ":", "try", ":", "with", "open", "(", "filepath", ",", "'rb'", ")", "as", "fh", ":", "return", "cls", ".", "read_stream", "(", "fh", ")", "except", "(", "OSError", ",", "error", ".", "ReadError", ")", "as", "e", ":", "raise", "error", ".", "ReadError", "(", "e", ".", "errno", ",", "filepath", ")", "except", "error", ".", "ParseError", ":", "raise", "error", ".", "ParseError", "(", "filepath", ")" ]
Read torrent metainfo from file :param filepath: Path of the torrent file :param bool validate: Whether to run :meth:`validate` on the new Torrent object :raises ReadError: if reading from `filepath` fails :raises ParseError: if `filepath` does not contain a valid bencoded byte string :raises MetainfoError: if `validate` is `True` and the read metainfo is invalid :return: New Torrent object
[ "Read", "torrent", "metainfo", "from", "file" ]
df0363232daacd3f8c91aafddaa0623b8c28cbd2
https://github.com/rndusr/torf/blob/df0363232daacd3f8c91aafddaa0623b8c28cbd2/torf/_torrent.py#L966-L988
train
rndusr/torf
torf/_torrent.py
Torrent.copy
def copy(self): """ Return a new object with the same metainfo Internally, this simply copies the internal metainfo dictionary with :func:`copy.deepcopy` and gives it to the new instance. """ from copy import deepcopy cp = type(self)() cp._metainfo = deepcopy(self._metainfo) return cp
python
def copy(self): """ Return a new object with the same metainfo Internally, this simply copies the internal metainfo dictionary with :func:`copy.deepcopy` and gives it to the new instance. """ from copy import deepcopy cp = type(self)() cp._metainfo = deepcopy(self._metainfo) return cp
[ "def", "copy", "(", "self", ")", ":", "from", "copy", "import", "deepcopy", "cp", "=", "type", "(", "self", ")", "(", ")", "cp", ".", "_metainfo", "=", "deepcopy", "(", "self", ".", "_metainfo", ")", "return", "cp" ]
Return a new object with the same metainfo Internally, this simply copies the internal metainfo dictionary with :func:`copy.deepcopy` and gives it to the new instance.
[ "Return", "a", "new", "object", "with", "the", "same", "metainfo" ]
df0363232daacd3f8c91aafddaa0623b8c28cbd2
https://github.com/rndusr/torf/blob/df0363232daacd3f8c91aafddaa0623b8c28cbd2/torf/_torrent.py#L990-L1000
train
rndusr/torf
torf/_utils.py
validated_url
def validated_url(url): """Return url if valid, raise URLError otherwise""" try: u = urlparse(url) u.port # Trigger 'invalid port' exception except Exception: raise error.URLError(url) else: if not u.scheme or not u.netloc: raise error.URLError(url) return url
python
def validated_url(url): """Return url if valid, raise URLError otherwise""" try: u = urlparse(url) u.port # Trigger 'invalid port' exception except Exception: raise error.URLError(url) else: if not u.scheme or not u.netloc: raise error.URLError(url) return url
[ "def", "validated_url", "(", "url", ")", ":", "try", ":", "u", "=", "urlparse", "(", "url", ")", "u", ".", "port", "# Trigger 'invalid port' exception", "except", "Exception", ":", "raise", "error", ".", "URLError", "(", "url", ")", "else", ":", "if", "not", "u", ".", "scheme", "or", "not", "u", ".", "netloc", ":", "raise", "error", ".", "URLError", "(", "url", ")", "return", "url" ]
Return url if valid, raise URLError otherwise
[ "Return", "url", "if", "valid", "raise", "URLError", "otherwise" ]
df0363232daacd3f8c91aafddaa0623b8c28cbd2
https://github.com/rndusr/torf/blob/df0363232daacd3f8c91aafddaa0623b8c28cbd2/torf/_utils.py#L38-L48
train
rndusr/torf
torf/_utils.py
read_chunks
def read_chunks(filepath, chunk_size): """Generator that yields chunks from file""" try: with open(filepath, 'rb') as f: while True: chunk = f.read(chunk_size) if chunk: yield chunk else: break # EOF except OSError as e: raise error.ReadError(e.errno, filepath)
python
def read_chunks(filepath, chunk_size): """Generator that yields chunks from file""" try: with open(filepath, 'rb') as f: while True: chunk = f.read(chunk_size) if chunk: yield chunk else: break # EOF except OSError as e: raise error.ReadError(e.errno, filepath)
[ "def", "read_chunks", "(", "filepath", ",", "chunk_size", ")", ":", "try", ":", "with", "open", "(", "filepath", ",", "'rb'", ")", "as", "f", ":", "while", "True", ":", "chunk", "=", "f", ".", "read", "(", "chunk_size", ")", "if", "chunk", ":", "yield", "chunk", "else", ":", "break", "# EOF", "except", "OSError", "as", "e", ":", "raise", "error", ".", "ReadError", "(", "e", ".", "errno", ",", "filepath", ")" ]
Generator that yields chunks from file
[ "Generator", "that", "yields", "chunks", "from", "file" ]
df0363232daacd3f8c91aafddaa0623b8c28cbd2
https://github.com/rndusr/torf/blob/df0363232daacd3f8c91aafddaa0623b8c28cbd2/torf/_utils.py#L51-L62
train
rndusr/torf
torf/_utils.py
calc_piece_size
def calc_piece_size(total_size, max_pieces, min_piece_size, max_piece_size): """Calculate piece size""" ps = 1 << max(0, math.ceil(math.log(total_size / max_pieces, 2))) if ps < min_piece_size: ps = min_piece_size if ps > max_piece_size: ps = max_piece_size return ps
python
def calc_piece_size(total_size, max_pieces, min_piece_size, max_piece_size): """Calculate piece size""" ps = 1 << max(0, math.ceil(math.log(total_size / max_pieces, 2))) if ps < min_piece_size: ps = min_piece_size if ps > max_piece_size: ps = max_piece_size return ps
[ "def", "calc_piece_size", "(", "total_size", ",", "max_pieces", ",", "min_piece_size", ",", "max_piece_size", ")", ":", "ps", "=", "1", "<<", "max", "(", "0", ",", "math", ".", "ceil", "(", "math", ".", "log", "(", "total_size", "/", "max_pieces", ",", "2", ")", ")", ")", "if", "ps", "<", "min_piece_size", ":", "ps", "=", "min_piece_size", "if", "ps", ">", "max_piece_size", ":", "ps", "=", "max_piece_size", "return", "ps" ]
Calculate piece size
[ "Calculate", "piece", "size" ]
df0363232daacd3f8c91aafddaa0623b8c28cbd2
https://github.com/rndusr/torf/blob/df0363232daacd3f8c91aafddaa0623b8c28cbd2/torf/_utils.py#L65-L72
train
rndusr/torf
torf/_utils.py
is_power_of_2
def is_power_of_2(num): """Return whether `num` is a power of two""" log = math.log2(num) return int(log) == float(log)
python
def is_power_of_2(num): """Return whether `num` is a power of two""" log = math.log2(num) return int(log) == float(log)
[ "def", "is_power_of_2", "(", "num", ")", ":", "log", "=", "math", ".", "log2", "(", "num", ")", "return", "int", "(", "log", ")", "==", "float", "(", "log", ")" ]
Return whether `num` is a power of two
[ "Return", "whether", "num", "is", "a", "power", "of", "two" ]
df0363232daacd3f8c91aafddaa0623b8c28cbd2
https://github.com/rndusr/torf/blob/df0363232daacd3f8c91aafddaa0623b8c28cbd2/torf/_utils.py#L75-L78
train
rndusr/torf
torf/_utils.py
is_hidden
def is_hidden(path): """Whether file or directory is hidden""" for name in path.split(os.sep): if name != '.' and name != '..' and name and name[0] == '.': return True return False
python
def is_hidden(path): """Whether file or directory is hidden""" for name in path.split(os.sep): if name != '.' and name != '..' and name and name[0] == '.': return True return False
[ "def", "is_hidden", "(", "path", ")", ":", "for", "name", "in", "path", ".", "split", "(", "os", ".", "sep", ")", ":", "if", "name", "!=", "'.'", "and", "name", "!=", "'..'", "and", "name", "and", "name", "[", "0", "]", "==", "'.'", ":", "return", "True", "return", "False" ]
Whether file or directory is hidden
[ "Whether", "file", "or", "directory", "is", "hidden" ]
df0363232daacd3f8c91aafddaa0623b8c28cbd2
https://github.com/rndusr/torf/blob/df0363232daacd3f8c91aafddaa0623b8c28cbd2/torf/_utils.py#L81-L86
train
rndusr/torf
torf/_utils.py
filepaths
def filepaths(path, exclude=(), hidden=True, empty=True): """ Return list of absolute, sorted file paths path: Path to file or directory exclude: List of file name patterns to exclude hidden: Whether to include hidden files empty: Whether to include empty files Raise PathNotFoundError if path doesn't exist. """ if not os.path.exists(path): raise error.PathNotFoundError(path) elif not os.access(path, os.R_OK, effective_ids=os.access in os.supports_effective_ids): raise error.ReadError(errno.EACCES, path) if os.path.isfile(path): return [path] else: filepaths = [] for dirpath, dirnames, filenames in os.walk(path): # Ignore hidden directory if not hidden and is_hidden(dirpath): continue for filename in filenames: # Ignore hidden file if not hidden and is_hidden(filename): continue filepath = os.path.join(dirpath, filename) # Ignore excluded file if any(is_match(filepath, pattern) for pattern in exclude): continue else: # Ignore empty file if empty or os.path.getsize(os.path.realpath(filepath)) > 0: filepaths.append(filepath) return sorted(filepaths, key=lambda fp: fp.casefold())
python
def filepaths(path, exclude=(), hidden=True, empty=True): """ Return list of absolute, sorted file paths path: Path to file or directory exclude: List of file name patterns to exclude hidden: Whether to include hidden files empty: Whether to include empty files Raise PathNotFoundError if path doesn't exist. """ if not os.path.exists(path): raise error.PathNotFoundError(path) elif not os.access(path, os.R_OK, effective_ids=os.access in os.supports_effective_ids): raise error.ReadError(errno.EACCES, path) if os.path.isfile(path): return [path] else: filepaths = [] for dirpath, dirnames, filenames in os.walk(path): # Ignore hidden directory if not hidden and is_hidden(dirpath): continue for filename in filenames: # Ignore hidden file if not hidden and is_hidden(filename): continue filepath = os.path.join(dirpath, filename) # Ignore excluded file if any(is_match(filepath, pattern) for pattern in exclude): continue else: # Ignore empty file if empty or os.path.getsize(os.path.realpath(filepath)) > 0: filepaths.append(filepath) return sorted(filepaths, key=lambda fp: fp.casefold())
[ "def", "filepaths", "(", "path", ",", "exclude", "=", "(", ")", ",", "hidden", "=", "True", ",", "empty", "=", "True", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "raise", "error", ".", "PathNotFoundError", "(", "path", ")", "elif", "not", "os", ".", "access", "(", "path", ",", "os", ".", "R_OK", ",", "effective_ids", "=", "os", ".", "access", "in", "os", ".", "supports_effective_ids", ")", ":", "raise", "error", ".", "ReadError", "(", "errno", ".", "EACCES", ",", "path", ")", "if", "os", ".", "path", ".", "isfile", "(", "path", ")", ":", "return", "[", "path", "]", "else", ":", "filepaths", "=", "[", "]", "for", "dirpath", ",", "dirnames", ",", "filenames", "in", "os", ".", "walk", "(", "path", ")", ":", "# Ignore hidden directory", "if", "not", "hidden", "and", "is_hidden", "(", "dirpath", ")", ":", "continue", "for", "filename", "in", "filenames", ":", "# Ignore hidden file", "if", "not", "hidden", "and", "is_hidden", "(", "filename", ")", ":", "continue", "filepath", "=", "os", ".", "path", ".", "join", "(", "dirpath", ",", "filename", ")", "# Ignore excluded file", "if", "any", "(", "is_match", "(", "filepath", ",", "pattern", ")", "for", "pattern", "in", "exclude", ")", ":", "continue", "else", ":", "# Ignore empty file", "if", "empty", "or", "os", ".", "path", ".", "getsize", "(", "os", ".", "path", ".", "realpath", "(", "filepath", ")", ")", ">", "0", ":", "filepaths", ".", "append", "(", "filepath", ")", "return", "sorted", "(", "filepaths", ",", "key", "=", "lambda", "fp", ":", "fp", ".", "casefold", "(", ")", ")" ]
Return list of absolute, sorted file paths path: Path to file or directory exclude: List of file name patterns to exclude hidden: Whether to include hidden files empty: Whether to include empty files Raise PathNotFoundError if path doesn't exist.
[ "Return", "list", "of", "absolute", "sorted", "file", "paths" ]
df0363232daacd3f8c91aafddaa0623b8c28cbd2
https://github.com/rndusr/torf/blob/df0363232daacd3f8c91aafddaa0623b8c28cbd2/torf/_utils.py#L96-L136
train
rndusr/torf
torf/_utils.py
assert_type
def assert_type(lst_or_dct, keys, exp_types, must_exist=True, check=None): """ Raise MetainfoError is not of a particular type lst_or_dct: list or dict instance keys: Sequence of keys so that `lst_or_dct[key[0]][key[1]]...` resolves to a value exp_types: Sequence of types that the value specified by `keys` must be an instance of must_exist: Whether to raise MetainfoError if `keys` does not resolve to a value check: Callable that gets the value specified by `keys` and returns True if it OK, False otherwise """ keys = list(keys) keychain = [] while len(keys[:-1]) > 0: key = keys.pop(0) try: lst_or_dct = lst_or_dct[key] except (KeyError, IndexError): break keychain.append(key) keychain_str = ''.join(f'[{key!r}]' for key in keychain) key = keys.pop(0) if not key_exists_in_list_or_dict(key, lst_or_dct): if not must_exist: return raise error.MetainfoError(f"Missing {key!r} in {keychain_str}") elif not isinstance(lst_or_dct[key], exp_types): exp_types_str = ' or '.join(t.__name__ for t in exp_types) type_str = type(lst_or_dct[key]).__name__ raise error.MetainfoError(f"{keychain_str}[{key!r}] must be {exp_types_str}, " f"not {type_str}: {lst_or_dct[key]!r}") elif check is not None and not check(lst_or_dct[key]): raise error.MetainfoError(f"{keychain_str}[{key!r}] is invalid: {lst_or_dct[key]!r}")
python
def assert_type(lst_or_dct, keys, exp_types, must_exist=True, check=None): """ Raise MetainfoError is not of a particular type lst_or_dct: list or dict instance keys: Sequence of keys so that `lst_or_dct[key[0]][key[1]]...` resolves to a value exp_types: Sequence of types that the value specified by `keys` must be an instance of must_exist: Whether to raise MetainfoError if `keys` does not resolve to a value check: Callable that gets the value specified by `keys` and returns True if it OK, False otherwise """ keys = list(keys) keychain = [] while len(keys[:-1]) > 0: key = keys.pop(0) try: lst_or_dct = lst_or_dct[key] except (KeyError, IndexError): break keychain.append(key) keychain_str = ''.join(f'[{key!r}]' for key in keychain) key = keys.pop(0) if not key_exists_in_list_or_dict(key, lst_or_dct): if not must_exist: return raise error.MetainfoError(f"Missing {key!r} in {keychain_str}") elif not isinstance(lst_or_dct[key], exp_types): exp_types_str = ' or '.join(t.__name__ for t in exp_types) type_str = type(lst_or_dct[key]).__name__ raise error.MetainfoError(f"{keychain_str}[{key!r}] must be {exp_types_str}, " f"not {type_str}: {lst_or_dct[key]!r}") elif check is not None and not check(lst_or_dct[key]): raise error.MetainfoError(f"{keychain_str}[{key!r}] is invalid: {lst_or_dct[key]!r}")
[ "def", "assert_type", "(", "lst_or_dct", ",", "keys", ",", "exp_types", ",", "must_exist", "=", "True", ",", "check", "=", "None", ")", ":", "keys", "=", "list", "(", "keys", ")", "keychain", "=", "[", "]", "while", "len", "(", "keys", "[", ":", "-", "1", "]", ")", ">", "0", ":", "key", "=", "keys", ".", "pop", "(", "0", ")", "try", ":", "lst_or_dct", "=", "lst_or_dct", "[", "key", "]", "except", "(", "KeyError", ",", "IndexError", ")", ":", "break", "keychain", ".", "append", "(", "key", ")", "keychain_str", "=", "''", ".", "join", "(", "f'[{key!r}]'", "for", "key", "in", "keychain", ")", "key", "=", "keys", ".", "pop", "(", "0", ")", "if", "not", "key_exists_in_list_or_dict", "(", "key", ",", "lst_or_dct", ")", ":", "if", "not", "must_exist", ":", "return", "raise", "error", ".", "MetainfoError", "(", "f\"Missing {key!r} in {keychain_str}\"", ")", "elif", "not", "isinstance", "(", "lst_or_dct", "[", "key", "]", ",", "exp_types", ")", ":", "exp_types_str", "=", "' or '", ".", "join", "(", "t", ".", "__name__", "for", "t", "in", "exp_types", ")", "type_str", "=", "type", "(", "lst_or_dct", "[", "key", "]", ")", ".", "__name__", "raise", "error", ".", "MetainfoError", "(", "f\"{keychain_str}[{key!r}] must be {exp_types_str}, \"", "f\"not {type_str}: {lst_or_dct[key]!r}\"", ")", "elif", "check", "is", "not", "None", "and", "not", "check", "(", "lst_or_dct", "[", "key", "]", ")", ":", "raise", "error", ".", "MetainfoError", "(", "f\"{keychain_str}[{key!r}] is invalid: {lst_or_dct[key]!r}\"", ")" ]
Raise MetainfoError is not of a particular type lst_or_dct: list or dict instance keys: Sequence of keys so that `lst_or_dct[key[0]][key[1]]...` resolves to a value exp_types: Sequence of types that the value specified by `keys` must be an instance of must_exist: Whether to raise MetainfoError if `keys` does not resolve to a value check: Callable that gets the value specified by `keys` and returns True if it OK, False otherwise
[ "Raise", "MetainfoError", "is", "not", "of", "a", "particular", "type" ]
df0363232daacd3f8c91aafddaa0623b8c28cbd2
https://github.com/rndusr/torf/blob/df0363232daacd3f8c91aafddaa0623b8c28cbd2/torf/_utils.py#L149-L188
train
reincubate/ricloud
ricloud/utils.py
error_message_and_exit
def error_message_and_exit(message, error_result): """Prints error messages in blue, the failed task result and quits.""" if message: error_message(message) puts(json.dumps(error_result, indent=2)) sys.exit(1)
python
def error_message_and_exit(message, error_result): """Prints error messages in blue, the failed task result and quits.""" if message: error_message(message) puts(json.dumps(error_result, indent=2)) sys.exit(1)
[ "def", "error_message_and_exit", "(", "message", ",", "error_result", ")", ":", "if", "message", ":", "error_message", "(", "message", ")", "puts", "(", "json", ".", "dumps", "(", "error_result", ",", "indent", "=", "2", ")", ")", "sys", ".", "exit", "(", "1", ")" ]
Prints error messages in blue, the failed task result and quits.
[ "Prints", "error", "messages", "in", "blue", "the", "failed", "task", "result", "and", "quits", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/utils.py#L82-L87
train
reincubate/ricloud
ricloud/utils.py
print_prompt_values
def print_prompt_values(values, message=None, sub_attr=None): """Prints prompt title and choices with a bit of formatting.""" if message: prompt_message(message) for index, entry in enumerate(values): if sub_attr: line = '{:2d}: {}'.format(index, getattr(utf8(entry), sub_attr)) else: line = '{:2d}: {}'.format(index, utf8(entry)) with indent(3): print_message(line)
python
def print_prompt_values(values, message=None, sub_attr=None): """Prints prompt title and choices with a bit of formatting.""" if message: prompt_message(message) for index, entry in enumerate(values): if sub_attr: line = '{:2d}: {}'.format(index, getattr(utf8(entry), sub_attr)) else: line = '{:2d}: {}'.format(index, utf8(entry)) with indent(3): print_message(line)
[ "def", "print_prompt_values", "(", "values", ",", "message", "=", "None", ",", "sub_attr", "=", "None", ")", ":", "if", "message", ":", "prompt_message", "(", "message", ")", "for", "index", ",", "entry", "in", "enumerate", "(", "values", ")", ":", "if", "sub_attr", ":", "line", "=", "'{:2d}: {}'", ".", "format", "(", "index", ",", "getattr", "(", "utf8", "(", "entry", ")", ",", "sub_attr", ")", ")", "else", ":", "line", "=", "'{:2d}: {}'", ".", "format", "(", "index", ",", "utf8", "(", "entry", ")", ")", "with", "indent", "(", "3", ")", ":", "print_message", "(", "line", ")" ]
Prints prompt title and choices with a bit of formatting.
[ "Prints", "prompt", "title", "and", "choices", "with", "a", "bit", "of", "formatting", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/utils.py#L90-L102
train
reincubate/ricloud
ricloud/utils.py
prompt_for_input
def prompt_for_input(message, input_type=None): """Prints prompt instruction and does basic input parsing.""" while True: output = prompt.query(message) if input_type: try: output = input_type(output) except ValueError: error_message('Invalid input type') continue break return output
python
def prompt_for_input(message, input_type=None): """Prints prompt instruction and does basic input parsing.""" while True: output = prompt.query(message) if input_type: try: output = input_type(output) except ValueError: error_message('Invalid input type') continue break return output
[ "def", "prompt_for_input", "(", "message", ",", "input_type", "=", "None", ")", ":", "while", "True", ":", "output", "=", "prompt", ".", "query", "(", "message", ")", "if", "input_type", ":", "try", ":", "output", "=", "input_type", "(", "output", ")", "except", "ValueError", ":", "error_message", "(", "'Invalid input type'", ")", "continue", "break", "return", "output" ]
Prints prompt instruction and does basic input parsing.
[ "Prints", "prompt", "instruction", "and", "does", "basic", "input", "parsing", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/utils.py#L105-L119
train
reincubate/ricloud
ricloud/utils.py
prompt_for_choice
def prompt_for_choice(values, message, input_type=int, output_type=None): """Prints prompt with a list of choices to choose from.""" output = None while not output: index = prompt_for_input(message, input_type=input_type) try: output = utf8(values[index]) except IndexError: error_message('Selection out of range') continue if output_type: output = output_type(output) return output
python
def prompt_for_choice(values, message, input_type=int, output_type=None): """Prints prompt with a list of choices to choose from.""" output = None while not output: index = prompt_for_input(message, input_type=input_type) try: output = utf8(values[index]) except IndexError: error_message('Selection out of range') continue if output_type: output = output_type(output) return output
[ "def", "prompt_for_choice", "(", "values", ",", "message", ",", "input_type", "=", "int", ",", "output_type", "=", "None", ")", ":", "output", "=", "None", "while", "not", "output", ":", "index", "=", "prompt_for_input", "(", "message", ",", "input_type", "=", "input_type", ")", "try", ":", "output", "=", "utf8", "(", "values", "[", "index", "]", ")", "except", "IndexError", ":", "error_message", "(", "'Selection out of range'", ")", "continue", "if", "output_type", ":", "output", "=", "output_type", "(", "output", ")", "return", "output" ]
Prints prompt with a list of choices to choose from.
[ "Prints", "prompt", "with", "a", "list", "of", "choices", "to", "choose", "from", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/utils.py#L122-L137
train
reincubate/ricloud
ricloud/object_store.py
ObjectStore._retrieve_result
def _retrieve_result(endpoints, token_header): """Prepare the request list and execute them concurrently.""" request_list = [ (url, token_header) for (task_id, url) in endpoints ] responses = concurrent_get(request_list) # Quick sanity check assert len(endpoints) == len(responses) responses_dic = { task_id: r.content for (task_id, _), r in zip(endpoints, responses) } return responses_dic
python
def _retrieve_result(endpoints, token_header): """Prepare the request list and execute them concurrently.""" request_list = [ (url, token_header) for (task_id, url) in endpoints ] responses = concurrent_get(request_list) # Quick sanity check assert len(endpoints) == len(responses) responses_dic = { task_id: r.content for (task_id, _), r in zip(endpoints, responses) } return responses_dic
[ "def", "_retrieve_result", "(", "endpoints", ",", "token_header", ")", ":", "request_list", "=", "[", "(", "url", ",", "token_header", ")", "for", "(", "task_id", ",", "url", ")", "in", "endpoints", "]", "responses", "=", "concurrent_get", "(", "request_list", ")", "# Quick sanity check", "assert", "len", "(", "endpoints", ")", "==", "len", "(", "responses", ")", "responses_dic", "=", "{", "task_id", ":", "r", ".", "content", "for", "(", "task_id", ",", "_", ")", ",", "r", "in", "zip", "(", "endpoints", ",", "responses", ")", "}", "return", "responses_dic" ]
Prepare the request list and execute them concurrently.
[ "Prepare", "the", "request", "list", "and", "execute", "them", "concurrently", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/object_store.py#L62-L78
train
reincubate/ricloud
ricloud/asmaster_api.py
AsmasterApi._build_endpoint
def _build_endpoint(self, endpoint_name): """Generate an enpoint url from a setting name. Args: endpoint_name(str): setting name for the enpoint to build Returns: (str) url enpoint """ endpoint_relative = settings.get('asmaster_endpoints', endpoint_name) return '%s%s' % (self.host, endpoint_relative)
python
def _build_endpoint(self, endpoint_name): """Generate an enpoint url from a setting name. Args: endpoint_name(str): setting name for the enpoint to build Returns: (str) url enpoint """ endpoint_relative = settings.get('asmaster_endpoints', endpoint_name) return '%s%s' % (self.host, endpoint_relative)
[ "def", "_build_endpoint", "(", "self", ",", "endpoint_name", ")", ":", "endpoint_relative", "=", "settings", ".", "get", "(", "'asmaster_endpoints'", ",", "endpoint_name", ")", "return", "'%s%s'", "%", "(", "self", ".", "host", ",", "endpoint_relative", ")" ]
Generate an enpoint url from a setting name. Args: endpoint_name(str): setting name for the enpoint to build Returns: (str) url enpoint
[ "Generate", "an", "enpoint", "url", "from", "a", "setting", "name", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/asmaster_api.py#L33-L43
train
reincubate/ricloud
ricloud/asmaster_api.py
AsmasterApi._set_allowed_services_and_actions
def _set_allowed_services_and_actions(self, services): """Expect services to be a list of service dictionaries, each with `name` and `actions` keys.""" for service in services: self.services[service['name']] = {} for action in service['actions']: name = action.pop('name') self.services[service['name']][name] = action
python
def _set_allowed_services_and_actions(self, services): """Expect services to be a list of service dictionaries, each with `name` and `actions` keys.""" for service in services: self.services[service['name']] = {} for action in service['actions']: name = action.pop('name') self.services[service['name']][name] = action
[ "def", "_set_allowed_services_and_actions", "(", "self", ",", "services", ")", ":", "for", "service", "in", "services", ":", "self", ".", "services", "[", "service", "[", "'name'", "]", "]", "=", "{", "}", "for", "action", "in", "service", "[", "'actions'", "]", ":", "name", "=", "action", ".", "pop", "(", "'name'", ")", "self", ".", "services", "[", "service", "[", "'name'", "]", "]", "[", "name", "]", "=", "action" ]
Expect services to be a list of service dictionaries, each with `name` and `actions` keys.
[ "Expect", "services", "to", "be", "a", "list", "of", "service", "dictionaries", "each", "with", "name", "and", "actions", "keys", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/asmaster_api.py#L59-L66
train
reincubate/ricloud
ricloud/asmaster_api.py
AsmasterApi.list_subscriptions
def list_subscriptions(self, service): """Asks for a list of all subscribed accounts and devices, along with their statuses.""" data = { 'service': service, } return self._perform_post_request(self.list_subscriptions_endpoint, data, self.token_header)
python
def list_subscriptions(self, service): """Asks for a list of all subscribed accounts and devices, along with their statuses.""" data = { 'service': service, } return self._perform_post_request(self.list_subscriptions_endpoint, data, self.token_header)
[ "def", "list_subscriptions", "(", "self", ",", "service", ")", ":", "data", "=", "{", "'service'", ":", "service", ",", "}", "return", "self", ".", "_perform_post_request", "(", "self", ".", "list_subscriptions_endpoint", ",", "data", ",", "self", ".", "token_header", ")" ]
Asks for a list of all subscribed accounts and devices, along with their statuses.
[ "Asks", "for", "a", "list", "of", "all", "subscribed", "accounts", "and", "devices", "along", "with", "their", "statuses", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/asmaster_api.py#L84-L89
train
reincubate/ricloud
ricloud/asmaster_api.py
AsmasterApi.subscribe_account
def subscribe_account(self, username, password, service): """Subscribe an account for a service. """ data = { 'service': service, 'username': username, 'password': password, } return self._perform_post_request(self.subscribe_account_endpoint, data, self.token_header)
python
def subscribe_account(self, username, password, service): """Subscribe an account for a service. """ data = { 'service': service, 'username': username, 'password': password, } return self._perform_post_request(self.subscribe_account_endpoint, data, self.token_header)
[ "def", "subscribe_account", "(", "self", ",", "username", ",", "password", ",", "service", ")", ":", "data", "=", "{", "'service'", ":", "service", ",", "'username'", ":", "username", ",", "'password'", ":", "password", ",", "}", "return", "self", ".", "_perform_post_request", "(", "self", ".", "subscribe_account_endpoint", ",", "data", ",", "self", ".", "token_header", ")" ]
Subscribe an account for a service.
[ "Subscribe", "an", "account", "for", "a", "service", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/asmaster_api.py#L91-L100
train
reincubate/ricloud
ricloud/asmaster_listener.py
AsmasterDownloadFileHandler.file_id_to_file_name
def file_id_to_file_name(file_id): """Sometimes file ids are not the file names on the device, but are instead generated by the API. These are not guaranteed to be valid file names so need hashing. """ if len(file_id) == 40 and re.match("^[a-f0-9]+$", file_id): return file_id # prefix with "re_" to avoid name collision with real fileids return "re_{}".format(hashlib.sha1(file_id).hexdigest())
python
def file_id_to_file_name(file_id): """Sometimes file ids are not the file names on the device, but are instead generated by the API. These are not guaranteed to be valid file names so need hashing. """ if len(file_id) == 40 and re.match("^[a-f0-9]+$", file_id): return file_id # prefix with "re_" to avoid name collision with real fileids return "re_{}".format(hashlib.sha1(file_id).hexdigest())
[ "def", "file_id_to_file_name", "(", "file_id", ")", ":", "if", "len", "(", "file_id", ")", "==", "40", "and", "re", ".", "match", "(", "\"^[a-f0-9]+$\"", ",", "file_id", ")", ":", "return", "file_id", "# prefix with \"re_\" to avoid name collision with real fileids", "return", "\"re_{}\"", ".", "format", "(", "hashlib", ".", "sha1", "(", "file_id", ")", ".", "hexdigest", "(", ")", ")" ]
Sometimes file ids are not the file names on the device, but are instead generated by the API. These are not guaranteed to be valid file names so need hashing.
[ "Sometimes", "file", "ids", "are", "not", "the", "file", "names", "on", "the", "device", "but", "are", "instead", "generated", "by", "the", "API", ".", "These", "are", "not", "guaranteed", "to", "be", "valid", "file", "names", "so", "need", "hashing", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/asmaster_listener.py#L216-L223
train
reincubate/ricloud
ricloud/clients/base.py
sync
def sync(func): """Decorator to make a task synchronous.""" sync_timeout = 3600 # Match standard synchronous timeout. def wraps(*args, **kwargs): task = func(*args, **kwargs) task.wait_for_result(timeout=sync_timeout) result = json.loads(task.result) return result return wraps
python
def sync(func): """Decorator to make a task synchronous.""" sync_timeout = 3600 # Match standard synchronous timeout. def wraps(*args, **kwargs): task = func(*args, **kwargs) task.wait_for_result(timeout=sync_timeout) result = json.loads(task.result) return result return wraps
[ "def", "sync", "(", "func", ")", ":", "sync_timeout", "=", "3600", "# Match standard synchronous timeout.", "def", "wraps", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "task", "=", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "task", ".", "wait_for_result", "(", "timeout", "=", "sync_timeout", ")", "result", "=", "json", ".", "loads", "(", "task", ".", "result", ")", "return", "result", "return", "wraps" ]
Decorator to make a task synchronous.
[ "Decorator", "to", "make", "a", "task", "synchronous", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/clients/base.py#L4-L14
train
reincubate/ricloud
ricloud/samples/live_sample.py
SampleLiveICloudApplication.fetch_data
def fetch_data(self): """Prompt for a data type choice and execute the `fetch_data` task. The results are saved to a file in json format. """ choices = self.available_data choices.insert(0, 'All') selected_data_type = utils.select_item( choices, 'Please select what data to fetch:', 'Available data:', ) if selected_data_type == 'All': selected_data_type = ','.join(self.available_data) utils.pending_message('Performing fetch data task...') fetch_data_task = self.client.data( account=self.account, data=selected_data_type, ) # Wait here for result as rest of sample app relies on it. fetch_data_task.wait_for_result(timeout=self.timeout) fetch_data_result = json.loads(fetch_data_task.result) # Write the result to file. task_id = fetch_data_task.uuid filepath = utils.get_or_create_filepath('%s.json' % task_id) with open(filepath, 'w') as out: json.dump(fetch_data_result, out, indent=2) utils.info_message('Fetch data successful. Output file: %s.json' % task_id) return fetch_data_result
python
def fetch_data(self): """Prompt for a data type choice and execute the `fetch_data` task. The results are saved to a file in json format. """ choices = self.available_data choices.insert(0, 'All') selected_data_type = utils.select_item( choices, 'Please select what data to fetch:', 'Available data:', ) if selected_data_type == 'All': selected_data_type = ','.join(self.available_data) utils.pending_message('Performing fetch data task...') fetch_data_task = self.client.data( account=self.account, data=selected_data_type, ) # Wait here for result as rest of sample app relies on it. fetch_data_task.wait_for_result(timeout=self.timeout) fetch_data_result = json.loads(fetch_data_task.result) # Write the result to file. task_id = fetch_data_task.uuid filepath = utils.get_or_create_filepath('%s.json' % task_id) with open(filepath, 'w') as out: json.dump(fetch_data_result, out, indent=2) utils.info_message('Fetch data successful. Output file: %s.json' % task_id) return fetch_data_result
[ "def", "fetch_data", "(", "self", ")", ":", "choices", "=", "self", ".", "available_data", "choices", ".", "insert", "(", "0", ",", "'All'", ")", "selected_data_type", "=", "utils", ".", "select_item", "(", "choices", ",", "'Please select what data to fetch:'", ",", "'Available data:'", ",", ")", "if", "selected_data_type", "==", "'All'", ":", "selected_data_type", "=", "','", ".", "join", "(", "self", ".", "available_data", ")", "utils", ".", "pending_message", "(", "'Performing fetch data task...'", ")", "fetch_data_task", "=", "self", ".", "client", ".", "data", "(", "account", "=", "self", ".", "account", ",", "data", "=", "selected_data_type", ",", ")", "# Wait here for result as rest of sample app relies on it.", "fetch_data_task", ".", "wait_for_result", "(", "timeout", "=", "self", ".", "timeout", ")", "fetch_data_result", "=", "json", ".", "loads", "(", "fetch_data_task", ".", "result", ")", "# Write the result to file.", "task_id", "=", "fetch_data_task", ".", "uuid", "filepath", "=", "utils", ".", "get_or_create_filepath", "(", "'%s.json'", "%", "task_id", ")", "with", "open", "(", "filepath", ",", "'w'", ")", "as", "out", ":", "json", ".", "dump", "(", "fetch_data_result", ",", "out", ",", "indent", "=", "2", ")", "utils", ".", "info_message", "(", "'Fetch data successful. Output file: %s.json'", "%", "task_id", ")", "return", "fetch_data_result" ]
Prompt for a data type choice and execute the `fetch_data` task. The results are saved to a file in json format.
[ "Prompt", "for", "a", "data", "type", "choice", "and", "execute", "the", "fetch_data", "task", ".", "The", "results", "are", "saved", "to", "a", "file", "in", "json", "format", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/samples/live_sample.py#L22-L58
train
reincubate/ricloud
ricloud/samples/icloud_sample.py
SampleICloudApplication.log_in
def log_in(self): """Perform the `log_in` task to setup the API session for future data requests.""" if not self.password: # Password wasn't give, ask for it now self.password = getpass.getpass('Password: ') utils.pending_message('Performing login...') login_result = self.client.login( account=self.account, password=self.password ) if 'error' in login_result: self.handle_failed_login(login_result) utils.info_message('Login successful')
python
def log_in(self): """Perform the `log_in` task to setup the API session for future data requests.""" if not self.password: # Password wasn't give, ask for it now self.password = getpass.getpass('Password: ') utils.pending_message('Performing login...') login_result = self.client.login( account=self.account, password=self.password ) if 'error' in login_result: self.handle_failed_login(login_result) utils.info_message('Login successful')
[ "def", "log_in", "(", "self", ")", ":", "if", "not", "self", ".", "password", ":", "# Password wasn't give, ask for it now", "self", ".", "password", "=", "getpass", ".", "getpass", "(", "'Password: '", ")", "utils", ".", "pending_message", "(", "'Performing login...'", ")", "login_result", "=", "self", ".", "client", ".", "login", "(", "account", "=", "self", ".", "account", ",", "password", "=", "self", ".", "password", ")", "if", "'error'", "in", "login_result", ":", "self", ".", "handle_failed_login", "(", "login_result", ")", "utils", ".", "info_message", "(", "'Login successful'", ")" ]
Perform the `log_in` task to setup the API session for future data requests.
[ "Perform", "the", "log_in", "task", "to", "setup", "the", "API", "session", "for", "future", "data", "requests", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/samples/icloud_sample.py#L43-L59
train
reincubate/ricloud
ricloud/samples/icloud_sample.py
SampleICloudApplication.get_devices
def get_devices(self): """Execute the `get_devices` task and store the results in `self.devices`.""" utils.pending_message('Fetching device list...') get_devices_task = self.client.devices( account=self.account ) # We wait for device list info as this sample relies on it next. get_devices_task.wait_for_result(timeout=self.timeout) get_devices_result = json.loads(get_devices_task.result) self.devices = get_devices_result['devices'] utils.info_message('Get devices successful')
python
def get_devices(self): """Execute the `get_devices` task and store the results in `self.devices`.""" utils.pending_message('Fetching device list...') get_devices_task = self.client.devices( account=self.account ) # We wait for device list info as this sample relies on it next. get_devices_task.wait_for_result(timeout=self.timeout) get_devices_result = json.loads(get_devices_task.result) self.devices = get_devices_result['devices'] utils.info_message('Get devices successful')
[ "def", "get_devices", "(", "self", ")", ":", "utils", ".", "pending_message", "(", "'Fetching device list...'", ")", "get_devices_task", "=", "self", ".", "client", ".", "devices", "(", "account", "=", "self", ".", "account", ")", "# We wait for device list info as this sample relies on it next.", "get_devices_task", ".", "wait_for_result", "(", "timeout", "=", "self", ".", "timeout", ")", "get_devices_result", "=", "json", ".", "loads", "(", "get_devices_task", ".", "result", ")", "self", ".", "devices", "=", "get_devices_result", "[", "'devices'", "]", "utils", ".", "info_message", "(", "'Get devices successful'", ")" ]
Execute the `get_devices` task and store the results in `self.devices`.
[ "Execute", "the", "get_devices", "task", "and", "store", "the", "results", "in", "self", ".", "devices", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/samples/icloud_sample.py#L113-L127
train
reincubate/ricloud
ricloud/samples/icloud_sample.py
SampleICloudApplication.download_files
def download_files(self, files): """This method uses the `download_file` task to retrieve binary files such as attachments, images and videos. Notice that this method does not wait for the tasks it creates to return a result synchronously. """ utils.pending_message( "Downloading {nfiles} file{plural}...".format( nfiles=len(files), plural='s' if len(files) > 1 else '' )) for file in files: if 'file_id' not in file: continue def build_callback(file): """Callback to save a download file result to a file on disk.""" def file_callback(task): device_name = self.devices[self.device_id]['device_name'] path_chunks = file['file_path'].split('/') directory = os.path.join('files', device_name, *path_chunks[:-1]) filepath = utils.get_or_create_filepath(file['filename'], directory) with open(filepath, 'wb') as out: out.write(task.result) if settings.getboolean('logging', 'time_profile'): filepath = utils.append_profile_info(filepath, task.timer) with indent(4): utils.print_message(filepath) return file_callback self.client.download_file( account=self.account, device=self.device_id, file=file['file_id'], callback=build_callback(file) )
python
def download_files(self, files): """This method uses the `download_file` task to retrieve binary files such as attachments, images and videos. Notice that this method does not wait for the tasks it creates to return a result synchronously. """ utils.pending_message( "Downloading {nfiles} file{plural}...".format( nfiles=len(files), plural='s' if len(files) > 1 else '' )) for file in files: if 'file_id' not in file: continue def build_callback(file): """Callback to save a download file result to a file on disk.""" def file_callback(task): device_name = self.devices[self.device_id]['device_name'] path_chunks = file['file_path'].split('/') directory = os.path.join('files', device_name, *path_chunks[:-1]) filepath = utils.get_or_create_filepath(file['filename'], directory) with open(filepath, 'wb') as out: out.write(task.result) if settings.getboolean('logging', 'time_profile'): filepath = utils.append_profile_info(filepath, task.timer) with indent(4): utils.print_message(filepath) return file_callback self.client.download_file( account=self.account, device=self.device_id, file=file['file_id'], callback=build_callback(file) )
[ "def", "download_files", "(", "self", ",", "files", ")", ":", "utils", ".", "pending_message", "(", "\"Downloading {nfiles} file{plural}...\"", ".", "format", "(", "nfiles", "=", "len", "(", "files", ")", ",", "plural", "=", "'s'", "if", "len", "(", "files", ")", ">", "1", "else", "''", ")", ")", "for", "file", "in", "files", ":", "if", "'file_id'", "not", "in", "file", ":", "continue", "def", "build_callback", "(", "file", ")", ":", "\"\"\"Callback to save a download file result to a file on disk.\"\"\"", "def", "file_callback", "(", "task", ")", ":", "device_name", "=", "self", ".", "devices", "[", "self", ".", "device_id", "]", "[", "'device_name'", "]", "path_chunks", "=", "file", "[", "'file_path'", "]", ".", "split", "(", "'/'", ")", "directory", "=", "os", ".", "path", ".", "join", "(", "'files'", ",", "device_name", ",", "*", "path_chunks", "[", ":", "-", "1", "]", ")", "filepath", "=", "utils", ".", "get_or_create_filepath", "(", "file", "[", "'filename'", "]", ",", "directory", ")", "with", "open", "(", "filepath", ",", "'wb'", ")", "as", "out", ":", "out", ".", "write", "(", "task", ".", "result", ")", "if", "settings", ".", "getboolean", "(", "'logging'", ",", "'time_profile'", ")", ":", "filepath", "=", "utils", ".", "append_profile_info", "(", "filepath", ",", "task", ".", "timer", ")", "with", "indent", "(", "4", ")", ":", "utils", ".", "print_message", "(", "filepath", ")", "return", "file_callback", "self", ".", "client", ".", "download_file", "(", "account", "=", "self", ".", "account", ",", "device", "=", "self", ".", "device_id", ",", "file", "=", "file", "[", "'file_id'", "]", ",", "callback", "=", "build_callback", "(", "file", ")", ")" ]
This method uses the `download_file` task to retrieve binary files such as attachments, images and videos. Notice that this method does not wait for the tasks it creates to return a result synchronously.
[ "This", "method", "uses", "the", "download_file", "task", "to", "retrieve", "binary", "files", "such", "as", "attachments", "images", "and", "videos", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/samples/icloud_sample.py#L183-L226
train
reincubate/ricloud
ricloud/api.py
Api.register_account
def register_account(self, username, service): """Register an account against a service. The account that we're querying must be referenced during any future task requests - so we know which account to link the task too. """ data = { 'service': service, 'username': username, } return self._perform_post_request(self.register_account_endpoint, data, self.token_header)
python
def register_account(self, username, service): """Register an account against a service. The account that we're querying must be referenced during any future task requests - so we know which account to link the task too. """ data = { 'service': service, 'username': username, } return self._perform_post_request(self.register_account_endpoint, data, self.token_header)
[ "def", "register_account", "(", "self", ",", "username", ",", "service", ")", ":", "data", "=", "{", "'service'", ":", "service", ",", "'username'", ":", "username", ",", "}", "return", "self", ".", "_perform_post_request", "(", "self", ".", "register_account_endpoint", ",", "data", ",", "self", ".", "token_header", ")" ]
Register an account against a service. The account that we're querying must be referenced during any future task requests - so we know which account to link the task too.
[ "Register", "an", "account", "against", "a", "service", ".", "The", "account", "that", "we", "re", "querying", "must", "be", "referenced", "during", "any", "future", "task", "requests", "-", "so", "we", "know", "which", "account", "to", "link", "the", "task", "too", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/api.py#L79-L90
train
reincubate/ricloud
ricloud/api.py
Api.perform_task
def perform_task(self, service, task_name, account, payload, callback=None): """Submit a task to the API. The task is executed asyncronously, and a Task object is returned. """ data = { 'service': service, 'action': task_name, 'account': account, } data.update(payload) response = self._perform_post_request(self.submit_endpoint, data, self.token_header) task = Task(uuid=response['task_id'], callback=callback) self._pending_tasks[task.uuid] = task return task
python
def perform_task(self, service, task_name, account, payload, callback=None): """Submit a task to the API. The task is executed asyncronously, and a Task object is returned. """ data = { 'service': service, 'action': task_name, 'account': account, } data.update(payload) response = self._perform_post_request(self.submit_endpoint, data, self.token_header) task = Task(uuid=response['task_id'], callback=callback) self._pending_tasks[task.uuid] = task return task
[ "def", "perform_task", "(", "self", ",", "service", ",", "task_name", ",", "account", ",", "payload", ",", "callback", "=", "None", ")", ":", "data", "=", "{", "'service'", ":", "service", ",", "'action'", ":", "task_name", ",", "'account'", ":", "account", ",", "}", "data", ".", "update", "(", "payload", ")", "response", "=", "self", ".", "_perform_post_request", "(", "self", ".", "submit_endpoint", ",", "data", ",", "self", ".", "token_header", ")", "task", "=", "Task", "(", "uuid", "=", "response", "[", "'task_id'", "]", ",", "callback", "=", "callback", ")", "self", ".", "_pending_tasks", "[", "task", ".", "uuid", "]", "=", "task", "return", "task" ]
Submit a task to the API. The task is executed asyncronously, and a Task object is returned.
[ "Submit", "a", "task", "to", "the", "API", ".", "The", "task", "is", "executed", "asyncronously", "and", "a", "Task", "object", "is", "returned", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/api.py#L92-L108
train
reincubate/ricloud
ricloud/api.py
Api.task_status
def task_status(self, task_id): """Find the status of a task.""" data = { 'task_ids': task_id, } return self._perform_post_request(self.task_status_endpoint, data, self.token_header)
python
def task_status(self, task_id): """Find the status of a task.""" data = { 'task_ids': task_id, } return self._perform_post_request(self.task_status_endpoint, data, self.token_header)
[ "def", "task_status", "(", "self", ",", "task_id", ")", ":", "data", "=", "{", "'task_ids'", ":", "task_id", ",", "}", "return", "self", ".", "_perform_post_request", "(", "self", ".", "task_status_endpoint", ",", "data", ",", "self", ".", "token_header", ")" ]
Find the status of a task.
[ "Find", "the", "status", "of", "a", "task", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/api.py#L110-L115
train
reincubate/ricloud
ricloud/api.py
Api.result_consumed
def result_consumed(self, task_id): """Report the result as successfully consumed.""" logger.debug('Sending result consumed message.') data = { 'task_ids': task_id, } return self._perform_post_request(self.results_consumed_endpoint, data, self.token_header)
python
def result_consumed(self, task_id): """Report the result as successfully consumed.""" logger.debug('Sending result consumed message.') data = { 'task_ids': task_id, } return self._perform_post_request(self.results_consumed_endpoint, data, self.token_header)
[ "def", "result_consumed", "(", "self", ",", "task_id", ")", ":", "logger", ".", "debug", "(", "'Sending result consumed message.'", ")", "data", "=", "{", "'task_ids'", ":", "task_id", ",", "}", "return", "self", ".", "_perform_post_request", "(", "self", ".", "results_consumed_endpoint", ",", "data", ",", "self", ".", "token_header", ")" ]
Report the result as successfully consumed.
[ "Report", "the", "result", "as", "successfully", "consumed", "." ]
e46bce4529fbdca34a4190c18c7219e937e2b697
https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/api.py#L117-L123
train
nitmir/django-cas-server
cas_server/models.py
NewVersionWarning.send_mails
def send_mails(cls): """ For each new django-cas-server version, if the current instance is not up to date send one mail to ``settings.ADMINS``. """ if settings.CAS_NEW_VERSION_EMAIL_WARNING and settings.ADMINS: try: obj = cls.objects.get() except cls.DoesNotExist: obj = NewVersionWarning.objects.create(version=VERSION) LAST_VERSION = utils.last_version() if LAST_VERSION is not None and LAST_VERSION != obj.version: if utils.decode_version(VERSION) < utils.decode_version(LAST_VERSION): try: send_mail( ( '%sA new version of django-cas-server is available' ) % settings.EMAIL_SUBJECT_PREFIX, u''' A new version of the django-cas-server is available. Your version: %s New version: %s Upgrade using: * pip install -U django-cas-server * fetching the last release on https://github.com/nitmir/django-cas-server/ or on https://pypi.org/project/django-cas-server/ After upgrade, do not forget to run: * ./manage.py migrate * ./manage.py collectstatic and to reload your wsgi server (apache2, uwsgi, gunicord, etc…) --\u0020 django-cas-server '''.strip() % (VERSION, LAST_VERSION), settings.SERVER_EMAIL, ["%s <%s>" % admin for admin in settings.ADMINS], fail_silently=False, ) obj.version = LAST_VERSION obj.save() except smtplib.SMTPException as error: # pragma: no cover (should not happen) logger.error("Unable to send new version mail: %s" % error)
python
def send_mails(cls): """ For each new django-cas-server version, if the current instance is not up to date send one mail to ``settings.ADMINS``. """ if settings.CAS_NEW_VERSION_EMAIL_WARNING and settings.ADMINS: try: obj = cls.objects.get() except cls.DoesNotExist: obj = NewVersionWarning.objects.create(version=VERSION) LAST_VERSION = utils.last_version() if LAST_VERSION is not None and LAST_VERSION != obj.version: if utils.decode_version(VERSION) < utils.decode_version(LAST_VERSION): try: send_mail( ( '%sA new version of django-cas-server is available' ) % settings.EMAIL_SUBJECT_PREFIX, u''' A new version of the django-cas-server is available. Your version: %s New version: %s Upgrade using: * pip install -U django-cas-server * fetching the last release on https://github.com/nitmir/django-cas-server/ or on https://pypi.org/project/django-cas-server/ After upgrade, do not forget to run: * ./manage.py migrate * ./manage.py collectstatic and to reload your wsgi server (apache2, uwsgi, gunicord, etc…) --\u0020 django-cas-server '''.strip() % (VERSION, LAST_VERSION), settings.SERVER_EMAIL, ["%s <%s>" % admin for admin in settings.ADMINS], fail_silently=False, ) obj.version = LAST_VERSION obj.save() except smtplib.SMTPException as error: # pragma: no cover (should not happen) logger.error("Unable to send new version mail: %s" % error)
[ "def", "send_mails", "(", "cls", ")", ":", "if", "settings", ".", "CAS_NEW_VERSION_EMAIL_WARNING", "and", "settings", ".", "ADMINS", ":", "try", ":", "obj", "=", "cls", ".", "objects", ".", "get", "(", ")", "except", "cls", ".", "DoesNotExist", ":", "obj", "=", "NewVersionWarning", ".", "objects", ".", "create", "(", "version", "=", "VERSION", ")", "LAST_VERSION", "=", "utils", ".", "last_version", "(", ")", "if", "LAST_VERSION", "is", "not", "None", "and", "LAST_VERSION", "!=", "obj", ".", "version", ":", "if", "utils", ".", "decode_version", "(", "VERSION", ")", "<", "utils", ".", "decode_version", "(", "LAST_VERSION", ")", ":", "try", ":", "send_mail", "(", "(", "'%sA new version of django-cas-server is available'", ")", "%", "settings", ".", "EMAIL_SUBJECT_PREFIX", ",", "u'''\nA new version of the django-cas-server is available.\n\nYour version: %s\nNew version: %s\n\nUpgrade using:\n * pip install -U django-cas-server\n * fetching the last release on\n https://github.com/nitmir/django-cas-server/ or on\n https://pypi.org/project/django-cas-server/\n\nAfter upgrade, do not forget to run:\n * ./manage.py migrate\n * ./manage.py collectstatic\nand to reload your wsgi server (apache2, uwsgi, gunicord, etc…)\n\n--\\u0020\ndjango-cas-server\n'''", ".", "strip", "(", ")", "%", "(", "VERSION", ",", "LAST_VERSION", ")", ",", "settings", ".", "SERVER_EMAIL", ",", "[", "\"%s <%s>\"", "%", "admin", "for", "admin", "in", "settings", ".", "ADMINS", "]", ",", "fail_silently", "=", "False", ",", ")", "obj", ".", "version", "=", "LAST_VERSION", "obj", ".", "save", "(", ")", "except", "smtplib", ".", "SMTPException", "as", "error", ":", "# pragma: no cover (should not happen)", "logger", ".", "error", "(", "\"Unable to send new version mail: %s\"", "%", "error", ")" ]
For each new django-cas-server version, if the current instance is not up to date send one mail to ``settings.ADMINS``.
[ "For", "each", "new", "django", "-", "cas", "-", "server", "version", "if", "the", "current", "instance", "is", "not", "up", "to", "date", "send", "one", "mail", "to", "settings", ".", "ADMINS", "." ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/models.py#L1072-L1117
train
nitmir/django-cas-server
cas_server/cas.py
CASClientBase.get_proxy_url
def get_proxy_url(self, pgt): """Returns proxy url, given the proxy granting ticket""" params = urllib_parse.urlencode({'pgt': pgt, 'targetService': self.service_url}) return "%s/proxy?%s" % (self.server_url, params)
python
def get_proxy_url(self, pgt): """Returns proxy url, given the proxy granting ticket""" params = urllib_parse.urlencode({'pgt': pgt, 'targetService': self.service_url}) return "%s/proxy?%s" % (self.server_url, params)
[ "def", "get_proxy_url", "(", "self", ",", "pgt", ")", ":", "params", "=", "urllib_parse", ".", "urlencode", "(", "{", "'pgt'", ":", "pgt", ",", "'targetService'", ":", "self", ".", "service_url", "}", ")", "return", "\"%s/proxy?%s\"", "%", "(", "self", ".", "server_url", ",", "params", ")" ]
Returns proxy url, given the proxy granting ticket
[ "Returns", "proxy", "url", "given", "the", "proxy", "granting", "ticket" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/cas.py#L112-L115
train
nitmir/django-cas-server
cas_server/auth.py
LdapAuthUser.get_conn
def get_conn(cls): """Return a connection object to the ldap database""" conn = cls._conn if conn is None or conn.closed: conn = ldap3.Connection( settings.CAS_LDAP_SERVER, settings.CAS_LDAP_USER, settings.CAS_LDAP_PASSWORD, client_strategy="RESTARTABLE", auto_bind=True ) cls._conn = conn return conn
python
def get_conn(cls): """Return a connection object to the ldap database""" conn = cls._conn if conn is None or conn.closed: conn = ldap3.Connection( settings.CAS_LDAP_SERVER, settings.CAS_LDAP_USER, settings.CAS_LDAP_PASSWORD, client_strategy="RESTARTABLE", auto_bind=True ) cls._conn = conn return conn
[ "def", "get_conn", "(", "cls", ")", ":", "conn", "=", "cls", ".", "_conn", "if", "conn", "is", "None", "or", "conn", ".", "closed", ":", "conn", "=", "ldap3", ".", "Connection", "(", "settings", ".", "CAS_LDAP_SERVER", ",", "settings", ".", "CAS_LDAP_USER", ",", "settings", ".", "CAS_LDAP_PASSWORD", ",", "client_strategy", "=", "\"RESTARTABLE\"", ",", "auto_bind", "=", "True", ")", "cls", ".", "_conn", "=", "conn", "return", "conn" ]
Return a connection object to the ldap database
[ "Return", "a", "connection", "object", "to", "the", "ldap", "database" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/auth.py#L272-L284
train
nitmir/django-cas-server
cas_server/utils.py
json_encode
def json_encode(obj): """Encode a python object to json""" try: return json_encode.encoder.encode(obj) except AttributeError: json_encode.encoder = DjangoJSONEncoder(default=six.text_type) return json_encode(obj)
python
def json_encode(obj): """Encode a python object to json""" try: return json_encode.encoder.encode(obj) except AttributeError: json_encode.encoder = DjangoJSONEncoder(default=six.text_type) return json_encode(obj)
[ "def", "json_encode", "(", "obj", ")", ":", "try", ":", "return", "json_encode", ".", "encoder", ".", "encode", "(", "obj", ")", "except", "AttributeError", ":", "json_encode", ".", "encoder", "=", "DjangoJSONEncoder", "(", "default", "=", "six", ".", "text_type", ")", "return", "json_encode", "(", "obj", ")" ]
Encode a python object to json
[ "Encode", "a", "python", "object", "to", "json" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L50-L56
train
nitmir/django-cas-server
cas_server/utils.py
context
def context(params): """ Function that add somes variable to the context before template rendering :param dict params: The context dictionary used to render templates. :return: The ``params`` dictionary with the key ``settings`` set to :obj:`django.conf.settings`. :rtype: dict """ params["settings"] = settings params["message_levels"] = DEFAULT_MESSAGE_LEVELS if settings.CAS_NEW_VERSION_HTML_WARNING: LAST_VERSION = last_version() params["VERSION"] = VERSION params["LAST_VERSION"] = LAST_VERSION if LAST_VERSION is not None: params["upgrade_available"] = decode_version(VERSION) < decode_version(LAST_VERSION) else: params["upgrade_available"] = False if settings.CAS_INFO_MESSAGES_ORDER: params["CAS_INFO_RENDER"] = [] for msg_name in settings.CAS_INFO_MESSAGES_ORDER: if msg_name in settings.CAS_INFO_MESSAGES: if not isinstance(settings.CAS_INFO_MESSAGES[msg_name], dict): continue msg = settings.CAS_INFO_MESSAGES[msg_name].copy() if "message" in msg: msg["name"] = msg_name # use info as default infox type msg["type"] = msg.get("type", "info") # make box discardable by default msg["discardable"] = msg.get("discardable", True) msg_hash = ( six.text_type(msg["message"]).encode("utf-8") + msg["type"].encode("utf-8") ) # hash depend of the rendering language msg["hash"] = hashlib.md5(msg_hash).hexdigest() params["CAS_INFO_RENDER"].append(msg) return params
python
def context(params): """ Function that add somes variable to the context before template rendering :param dict params: The context dictionary used to render templates. :return: The ``params`` dictionary with the key ``settings`` set to :obj:`django.conf.settings`. :rtype: dict """ params["settings"] = settings params["message_levels"] = DEFAULT_MESSAGE_LEVELS if settings.CAS_NEW_VERSION_HTML_WARNING: LAST_VERSION = last_version() params["VERSION"] = VERSION params["LAST_VERSION"] = LAST_VERSION if LAST_VERSION is not None: params["upgrade_available"] = decode_version(VERSION) < decode_version(LAST_VERSION) else: params["upgrade_available"] = False if settings.CAS_INFO_MESSAGES_ORDER: params["CAS_INFO_RENDER"] = [] for msg_name in settings.CAS_INFO_MESSAGES_ORDER: if msg_name in settings.CAS_INFO_MESSAGES: if not isinstance(settings.CAS_INFO_MESSAGES[msg_name], dict): continue msg = settings.CAS_INFO_MESSAGES[msg_name].copy() if "message" in msg: msg["name"] = msg_name # use info as default infox type msg["type"] = msg.get("type", "info") # make box discardable by default msg["discardable"] = msg.get("discardable", True) msg_hash = ( six.text_type(msg["message"]).encode("utf-8") + msg["type"].encode("utf-8") ) # hash depend of the rendering language msg["hash"] = hashlib.md5(msg_hash).hexdigest() params["CAS_INFO_RENDER"].append(msg) return params
[ "def", "context", "(", "params", ")", ":", "params", "[", "\"settings\"", "]", "=", "settings", "params", "[", "\"message_levels\"", "]", "=", "DEFAULT_MESSAGE_LEVELS", "if", "settings", ".", "CAS_NEW_VERSION_HTML_WARNING", ":", "LAST_VERSION", "=", "last_version", "(", ")", "params", "[", "\"VERSION\"", "]", "=", "VERSION", "params", "[", "\"LAST_VERSION\"", "]", "=", "LAST_VERSION", "if", "LAST_VERSION", "is", "not", "None", ":", "params", "[", "\"upgrade_available\"", "]", "=", "decode_version", "(", "VERSION", ")", "<", "decode_version", "(", "LAST_VERSION", ")", "else", ":", "params", "[", "\"upgrade_available\"", "]", "=", "False", "if", "settings", ".", "CAS_INFO_MESSAGES_ORDER", ":", "params", "[", "\"CAS_INFO_RENDER\"", "]", "=", "[", "]", "for", "msg_name", "in", "settings", ".", "CAS_INFO_MESSAGES_ORDER", ":", "if", "msg_name", "in", "settings", ".", "CAS_INFO_MESSAGES", ":", "if", "not", "isinstance", "(", "settings", ".", "CAS_INFO_MESSAGES", "[", "msg_name", "]", ",", "dict", ")", ":", "continue", "msg", "=", "settings", ".", "CAS_INFO_MESSAGES", "[", "msg_name", "]", ".", "copy", "(", ")", "if", "\"message\"", "in", "msg", ":", "msg", "[", "\"name\"", "]", "=", "msg_name", "# use info as default infox type", "msg", "[", "\"type\"", "]", "=", "msg", ".", "get", "(", "\"type\"", ",", "\"info\"", ")", "# make box discardable by default", "msg", "[", "\"discardable\"", "]", "=", "msg", ".", "get", "(", "\"discardable\"", ",", "True", ")", "msg_hash", "=", "(", "six", ".", "text_type", "(", "msg", "[", "\"message\"", "]", ")", ".", "encode", "(", "\"utf-8\"", ")", "+", "msg", "[", "\"type\"", "]", ".", "encode", "(", "\"utf-8\"", ")", ")", "# hash depend of the rendering language", "msg", "[", "\"hash\"", "]", "=", "hashlib", ".", "md5", "(", "msg_hash", ")", ".", "hexdigest", "(", ")", "params", "[", "\"CAS_INFO_RENDER\"", "]", ".", "append", "(", "msg", ")", "return", "params" ]
Function that add somes variable to the context before template rendering :param dict params: The context dictionary used to render templates. :return: The ``params`` dictionary with the key ``settings`` set to :obj:`django.conf.settings`. :rtype: dict
[ "Function", "that", "add", "somes", "variable", "to", "the", "context", "before", "template", "rendering" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L59-L100
train
nitmir/django-cas-server
cas_server/utils.py
json_response
def json_response(request, data): """ Wrapper dumping `data` to a json and sending it to the user with an HttpResponse :param django.http.HttpRequest request: The request object used to generate this response. :param dict data: The python dictionnary to return as a json :return: The content of ``data`` serialized in json :rtype: django.http.HttpResponse """ data["messages"] = [] for msg in messages.get_messages(request): data["messages"].append({'message': msg.message, 'level': msg.level_tag}) return HttpResponse(json.dumps(data), content_type="application/json")
python
def json_response(request, data): """ Wrapper dumping `data` to a json and sending it to the user with an HttpResponse :param django.http.HttpRequest request: The request object used to generate this response. :param dict data: The python dictionnary to return as a json :return: The content of ``data`` serialized in json :rtype: django.http.HttpResponse """ data["messages"] = [] for msg in messages.get_messages(request): data["messages"].append({'message': msg.message, 'level': msg.level_tag}) return HttpResponse(json.dumps(data), content_type="application/json")
[ "def", "json_response", "(", "request", ",", "data", ")", ":", "data", "[", "\"messages\"", "]", "=", "[", "]", "for", "msg", "in", "messages", ".", "get_messages", "(", "request", ")", ":", "data", "[", "\"messages\"", "]", ".", "append", "(", "{", "'message'", ":", "msg", ".", "message", ",", "'level'", ":", "msg", ".", "level_tag", "}", ")", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "data", ")", ",", "content_type", "=", "\"application/json\"", ")" ]
Wrapper dumping `data` to a json and sending it to the user with an HttpResponse :param django.http.HttpRequest request: The request object used to generate this response. :param dict data: The python dictionnary to return as a json :return: The content of ``data`` serialized in json :rtype: django.http.HttpResponse
[ "Wrapper", "dumping", "data", "to", "a", "json", "and", "sending", "it", "to", "the", "user", "with", "an", "HttpResponse" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L103-L115
train
nitmir/django-cas-server
cas_server/utils.py
import_attr
def import_attr(path): """ transform a python dotted path to the attr :param path: A dotted path to a python object or a python object :type path: :obj:`unicode` or :obj:`str` or anything :return: The python object pointed by the dotted path or the python object unchanged """ # if we got a str, decode it to unicode (normally it should only contain ascii) if isinstance(path, six.binary_type): path = path.decode("utf-8") # if path is not an unicode, return it unchanged (may be it is already the attribute to import) if not isinstance(path, six.text_type): return path if u"." not in path: ValueError("%r should be of the form `module.attr` and we just got `attr`" % path) module, attr = path.rsplit(u'.', 1) try: return getattr(import_module(module), attr) except ImportError: raise ImportError("Module %r not found" % module) except AttributeError: raise AttributeError("Module %r has not attribut %r" % (module, attr))
python
def import_attr(path): """ transform a python dotted path to the attr :param path: A dotted path to a python object or a python object :type path: :obj:`unicode` or :obj:`str` or anything :return: The python object pointed by the dotted path or the python object unchanged """ # if we got a str, decode it to unicode (normally it should only contain ascii) if isinstance(path, six.binary_type): path = path.decode("utf-8") # if path is not an unicode, return it unchanged (may be it is already the attribute to import) if not isinstance(path, six.text_type): return path if u"." not in path: ValueError("%r should be of the form `module.attr` and we just got `attr`" % path) module, attr = path.rsplit(u'.', 1) try: return getattr(import_module(module), attr) except ImportError: raise ImportError("Module %r not found" % module) except AttributeError: raise AttributeError("Module %r has not attribut %r" % (module, attr))
[ "def", "import_attr", "(", "path", ")", ":", "# if we got a str, decode it to unicode (normally it should only contain ascii)", "if", "isinstance", "(", "path", ",", "six", ".", "binary_type", ")", ":", "path", "=", "path", ".", "decode", "(", "\"utf-8\"", ")", "# if path is not an unicode, return it unchanged (may be it is already the attribute to import)", "if", "not", "isinstance", "(", "path", ",", "six", ".", "text_type", ")", ":", "return", "path", "if", "u\".\"", "not", "in", "path", ":", "ValueError", "(", "\"%r should be of the form `module.attr` and we just got `attr`\"", "%", "path", ")", "module", ",", "attr", "=", "path", ".", "rsplit", "(", "u'.'", ",", "1", ")", "try", ":", "return", "getattr", "(", "import_module", "(", "module", ")", ",", "attr", ")", "except", "ImportError", ":", "raise", "ImportError", "(", "\"Module %r not found\"", "%", "module", ")", "except", "AttributeError", ":", "raise", "AttributeError", "(", "\"Module %r has not attribut %r\"", "%", "(", "module", ",", "attr", ")", ")" ]
transform a python dotted path to the attr :param path: A dotted path to a python object or a python object :type path: :obj:`unicode` or :obj:`str` or anything :return: The python object pointed by the dotted path or the python object unchanged
[ "transform", "a", "python", "dotted", "path", "to", "the", "attr" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L118-L140
train
nitmir/django-cas-server
cas_server/utils.py
redirect_params
def redirect_params(url_name, params=None): """ Redirect to ``url_name`` with ``params`` as querystring :param unicode url_name: a URL pattern name :param params: Some parameter to append to the reversed URL :type params: :obj:`dict` or :obj:`NoneType<types.NoneType>` :return: A redirection to the URL with name ``url_name`` with ``params`` as querystring. :rtype: django.http.HttpResponseRedirect """ url = reverse(url_name) params = urlencode(params if params else {}) return HttpResponseRedirect(url + "?%s" % params)
python
def redirect_params(url_name, params=None): """ Redirect to ``url_name`` with ``params`` as querystring :param unicode url_name: a URL pattern name :param params: Some parameter to append to the reversed URL :type params: :obj:`dict` or :obj:`NoneType<types.NoneType>` :return: A redirection to the URL with name ``url_name`` with ``params`` as querystring. :rtype: django.http.HttpResponseRedirect """ url = reverse(url_name) params = urlencode(params if params else {}) return HttpResponseRedirect(url + "?%s" % params)
[ "def", "redirect_params", "(", "url_name", ",", "params", "=", "None", ")", ":", "url", "=", "reverse", "(", "url_name", ")", "params", "=", "urlencode", "(", "params", "if", "params", "else", "{", "}", ")", "return", "HttpResponseRedirect", "(", "url", "+", "\"?%s\"", "%", "params", ")" ]
Redirect to ``url_name`` with ``params`` as querystring :param unicode url_name: a URL pattern name :param params: Some parameter to append to the reversed URL :type params: :obj:`dict` or :obj:`NoneType<types.NoneType>` :return: A redirection to the URL with name ``url_name`` with ``params`` as querystring. :rtype: django.http.HttpResponseRedirect
[ "Redirect", "to", "url_name", "with", "params", "as", "querystring" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L143-L155
train
nitmir/django-cas-server
cas_server/utils.py
reverse_params
def reverse_params(url_name, params=None, **kwargs): """ compute the reverse url of ``url_name`` and add to it parameters from ``params`` as querystring :param unicode url_name: a URL pattern name :param params: Some parameter to append to the reversed URL :type params: :obj:`dict` or :obj:`NoneType<types.NoneType>` :param **kwargs: additional parameters needed to compure the reverse URL :return: The computed reverse URL of ``url_name`` with possible querystring from ``params`` :rtype: unicode """ url = reverse(url_name, **kwargs) params = urlencode(params if params else {}) if params: return u"%s?%s" % (url, params) else: return url
python
def reverse_params(url_name, params=None, **kwargs): """ compute the reverse url of ``url_name`` and add to it parameters from ``params`` as querystring :param unicode url_name: a URL pattern name :param params: Some parameter to append to the reversed URL :type params: :obj:`dict` or :obj:`NoneType<types.NoneType>` :param **kwargs: additional parameters needed to compure the reverse URL :return: The computed reverse URL of ``url_name`` with possible querystring from ``params`` :rtype: unicode """ url = reverse(url_name, **kwargs) params = urlencode(params if params else {}) if params: return u"%s?%s" % (url, params) else: return url
[ "def", "reverse_params", "(", "url_name", ",", "params", "=", "None", ",", "*", "*", "kwargs", ")", ":", "url", "=", "reverse", "(", "url_name", ",", "*", "*", "kwargs", ")", "params", "=", "urlencode", "(", "params", "if", "params", "else", "{", "}", ")", "if", "params", ":", "return", "u\"%s?%s\"", "%", "(", "url", ",", "params", ")", "else", ":", "return", "url" ]
compute the reverse url of ``url_name`` and add to it parameters from ``params`` as querystring :param unicode url_name: a URL pattern name :param params: Some parameter to append to the reversed URL :type params: :obj:`dict` or :obj:`NoneType<types.NoneType>` :param **kwargs: additional parameters needed to compure the reverse URL :return: The computed reverse URL of ``url_name`` with possible querystring from ``params`` :rtype: unicode
[ "compute", "the", "reverse", "url", "of", "url_name", "and", "add", "to", "it", "parameters", "from", "params", "as", "querystring" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L158-L175
train
nitmir/django-cas-server
cas_server/utils.py
set_cookie
def set_cookie(response, key, value, max_age): """ Set the cookie ``key`` on ``response`` with value ``value`` valid for ``max_age`` secondes :param django.http.HttpResponse response: a django response where to set the cookie :param unicode key: the cookie key :param unicode value: the cookie value :param int max_age: the maximum validity age of the cookie """ expires = datetime.strftime( datetime.utcnow() + timedelta(seconds=max_age), "%a, %d-%b-%Y %H:%M:%S GMT" ) response.set_cookie( key, value, max_age=max_age, expires=expires, domain=settings.SESSION_COOKIE_DOMAIN, secure=settings.SESSION_COOKIE_SECURE or None )
python
def set_cookie(response, key, value, max_age): """ Set the cookie ``key`` on ``response`` with value ``value`` valid for ``max_age`` secondes :param django.http.HttpResponse response: a django response where to set the cookie :param unicode key: the cookie key :param unicode value: the cookie value :param int max_age: the maximum validity age of the cookie """ expires = datetime.strftime( datetime.utcnow() + timedelta(seconds=max_age), "%a, %d-%b-%Y %H:%M:%S GMT" ) response.set_cookie( key, value, max_age=max_age, expires=expires, domain=settings.SESSION_COOKIE_DOMAIN, secure=settings.SESSION_COOKIE_SECURE or None )
[ "def", "set_cookie", "(", "response", ",", "key", ",", "value", ",", "max_age", ")", ":", "expires", "=", "datetime", ".", "strftime", "(", "datetime", ".", "utcnow", "(", ")", "+", "timedelta", "(", "seconds", "=", "max_age", ")", ",", "\"%a, %d-%b-%Y %H:%M:%S GMT\"", ")", "response", ".", "set_cookie", "(", "key", ",", "value", ",", "max_age", "=", "max_age", ",", "expires", "=", "expires", ",", "domain", "=", "settings", ".", "SESSION_COOKIE_DOMAIN", ",", "secure", "=", "settings", ".", "SESSION_COOKIE_SECURE", "or", "None", ")" ]
Set the cookie ``key`` on ``response`` with value ``value`` valid for ``max_age`` secondes :param django.http.HttpResponse response: a django response where to set the cookie :param unicode key: the cookie key :param unicode value: the cookie value :param int max_age: the maximum validity age of the cookie
[ "Set", "the", "cookie", "key", "on", "response", "with", "value", "value", "valid", "for", "max_age", "secondes" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L197-L217
train
nitmir/django-cas-server
cas_server/utils.py
get_current_url
def get_current_url(request, ignore_params=None): """ Giving a django request, return the current http url, possibly ignoring some GET parameters :param django.http.HttpRequest request: The current request object. :param set ignore_params: An optional set of GET parameters to ignore :return: The URL of the current page, possibly omitting some parameters from ``ignore_params`` in the querystring. :rtype: unicode """ if ignore_params is None: ignore_params = set() protocol = u'https' if request.is_secure() else u"http" service_url = u"%s://%s%s" % (protocol, request.get_host(), request.path) if request.GET: params = copy_params(request.GET, ignore_params) if params: service_url += u"?%s" % urlencode(params) return service_url
python
def get_current_url(request, ignore_params=None): """ Giving a django request, return the current http url, possibly ignoring some GET parameters :param django.http.HttpRequest request: The current request object. :param set ignore_params: An optional set of GET parameters to ignore :return: The URL of the current page, possibly omitting some parameters from ``ignore_params`` in the querystring. :rtype: unicode """ if ignore_params is None: ignore_params = set() protocol = u'https' if request.is_secure() else u"http" service_url = u"%s://%s%s" % (protocol, request.get_host(), request.path) if request.GET: params = copy_params(request.GET, ignore_params) if params: service_url += u"?%s" % urlencode(params) return service_url
[ "def", "get_current_url", "(", "request", ",", "ignore_params", "=", "None", ")", ":", "if", "ignore_params", "is", "None", ":", "ignore_params", "=", "set", "(", ")", "protocol", "=", "u'https'", "if", "request", ".", "is_secure", "(", ")", "else", "u\"http\"", "service_url", "=", "u\"%s://%s%s\"", "%", "(", "protocol", ",", "request", ".", "get_host", "(", ")", ",", "request", ".", "path", ")", "if", "request", ".", "GET", ":", "params", "=", "copy_params", "(", "request", ".", "GET", ",", "ignore_params", ")", "if", "params", ":", "service_url", "+=", "u\"?%s\"", "%", "urlencode", "(", "params", ")", "return", "service_url" ]
Giving a django request, return the current http url, possibly ignoring some GET parameters :param django.http.HttpRequest request: The current request object. :param set ignore_params: An optional set of GET parameters to ignore :return: The URL of the current page, possibly omitting some parameters from ``ignore_params`` in the querystring. :rtype: unicode
[ "Giving", "a", "django", "request", "return", "the", "current", "http", "url", "possibly", "ignoring", "some", "GET", "parameters" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L220-L238
train
nitmir/django-cas-server
cas_server/utils.py
update_url
def update_url(url, params): """ update parameters using ``params`` in the ``url`` query string :param url: An URL possibily with a querystring :type url: :obj:`unicode` or :obj:`str` :param dict params: A dictionary of parameters for updating the url querystring :return: The URL with an updated querystring :rtype: unicode """ if not isinstance(url, bytes): url = url.encode('utf-8') for key, value in list(params.items()): if not isinstance(key, bytes): del params[key] key = key.encode('utf-8') if not isinstance(value, bytes): value = value.encode('utf-8') params[key] = value url_parts = list(urlparse(url)) query = dict(parse_qsl(url_parts[4])) query.update(params) # make the params order deterministic query = list(query.items()) query.sort() url_query = urlencode(query) if not isinstance(url_query, bytes): # pragma: no cover in python3 urlencode return an unicode url_query = url_query.encode("utf-8") url_parts[4] = url_query return urlunparse(url_parts).decode('utf-8')
python
def update_url(url, params): """ update parameters using ``params`` in the ``url`` query string :param url: An URL possibily with a querystring :type url: :obj:`unicode` or :obj:`str` :param dict params: A dictionary of parameters for updating the url querystring :return: The URL with an updated querystring :rtype: unicode """ if not isinstance(url, bytes): url = url.encode('utf-8') for key, value in list(params.items()): if not isinstance(key, bytes): del params[key] key = key.encode('utf-8') if not isinstance(value, bytes): value = value.encode('utf-8') params[key] = value url_parts = list(urlparse(url)) query = dict(parse_qsl(url_parts[4])) query.update(params) # make the params order deterministic query = list(query.items()) query.sort() url_query = urlencode(query) if not isinstance(url_query, bytes): # pragma: no cover in python3 urlencode return an unicode url_query = url_query.encode("utf-8") url_parts[4] = url_query return urlunparse(url_parts).decode('utf-8')
[ "def", "update_url", "(", "url", ",", "params", ")", ":", "if", "not", "isinstance", "(", "url", ",", "bytes", ")", ":", "url", "=", "url", ".", "encode", "(", "'utf-8'", ")", "for", "key", ",", "value", "in", "list", "(", "params", ".", "items", "(", ")", ")", ":", "if", "not", "isinstance", "(", "key", ",", "bytes", ")", ":", "del", "params", "[", "key", "]", "key", "=", "key", ".", "encode", "(", "'utf-8'", ")", "if", "not", "isinstance", "(", "value", ",", "bytes", ")", ":", "value", "=", "value", ".", "encode", "(", "'utf-8'", ")", "params", "[", "key", "]", "=", "value", "url_parts", "=", "list", "(", "urlparse", "(", "url", ")", ")", "query", "=", "dict", "(", "parse_qsl", "(", "url_parts", "[", "4", "]", ")", ")", "query", ".", "update", "(", "params", ")", "# make the params order deterministic", "query", "=", "list", "(", "query", ".", "items", "(", ")", ")", "query", ".", "sort", "(", ")", "url_query", "=", "urlencode", "(", "query", ")", "if", "not", "isinstance", "(", "url_query", ",", "bytes", ")", ":", "# pragma: no cover in python3 urlencode return an unicode", "url_query", "=", "url_query", ".", "encode", "(", "\"utf-8\"", ")", "url_parts", "[", "4", "]", "=", "url_query", "return", "urlunparse", "(", "url_parts", ")", ".", "decode", "(", "'utf-8'", ")" ]
update parameters using ``params`` in the ``url`` query string :param url: An URL possibily with a querystring :type url: :obj:`unicode` or :obj:`str` :param dict params: A dictionary of parameters for updating the url querystring :return: The URL with an updated querystring :rtype: unicode
[ "update", "parameters", "using", "params", "in", "the", "url", "query", "string" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L241-L270
train
nitmir/django-cas-server
cas_server/utils.py
unpack_nested_exception
def unpack_nested_exception(error): """ If exception are stacked, return the first one :param error: A python exception with possible exception embeded within :return: A python exception with no exception embeded within """ i = 0 while True: if error.args[i:]: if isinstance(error.args[i], Exception): error = error.args[i] i = 0 else: i += 1 else: break return error
python
def unpack_nested_exception(error): """ If exception are stacked, return the first one :param error: A python exception with possible exception embeded within :return: A python exception with no exception embeded within """ i = 0 while True: if error.args[i:]: if isinstance(error.args[i], Exception): error = error.args[i] i = 0 else: i += 1 else: break return error
[ "def", "unpack_nested_exception", "(", "error", ")", ":", "i", "=", "0", "while", "True", ":", "if", "error", ".", "args", "[", "i", ":", "]", ":", "if", "isinstance", "(", "error", ".", "args", "[", "i", "]", ",", "Exception", ")", ":", "error", "=", "error", ".", "args", "[", "i", "]", "i", "=", "0", "else", ":", "i", "+=", "1", "else", ":", "break", "return", "error" ]
If exception are stacked, return the first one :param error: A python exception with possible exception embeded within :return: A python exception with no exception embeded within
[ "If", "exception", "are", "stacked", "return", "the", "first", "one" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L273-L290
train
nitmir/django-cas-server
cas_server/utils.py
_gen_ticket
def _gen_ticket(prefix=None, lg=settings.CAS_TICKET_LEN): """ Generate a ticket with prefix ``prefix`` and length ``lg`` :param unicode prefix: An optional prefix (probably ST, PT, PGT or PGTIOU) :param int lg: The length of the generated ticket (with the prefix) :return: A randomlly generated ticket of length ``lg`` :rtype: unicode """ random_part = u''.join( random.choice( string.ascii_letters + string.digits ) for _ in range(lg - len(prefix or "") - 1) ) if prefix is not None: return u'%s-%s' % (prefix, random_part) else: return random_part
python
def _gen_ticket(prefix=None, lg=settings.CAS_TICKET_LEN): """ Generate a ticket with prefix ``prefix`` and length ``lg`` :param unicode prefix: An optional prefix (probably ST, PT, PGT or PGTIOU) :param int lg: The length of the generated ticket (with the prefix) :return: A randomlly generated ticket of length ``lg`` :rtype: unicode """ random_part = u''.join( random.choice( string.ascii_letters + string.digits ) for _ in range(lg - len(prefix or "") - 1) ) if prefix is not None: return u'%s-%s' % (prefix, random_part) else: return random_part
[ "def", "_gen_ticket", "(", "prefix", "=", "None", ",", "lg", "=", "settings", ".", "CAS_TICKET_LEN", ")", ":", "random_part", "=", "u''", ".", "join", "(", "random", ".", "choice", "(", "string", ".", "ascii_letters", "+", "string", ".", "digits", ")", "for", "_", "in", "range", "(", "lg", "-", "len", "(", "prefix", "or", "\"\"", ")", "-", "1", ")", ")", "if", "prefix", "is", "not", "None", ":", "return", "u'%s-%s'", "%", "(", "prefix", ",", "random_part", ")", "else", ":", "return", "random_part" ]
Generate a ticket with prefix ``prefix`` and length ``lg`` :param unicode prefix: An optional prefix (probably ST, PT, PGT or PGTIOU) :param int lg: The length of the generated ticket (with the prefix) :return: A randomlly generated ticket of length ``lg`` :rtype: unicode
[ "Generate", "a", "ticket", "with", "prefix", "prefix", "and", "length", "lg" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L293-L310
train
nitmir/django-cas-server
cas_server/utils.py
crypt_salt_is_valid
def crypt_salt_is_valid(salt): """ Validate a salt as crypt salt :param str salt: a password salt :return: ``True`` if ``salt`` is a valid crypt salt on this system, ``False`` otherwise :rtype: bool """ if len(salt) < 2: return False else: if salt[0] == '$': if salt[1] == '$': return False else: if '$' not in salt[1:]: return False else: hashed = crypt.crypt("", salt) if not hashed or '$' not in hashed[1:]: return False else: return True else: return True
python
def crypt_salt_is_valid(salt): """ Validate a salt as crypt salt :param str salt: a password salt :return: ``True`` if ``salt`` is a valid crypt salt on this system, ``False`` otherwise :rtype: bool """ if len(salt) < 2: return False else: if salt[0] == '$': if salt[1] == '$': return False else: if '$' not in salt[1:]: return False else: hashed = crypt.crypt("", salt) if not hashed or '$' not in hashed[1:]: return False else: return True else: return True
[ "def", "crypt_salt_is_valid", "(", "salt", ")", ":", "if", "len", "(", "salt", ")", "<", "2", ":", "return", "False", "else", ":", "if", "salt", "[", "0", "]", "==", "'$'", ":", "if", "salt", "[", "1", "]", "==", "'$'", ":", "return", "False", "else", ":", "if", "'$'", "not", "in", "salt", "[", "1", ":", "]", ":", "return", "False", "else", ":", "hashed", "=", "crypt", ".", "crypt", "(", "\"\"", ",", "salt", ")", "if", "not", "hashed", "or", "'$'", "not", "in", "hashed", "[", "1", ":", "]", ":", "return", "False", "else", ":", "return", "True", "else", ":", "return", "True" ]
Validate a salt as crypt salt :param str salt: a password salt :return: ``True`` if ``salt`` is a valid crypt salt on this system, ``False`` otherwise :rtype: bool
[ "Validate", "a", "salt", "as", "crypt", "salt" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L393-L417
train
nitmir/django-cas-server
cas_server/utils.py
check_password
def check_password(method, password, hashed_password, charset): """ Check that ``password`` match `hashed_password` using ``method``, assuming the encoding is ``charset``. :param str method: on of ``"crypt"``, ``"ldap"``, ``"hex_md5"``, ``"hex_sha1"``, ``"hex_sha224"``, ``"hex_sha256"``, ``"hex_sha384"``, ``"hex_sha512"``, ``"plain"`` :param password: The user inputed password :type password: :obj:`str` or :obj:`unicode` :param hashed_password: The hashed password as stored in the database :type hashed_password: :obj:`str` or :obj:`unicode` :param str charset: The used char encoding (also used internally, so it must be valid for the charset used by ``password`` when it was initially ) :return: True if ``password`` match ``hashed_password`` using ``method``, ``False`` otherwise :rtype: bool """ if not isinstance(password, six.binary_type): password = password.encode(charset) if not isinstance(hashed_password, six.binary_type): hashed_password = hashed_password.encode(charset) if method == "plain": return password == hashed_password elif method == "crypt": if hashed_password.startswith(b'$'): salt = b'$'.join(hashed_password.split(b'$', 3)[:-1]) elif hashed_password.startswith(b'_'): # pragma: no cover old BSD format not supported salt = hashed_password[:9] else: salt = hashed_password[:2] if six.PY3: password = password.decode(charset) salt = salt.decode(charset) hashed_password = hashed_password.decode(charset) if not crypt_salt_is_valid(salt): raise ValueError("System crypt implementation do not support the salt %r" % salt) crypted_password = crypt.crypt(password, salt) return crypted_password == hashed_password elif method == "ldap": scheme = LdapHashUserPassword.get_scheme(hashed_password) salt = LdapHashUserPassword.get_salt(hashed_password) return LdapHashUserPassword.hash(scheme, password, salt, charset=charset) == hashed_password elif ( method.startswith("hex_") and method[4:] in {"md5", "sha1", "sha224", "sha256", "sha384", "sha512"} ): return getattr( 
hashlib, method[4:] )(password).hexdigest().encode("ascii") == hashed_password.lower() else: raise ValueError("Unknown password method check %r" % method)
python
def check_password(method, password, hashed_password, charset): """ Check that ``password`` match `hashed_password` using ``method``, assuming the encoding is ``charset``. :param str method: on of ``"crypt"``, ``"ldap"``, ``"hex_md5"``, ``"hex_sha1"``, ``"hex_sha224"``, ``"hex_sha256"``, ``"hex_sha384"``, ``"hex_sha512"``, ``"plain"`` :param password: The user inputed password :type password: :obj:`str` or :obj:`unicode` :param hashed_password: The hashed password as stored in the database :type hashed_password: :obj:`str` or :obj:`unicode` :param str charset: The used char encoding (also used internally, so it must be valid for the charset used by ``password`` when it was initially ) :return: True if ``password`` match ``hashed_password`` using ``method``, ``False`` otherwise :rtype: bool """ if not isinstance(password, six.binary_type): password = password.encode(charset) if not isinstance(hashed_password, six.binary_type): hashed_password = hashed_password.encode(charset) if method == "plain": return password == hashed_password elif method == "crypt": if hashed_password.startswith(b'$'): salt = b'$'.join(hashed_password.split(b'$', 3)[:-1]) elif hashed_password.startswith(b'_'): # pragma: no cover old BSD format not supported salt = hashed_password[:9] else: salt = hashed_password[:2] if six.PY3: password = password.decode(charset) salt = salt.decode(charset) hashed_password = hashed_password.decode(charset) if not crypt_salt_is_valid(salt): raise ValueError("System crypt implementation do not support the salt %r" % salt) crypted_password = crypt.crypt(password, salt) return crypted_password == hashed_password elif method == "ldap": scheme = LdapHashUserPassword.get_scheme(hashed_password) salt = LdapHashUserPassword.get_salt(hashed_password) return LdapHashUserPassword.hash(scheme, password, salt, charset=charset) == hashed_password elif ( method.startswith("hex_") and method[4:] in {"md5", "sha1", "sha224", "sha256", "sha384", "sha512"} ): return getattr( 
hashlib, method[4:] )(password).hexdigest().encode("ascii") == hashed_password.lower() else: raise ValueError("Unknown password method check %r" % method)
[ "def", "check_password", "(", "method", ",", "password", ",", "hashed_password", ",", "charset", ")", ":", "if", "not", "isinstance", "(", "password", ",", "six", ".", "binary_type", ")", ":", "password", "=", "password", ".", "encode", "(", "charset", ")", "if", "not", "isinstance", "(", "hashed_password", ",", "six", ".", "binary_type", ")", ":", "hashed_password", "=", "hashed_password", ".", "encode", "(", "charset", ")", "if", "method", "==", "\"plain\"", ":", "return", "password", "==", "hashed_password", "elif", "method", "==", "\"crypt\"", ":", "if", "hashed_password", ".", "startswith", "(", "b'$'", ")", ":", "salt", "=", "b'$'", ".", "join", "(", "hashed_password", ".", "split", "(", "b'$'", ",", "3", ")", "[", ":", "-", "1", "]", ")", "elif", "hashed_password", ".", "startswith", "(", "b'_'", ")", ":", "# pragma: no cover old BSD format not supported", "salt", "=", "hashed_password", "[", ":", "9", "]", "else", ":", "salt", "=", "hashed_password", "[", ":", "2", "]", "if", "six", ".", "PY3", ":", "password", "=", "password", ".", "decode", "(", "charset", ")", "salt", "=", "salt", ".", "decode", "(", "charset", ")", "hashed_password", "=", "hashed_password", ".", "decode", "(", "charset", ")", "if", "not", "crypt_salt_is_valid", "(", "salt", ")", ":", "raise", "ValueError", "(", "\"System crypt implementation do not support the salt %r\"", "%", "salt", ")", "crypted_password", "=", "crypt", ".", "crypt", "(", "password", ",", "salt", ")", "return", "crypted_password", "==", "hashed_password", "elif", "method", "==", "\"ldap\"", ":", "scheme", "=", "LdapHashUserPassword", ".", "get_scheme", "(", "hashed_password", ")", "salt", "=", "LdapHashUserPassword", ".", "get_salt", "(", "hashed_password", ")", "return", "LdapHashUserPassword", ".", "hash", "(", "scheme", ",", "password", ",", "salt", ",", "charset", "=", "charset", ")", "==", "hashed_password", "elif", "(", "method", ".", "startswith", "(", "\"hex_\"", ")", "and", "method", "[", "4", ":", "]", 
"in", "{", "\"md5\"", ",", "\"sha1\"", ",", "\"sha224\"", ",", "\"sha256\"", ",", "\"sha384\"", ",", "\"sha512\"", "}", ")", ":", "return", "getattr", "(", "hashlib", ",", "method", "[", "4", ":", "]", ")", "(", "password", ")", ".", "hexdigest", "(", ")", ".", "encode", "(", "\"ascii\"", ")", "==", "hashed_password", ".", "lower", "(", ")", "else", ":", "raise", "ValueError", "(", "\"Unknown password method check %r\"", "%", "method", ")" ]
Check that ``password`` match `hashed_password` using ``method``, assuming the encoding is ``charset``. :param str method: on of ``"crypt"``, ``"ldap"``, ``"hex_md5"``, ``"hex_sha1"``, ``"hex_sha224"``, ``"hex_sha256"``, ``"hex_sha384"``, ``"hex_sha512"``, ``"plain"`` :param password: The user inputed password :type password: :obj:`str` or :obj:`unicode` :param hashed_password: The hashed password as stored in the database :type hashed_password: :obj:`str` or :obj:`unicode` :param str charset: The used char encoding (also used internally, so it must be valid for the charset used by ``password`` when it was initially ) :return: True if ``password`` match ``hashed_password`` using ``method``, ``False`` otherwise :rtype: bool
[ "Check", "that", "password", "match", "hashed_password", "using", "method", "assuming", "the", "encoding", "is", "charset", "." ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L607-L658
train
nitmir/django-cas-server
cas_server/utils.py
last_version
def last_version(): """ Fetch the last version from pypi and return it. On successful fetch from pypi, the response is cached 24h, on error, it is cached 10 min. :return: the last django-cas-server version :rtype: unicode """ try: last_update, version, success = last_version._cache except AttributeError: last_update = 0 version = None success = False cache_delta = 24 * 3600 if success else 600 if (time.time() - last_update) < cache_delta: return version else: try: req = requests.get(settings.CAS_NEW_VERSION_JSON_URL) data = json.loads(req.text) version = data["info"]["version"] last_version._cache = (time.time(), version, True) return version except ( KeyError, ValueError, requests.exceptions.RequestException ) as error: # pragma: no cover (should not happen unless pypi is not available) logger.error( "Unable to fetch %s: %s" % (settings.CAS_NEW_VERSION_JSON_URL, error) ) last_version._cache = (time.time(), version, False)
python
def last_version(): """ Fetch the last version from pypi and return it. On successful fetch from pypi, the response is cached 24h, on error, it is cached 10 min. :return: the last django-cas-server version :rtype: unicode """ try: last_update, version, success = last_version._cache except AttributeError: last_update = 0 version = None success = False cache_delta = 24 * 3600 if success else 600 if (time.time() - last_update) < cache_delta: return version else: try: req = requests.get(settings.CAS_NEW_VERSION_JSON_URL) data = json.loads(req.text) version = data["info"]["version"] last_version._cache = (time.time(), version, True) return version except ( KeyError, ValueError, requests.exceptions.RequestException ) as error: # pragma: no cover (should not happen unless pypi is not available) logger.error( "Unable to fetch %s: %s" % (settings.CAS_NEW_VERSION_JSON_URL, error) ) last_version._cache = (time.time(), version, False)
[ "def", "last_version", "(", ")", ":", "try", ":", "last_update", ",", "version", ",", "success", "=", "last_version", ".", "_cache", "except", "AttributeError", ":", "last_update", "=", "0", "version", "=", "None", "success", "=", "False", "cache_delta", "=", "24", "*", "3600", "if", "success", "else", "600", "if", "(", "time", ".", "time", "(", ")", "-", "last_update", ")", "<", "cache_delta", ":", "return", "version", "else", ":", "try", ":", "req", "=", "requests", ".", "get", "(", "settings", ".", "CAS_NEW_VERSION_JSON_URL", ")", "data", "=", "json", ".", "loads", "(", "req", ".", "text", ")", "version", "=", "data", "[", "\"info\"", "]", "[", "\"version\"", "]", "last_version", ".", "_cache", "=", "(", "time", ".", "time", "(", ")", ",", "version", ",", "True", ")", "return", "version", "except", "(", "KeyError", ",", "ValueError", ",", "requests", ".", "exceptions", ".", "RequestException", ")", "as", "error", ":", "# pragma: no cover (should not happen unless pypi is not available)", "logger", ".", "error", "(", "\"Unable to fetch %s: %s\"", "%", "(", "settings", ".", "CAS_NEW_VERSION_JSON_URL", ",", "error", ")", ")", "last_version", ".", "_cache", "=", "(", "time", ".", "time", "(", ")", ",", "version", ",", "False", ")" ]
Fetch the last version from pypi and return it. On successful fetch from pypi, the response is cached 24h, on error, it is cached 10 min. :return: the last django-cas-server version :rtype: unicode
[ "Fetch", "the", "last", "version", "from", "pypi", "and", "return", "it", ".", "On", "successful", "fetch", "from", "pypi", "the", "response", "is", "cached", "24h", "on", "error", "it", "is", "cached", "10", "min", "." ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L673-L705
train
nitmir/django-cas-server
cas_server/utils.py
regexpr_validator
def regexpr_validator(value): """ Test that ``value`` is a valid regular expression :param unicode value: A regular expression to test :raises ValidationError: if ``value`` is not a valid regular expression """ try: re.compile(value) except re.error: raise ValidationError( _('"%(value)s" is not a valid regular expression'), params={'value': value} )
python
def regexpr_validator(value): """ Test that ``value`` is a valid regular expression :param unicode value: A regular expression to test :raises ValidationError: if ``value`` is not a valid regular expression """ try: re.compile(value) except re.error: raise ValidationError( _('"%(value)s" is not a valid regular expression'), params={'value': value} )
[ "def", "regexpr_validator", "(", "value", ")", ":", "try", ":", "re", ".", "compile", "(", "value", ")", "except", "re", ".", "error", ":", "raise", "ValidationError", "(", "_", "(", "'\"%(value)s\" is not a valid regular expression'", ")", ",", "params", "=", "{", "'value'", ":", "value", "}", ")" ]
Test that ``value`` is a valid regular expression :param unicode value: A regular expression to test :raises ValidationError: if ``value`` is not a valid regular expression
[ "Test", "that", "value", "is", "a", "valid", "regular", "expression" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L736-L749
train
nitmir/django-cas-server
cas_server/utils.py
LdapHashUserPassword.hash
def hash(cls, scheme, password, salt=None, charset="utf8"): """ Hash ``password`` with ``scheme`` using ``salt``. This three variable beeing encoded in ``charset``. :param bytes scheme: A valid scheme :param bytes password: A byte string to hash using ``scheme`` :param bytes salt: An optional salt to use if ``scheme`` requires any :param str charset: The encoding of ``scheme``, ``password`` and ``salt`` :return: The hashed password encoded with ``charset`` :rtype: bytes """ scheme = scheme.upper() cls._test_scheme(scheme) if salt is None or salt == b"": salt = b"" cls._test_scheme_nosalt(scheme) else: cls._test_scheme_salt(scheme) try: return scheme + base64.b64encode( cls._schemes_to_hash[scheme](password + salt).digest() + salt ) except KeyError: if six.PY3: password = password.decode(charset) salt = salt.decode(charset) if not crypt_salt_is_valid(salt): raise cls.BadSalt("System crypt implementation do not support the salt %r" % salt) hashed_password = crypt.crypt(password, salt) if six.PY3: hashed_password = hashed_password.encode(charset) return scheme + hashed_password
python
def hash(cls, scheme, password, salt=None, charset="utf8"): """ Hash ``password`` with ``scheme`` using ``salt``. This three variable beeing encoded in ``charset``. :param bytes scheme: A valid scheme :param bytes password: A byte string to hash using ``scheme`` :param bytes salt: An optional salt to use if ``scheme`` requires any :param str charset: The encoding of ``scheme``, ``password`` and ``salt`` :return: The hashed password encoded with ``charset`` :rtype: bytes """ scheme = scheme.upper() cls._test_scheme(scheme) if salt is None or salt == b"": salt = b"" cls._test_scheme_nosalt(scheme) else: cls._test_scheme_salt(scheme) try: return scheme + base64.b64encode( cls._schemes_to_hash[scheme](password + salt).digest() + salt ) except KeyError: if six.PY3: password = password.decode(charset) salt = salt.decode(charset) if not crypt_salt_is_valid(salt): raise cls.BadSalt("System crypt implementation do not support the salt %r" % salt) hashed_password = crypt.crypt(password, salt) if six.PY3: hashed_password = hashed_password.encode(charset) return scheme + hashed_password
[ "def", "hash", "(", "cls", ",", "scheme", ",", "password", ",", "salt", "=", "None", ",", "charset", "=", "\"utf8\"", ")", ":", "scheme", "=", "scheme", ".", "upper", "(", ")", "cls", ".", "_test_scheme", "(", "scheme", ")", "if", "salt", "is", "None", "or", "salt", "==", "b\"\"", ":", "salt", "=", "b\"\"", "cls", ".", "_test_scheme_nosalt", "(", "scheme", ")", "else", ":", "cls", ".", "_test_scheme_salt", "(", "scheme", ")", "try", ":", "return", "scheme", "+", "base64", ".", "b64encode", "(", "cls", ".", "_schemes_to_hash", "[", "scheme", "]", "(", "password", "+", "salt", ")", ".", "digest", "(", ")", "+", "salt", ")", "except", "KeyError", ":", "if", "six", ".", "PY3", ":", "password", "=", "password", ".", "decode", "(", "charset", ")", "salt", "=", "salt", ".", "decode", "(", "charset", ")", "if", "not", "crypt_salt_is_valid", "(", "salt", ")", ":", "raise", "cls", ".", "BadSalt", "(", "\"System crypt implementation do not support the salt %r\"", "%", "salt", ")", "hashed_password", "=", "crypt", ".", "crypt", "(", "password", ",", "salt", ")", "if", "six", ".", "PY3", ":", "hashed_password", "=", "hashed_password", ".", "encode", "(", "charset", ")", "return", "scheme", "+", "hashed_password" ]
Hash ``password`` with ``scheme`` using ``salt``. This three variable beeing encoded in ``charset``. :param bytes scheme: A valid scheme :param bytes password: A byte string to hash using ``scheme`` :param bytes salt: An optional salt to use if ``scheme`` requires any :param str charset: The encoding of ``scheme``, ``password`` and ``salt`` :return: The hashed password encoded with ``charset`` :rtype: bytes
[ "Hash", "password", "with", "scheme", "using", "salt", ".", "This", "three", "variable", "beeing", "encoded", "in", "charset", "." ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L530-L562
train
nitmir/django-cas-server
cas_server/utils.py
LdapHashUserPassword.get_salt
def get_salt(cls, hashed_passord): """ Return the salt of ``hashed_passord`` possibly empty :param bytes hashed_passord: A hashed password :return: The salt used by the hashed password (empty if no salt is used) :rtype: bytes :raises BadHash: if no valid scheme is found within ``hashed_passord`` or if the hashed password is too short for the scheme found. """ scheme = cls.get_scheme(hashed_passord) cls._test_scheme(scheme) if scheme in cls.schemes_nosalt: return b"" elif scheme == b'{CRYPT}': return b'$'.join(hashed_passord.split(b'$', 3)[:-1])[len(scheme):] else: try: hashed_passord = base64.b64decode(hashed_passord[len(scheme):]) except (TypeError, binascii.Error) as error: raise cls.BadHash("Bad base64: %s" % error) if len(hashed_passord) < cls._schemes_to_len[scheme]: raise cls.BadHash("Hash too short for the scheme %s" % scheme) return hashed_passord[cls._schemes_to_len[scheme]:]
python
def get_salt(cls, hashed_passord): """ Return the salt of ``hashed_passord`` possibly empty :param bytes hashed_passord: A hashed password :return: The salt used by the hashed password (empty if no salt is used) :rtype: bytes :raises BadHash: if no valid scheme is found within ``hashed_passord`` or if the hashed password is too short for the scheme found. """ scheme = cls.get_scheme(hashed_passord) cls._test_scheme(scheme) if scheme in cls.schemes_nosalt: return b"" elif scheme == b'{CRYPT}': return b'$'.join(hashed_passord.split(b'$', 3)[:-1])[len(scheme):] else: try: hashed_passord = base64.b64decode(hashed_passord[len(scheme):]) except (TypeError, binascii.Error) as error: raise cls.BadHash("Bad base64: %s" % error) if len(hashed_passord) < cls._schemes_to_len[scheme]: raise cls.BadHash("Hash too short for the scheme %s" % scheme) return hashed_passord[cls._schemes_to_len[scheme]:]
[ "def", "get_salt", "(", "cls", ",", "hashed_passord", ")", ":", "scheme", "=", "cls", ".", "get_scheme", "(", "hashed_passord", ")", "cls", ".", "_test_scheme", "(", "scheme", ")", "if", "scheme", "in", "cls", ".", "schemes_nosalt", ":", "return", "b\"\"", "elif", "scheme", "==", "b'{CRYPT}'", ":", "return", "b'$'", ".", "join", "(", "hashed_passord", ".", "split", "(", "b'$'", ",", "3", ")", "[", ":", "-", "1", "]", ")", "[", "len", "(", "scheme", ")", ":", "]", "else", ":", "try", ":", "hashed_passord", "=", "base64", ".", "b64decode", "(", "hashed_passord", "[", "len", "(", "scheme", ")", ":", "]", ")", "except", "(", "TypeError", ",", "binascii", ".", "Error", ")", "as", "error", ":", "raise", "cls", ".", "BadHash", "(", "\"Bad base64: %s\"", "%", "error", ")", "if", "len", "(", "hashed_passord", ")", "<", "cls", ".", "_schemes_to_len", "[", "scheme", "]", ":", "raise", "cls", ".", "BadHash", "(", "\"Hash too short for the scheme %s\"", "%", "scheme", ")", "return", "hashed_passord", "[", "cls", ".", "_schemes_to_len", "[", "scheme", "]", ":", "]" ]
Return the salt of ``hashed_passord`` possibly empty :param bytes hashed_passord: A hashed password :return: The salt used by the hashed password (empty if no salt is used) :rtype: bytes :raises BadHash: if no valid scheme is found within ``hashed_passord`` or if the hashed password is too short for the scheme found.
[ "Return", "the", "salt", "of", "hashed_passord", "possibly", "empty" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/utils.py#L581-L604
train
nitmir/django-cas-server
docs/_ext/djangodocs.py
visit_snippet_latex
def visit_snippet_latex(self, node): """ Latex document generator visit handler """ code = node.rawsource.rstrip('\n') lang = self.hlsettingstack[-1][0] linenos = code.count('\n') >= self.hlsettingstack[-1][1] - 1 fname = node['filename'] highlight_args = node.get('highlight_args', {}) if 'language' in node: # code-block directives lang = node['language'] highlight_args['force'] = True if 'linenos' in node: linenos = node['linenos'] def warner(msg): self.builder.warn(msg, (self.curfilestack[-1], node.line)) hlcode = self.highlighter.highlight_block(code, lang, warn=warner, linenos=linenos, **highlight_args) self.body.append( '\n{\\colorbox[rgb]{0.9,0.9,0.9}' '{\\makebox[\\textwidth][l]' '{\\small\\texttt{%s}}}}\n' % ( # Some filenames have '_', which is special in latex. fname.replace('_', r'\_'), ) ) if self.table: hlcode = hlcode.replace('\\begin{Verbatim}', '\\begin{OriginalVerbatim}') self.table.has_problematic = True self.table.has_verbatim = True hlcode = hlcode.rstrip()[:-14] # strip \end{Verbatim} hlcode = hlcode.rstrip() + '\n' self.body.append('\n' + hlcode + '\\end{%sVerbatim}\n' % (self.table and 'Original' or '')) # Prevent rawsource from appearing in output a second time. raise nodes.SkipNode
python
def visit_snippet_latex(self, node): """ Latex document generator visit handler """ code = node.rawsource.rstrip('\n') lang = self.hlsettingstack[-1][0] linenos = code.count('\n') >= self.hlsettingstack[-1][1] - 1 fname = node['filename'] highlight_args = node.get('highlight_args', {}) if 'language' in node: # code-block directives lang = node['language'] highlight_args['force'] = True if 'linenos' in node: linenos = node['linenos'] def warner(msg): self.builder.warn(msg, (self.curfilestack[-1], node.line)) hlcode = self.highlighter.highlight_block(code, lang, warn=warner, linenos=linenos, **highlight_args) self.body.append( '\n{\\colorbox[rgb]{0.9,0.9,0.9}' '{\\makebox[\\textwidth][l]' '{\\small\\texttt{%s}}}}\n' % ( # Some filenames have '_', which is special in latex. fname.replace('_', r'\_'), ) ) if self.table: hlcode = hlcode.replace('\\begin{Verbatim}', '\\begin{OriginalVerbatim}') self.table.has_problematic = True self.table.has_verbatim = True hlcode = hlcode.rstrip()[:-14] # strip \end{Verbatim} hlcode = hlcode.rstrip() + '\n' self.body.append('\n' + hlcode + '\\end{%sVerbatim}\n' % (self.table and 'Original' or '')) # Prevent rawsource from appearing in output a second time. raise nodes.SkipNode
[ "def", "visit_snippet_latex", "(", "self", ",", "node", ")", ":", "code", "=", "node", ".", "rawsource", ".", "rstrip", "(", "'\\n'", ")", "lang", "=", "self", ".", "hlsettingstack", "[", "-", "1", "]", "[", "0", "]", "linenos", "=", "code", ".", "count", "(", "'\\n'", ")", ">=", "self", ".", "hlsettingstack", "[", "-", "1", "]", "[", "1", "]", "-", "1", "fname", "=", "node", "[", "'filename'", "]", "highlight_args", "=", "node", ".", "get", "(", "'highlight_args'", ",", "{", "}", ")", "if", "'language'", "in", "node", ":", "# code-block directives", "lang", "=", "node", "[", "'language'", "]", "highlight_args", "[", "'force'", "]", "=", "True", "if", "'linenos'", "in", "node", ":", "linenos", "=", "node", "[", "'linenos'", "]", "def", "warner", "(", "msg", ")", ":", "self", ".", "builder", ".", "warn", "(", "msg", ",", "(", "self", ".", "curfilestack", "[", "-", "1", "]", ",", "node", ".", "line", ")", ")", "hlcode", "=", "self", ".", "highlighter", ".", "highlight_block", "(", "code", ",", "lang", ",", "warn", "=", "warner", ",", "linenos", "=", "linenos", ",", "*", "*", "highlight_args", ")", "self", ".", "body", ".", "append", "(", "'\\n{\\\\colorbox[rgb]{0.9,0.9,0.9}'", "'{\\\\makebox[\\\\textwidth][l]'", "'{\\\\small\\\\texttt{%s}}}}\\n'", "%", "(", "# Some filenames have '_', which is special in latex.", "fname", ".", "replace", "(", "'_'", ",", "r'\\_'", ")", ",", ")", ")", "if", "self", ".", "table", ":", "hlcode", "=", "hlcode", ".", "replace", "(", "'\\\\begin{Verbatim}'", ",", "'\\\\begin{OriginalVerbatim}'", ")", "self", ".", "table", ".", "has_problematic", "=", "True", "self", ".", "table", ".", "has_verbatim", "=", "True", "hlcode", "=", "hlcode", ".", "rstrip", "(", ")", "[", ":", "-", "14", "]", "# strip \\end{Verbatim}", "hlcode", "=", "hlcode", ".", "rstrip", "(", ")", "+", "'\\n'", "self", ".", "body", ".", "append", "(", "'\\n'", "+", "hlcode", "+", "'\\\\end{%sVerbatim}\\n'", "%", "(", "self", ".", "table", "and", "'Original'", "or", "''", 
")", ")", "# Prevent rawsource from appearing in output a second time.", "raise", "nodes", ".", "SkipNode" ]
Latex document generator visit handler
[ "Latex", "document", "generator", "visit", "handler" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/docs/_ext/djangodocs.py#L121-L166
train
nitmir/django-cas-server
cas_server/views.py
LogoutMixin.logout
def logout(self, all_session=False): """ effectively destroy a CAS session :param boolean all_session: If ``True`` destroy all the user sessions, otherwise destroy the current user session. :return: The number of destroyed sessions :rtype: int """ # initialize the counter of the number of destroyed sesisons session_nb = 0 # save the current user username before flushing the session username = self.request.session.get("username") if username: if all_session: logger.info("Logging out user %s from all sessions." % username) else: logger.info("Logging out user %s." % username) users = [] # try to get the user from the current session try: users.append( models.User.objects.get( username=username, session_key=self.request.session.session_key ) ) except models.User.DoesNotExist: # if user not found in database, flush the session anyway self.request.session.flush() # If all_session is set, search all of the user sessions if all_session: users.extend( models.User.objects.filter( username=username ).exclude( session_key=self.request.session.session_key ) ) # Iterate over all user sessions that have to be logged out for user in users: # get the user session session = SessionStore(session_key=user.session_key) # flush the session session.flush() # send SLO requests user.logout(self.request) # delete the user user.delete() # increment the destroyed session counter session_nb += 1 if username: logger.info("User %s logged out" % username) return session_nb
python
def logout(self, all_session=False): """ effectively destroy a CAS session :param boolean all_session: If ``True`` destroy all the user sessions, otherwise destroy the current user session. :return: The number of destroyed sessions :rtype: int """ # initialize the counter of the number of destroyed sesisons session_nb = 0 # save the current user username before flushing the session username = self.request.session.get("username") if username: if all_session: logger.info("Logging out user %s from all sessions." % username) else: logger.info("Logging out user %s." % username) users = [] # try to get the user from the current session try: users.append( models.User.objects.get( username=username, session_key=self.request.session.session_key ) ) except models.User.DoesNotExist: # if user not found in database, flush the session anyway self.request.session.flush() # If all_session is set, search all of the user sessions if all_session: users.extend( models.User.objects.filter( username=username ).exclude( session_key=self.request.session.session_key ) ) # Iterate over all user sessions that have to be logged out for user in users: # get the user session session = SessionStore(session_key=user.session_key) # flush the session session.flush() # send SLO requests user.logout(self.request) # delete the user user.delete() # increment the destroyed session counter session_nb += 1 if username: logger.info("User %s logged out" % username) return session_nb
[ "def", "logout", "(", "self", ",", "all_session", "=", "False", ")", ":", "# initialize the counter of the number of destroyed sesisons", "session_nb", "=", "0", "# save the current user username before flushing the session", "username", "=", "self", ".", "request", ".", "session", ".", "get", "(", "\"username\"", ")", "if", "username", ":", "if", "all_session", ":", "logger", ".", "info", "(", "\"Logging out user %s from all sessions.\"", "%", "username", ")", "else", ":", "logger", ".", "info", "(", "\"Logging out user %s.\"", "%", "username", ")", "users", "=", "[", "]", "# try to get the user from the current session", "try", ":", "users", ".", "append", "(", "models", ".", "User", ".", "objects", ".", "get", "(", "username", "=", "username", ",", "session_key", "=", "self", ".", "request", ".", "session", ".", "session_key", ")", ")", "except", "models", ".", "User", ".", "DoesNotExist", ":", "# if user not found in database, flush the session anyway", "self", ".", "request", ".", "session", ".", "flush", "(", ")", "# If all_session is set, search all of the user sessions", "if", "all_session", ":", "users", ".", "extend", "(", "models", ".", "User", ".", "objects", ".", "filter", "(", "username", "=", "username", ")", ".", "exclude", "(", "session_key", "=", "self", ".", "request", ".", "session", ".", "session_key", ")", ")", "# Iterate over all user sessions that have to be logged out", "for", "user", "in", "users", ":", "# get the user session", "session", "=", "SessionStore", "(", "session_key", "=", "user", ".", "session_key", ")", "# flush the session", "session", ".", "flush", "(", ")", "# send SLO requests", "user", ".", "logout", "(", "self", ".", "request", ")", "# delete the user", "user", ".", "delete", "(", ")", "# increment the destroyed session counter", "session_nb", "+=", "1", "if", "username", ":", "logger", ".", "info", "(", "\"User %s logged out\"", "%", "username", ")", "return", "session_nb" ]
effectively destroy a CAS session :param boolean all_session: If ``True`` destroy all the user sessions, otherwise destroy the current user session. :return: The number of destroyed sessions :rtype: int
[ "effectively", "destroy", "a", "CAS", "session" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L53-L108
train