def all_keys(self):
    '''
    Merge managed keys with local keys
    '''
    keys = self.list_keys()
    keys.update(self.local_keys())
    return keys

def list_status(self, match):
    '''
    Return a dict of managed keys under a named status
    '''
    acc, pre, rej, den = self._check_minions_directories()
    ret = {}
    if match.startswith(u'acc'):
        ret[os.path.basename(acc)] = []
        for fn_ in salt.utils.isorted(os.listdir(acc)):
            if not fn_.startswith(u'.'):
                if os.path.isfile(os.path.join(acc, fn_)):
                    ret[os.path.basename(acc)].append(fn_)
    elif match.startswith(u'pre') or match.startswith(u'un'):
        ret[os.path.basename(pre)] = []
        for fn_ in salt.utils.isorted(os.listdir(pre)):
            if not fn_.startswith(u'.'):
                if os.path.isfile(os.path.join(pre, fn_)):
                    ret[os.path.basename(pre)].append(fn_)
    elif match.startswith(u'rej'):
        ret[os.path.basename(rej)] = []
        for fn_ in salt.utils.isorted(os.listdir(rej)):
            if not fn_.startswith(u'.'):
                if os.path.isfile(os.path.join(rej, fn_)):
                    ret[os.path.basename(rej)].append(fn_)
    elif match.startswith(u'den') and den is not None:
        ret[os.path.basename(den)] = []
        for fn_ in salt.utils.isorted(os.listdir(den)):
            if not fn_.startswith(u'.'):
                if os.path.isfile(os.path.join(den, fn_)):
                    ret[os.path.basename(den)].append(fn_)
    elif match.startswith(u'all'):
        return self.all_keys()
    return ret

def key_str(self, match):
    '''
    Return the specified public key or keys based on a glob
    '''
    ret = {}
    for status, keys in six.iteritems(self.name_match(match)):
        ret[status] = {}
        for key in salt.utils.isorted(keys):
            path = os.path.join(self.opts[u'pki_dir'], status, key)
            with salt.utils.files.fopen(path, u'r') as fp_:
                ret[status][key] = fp_.read()
    return ret

def key_str_all(self):
    '''
    Return all managed key strings
    '''
    ret = {}
    for status, keys in six.iteritems(self.list_keys()):
        ret[status] = {}
        for key in salt.utils.isorted(keys):
            path = os.path.join(self.opts[u'pki_dir'], status, key)
            with salt.utils.files.fopen(path, u'r') as fp_:
                ret[status][key] = fp_.read()
    return ret

def accept(self, match=None, match_dict=None, include_rejected=False, include_denied=False):
    '''
    Accept public keys. If "match" is passed, it is evaluated as a glob.
    Pre-gathered matches can also be passed via "match_dict".
    '''
    if match is not None:
        matches = self.name_match(match)
    elif match_dict is not None and isinstance(match_dict, dict):
        matches = match_dict
    else:
        matches = {}
    keydirs = [self.PEND]
    if include_rejected:
        keydirs.append(self.REJ)
    if include_denied:
        keydirs.append(self.DEN)
    for keydir in keydirs:
        for key in matches.get(keydir, []):
            try:
                shutil.move(
                    os.path.join(self.opts[u'pki_dir'], keydir, key),
                    os.path.join(self.opts[u'pki_dir'], self.ACC, key))
                eload = {u'result': True, u'act': u'accept', u'id': key}
                self.event.fire_event(eload, salt.utils.event.tagify(prefix=u'key'))
            except (IOError, OSError):
                pass
    return self.name_match(match) if match is not None else self.dict_match(matches)

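A minimal usage sketch of the accept flow above. The Key class name, the loaded opts dict, and the minion IDs are illustrative assumptions, not part of the listing:

    # Accept every pending key matching 'web*', also pulling matching
    # keys back out of the rejected directory.
    key_mgr = Key(opts)  # hypothetical: opts is a loaded master config
    result = key_mgr.accept(match=u'web*', include_rejected=True)
    # result maps each status directory to the keys now found under it,
    # e.g. {u'minions': [u'web1', u'web2'], u'minions_pre': []}
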
def accept_all(self):
    '''
    Accept all keys in pre
    '''
    keys = self.list_keys()
    for key in keys[self.PEND]:
        try:
            shutil.move(
                os.path.join(self.opts[u'pki_dir'], self.PEND, key),
                os.path.join(self.opts[u'pki_dir'], self.ACC, key))
            eload = {u'result': True, u'act': u'accept', u'id': key}
            self.event.fire_event(eload, salt.utils.event.tagify(prefix=u'key'))
        except (IOError, OSError):
            pass
    return self.list_keys()

def delete_key(self, match=None, match_dict=None, preserve_minions=False, revoke_auth=False):
    '''
    Delete public keys. If "match" is passed, it is evaluated as a glob.
    Pre-gathered matches can also be passed via "match_dict". To preserve
    the master caches of minions who are matched, set preserve_minions.
    '''
    if match is not None:
        matches = self.name_match(match)
    elif match_dict is not None and isinstance(match_dict, dict):
        matches = match_dict
    else:
        matches = {}
    for status, keys in six.iteritems(matches):
        for key in keys:
            try:
                if revoke_auth:
                    if self.opts.get(u'rotate_aes_key') is False:
                        print(u'Immediate auth revocation specified but AES key rotation not allowed. Minion will not be disconnected until the master AES key is rotated.')
                    else:
                        try:
                            client = salt.client.get_local_client(mopts=self.opts)
                            client.cmd_async(key, u'saltutil.revoke_auth')
                        except salt.exceptions.SaltClientError:
                            print(u"Cannot contact Salt master. Connection for {0} will remain up until master AES key is rotated or auth is revoked with 'saltutil.revoke_auth'.".format(key))
                os.remove(os.path.join(self.opts[u'pki_dir'], status, key))
                eload = {u'result': True, u'act': u'delete', u'id': key}
                self.event.fire_event(eload, salt.utils.event.tagify(prefix=u'key'))
            except (OSError, IOError):
                pass
    if preserve_minions:
        preserve_minions_list = matches.get(u'minions', [])
    else:
        preserve_minions_list = []
    self.check_minion_cache(preserve_minions=preserve_minions_list)
    if self.opts.get(u'rotate_aes_key'):
        salt.crypt.dropfile(self.opts[u'cachedir'], self.opts[u'user'])
    return self.name_match(match) if match is not None else self.dict_match(matches)

def delete_den(self):
    '''
    Delete all denied keys
    '''
    keys = self.list_keys()
    # Remove only the key files sitting in the denied directory
    for key in keys.get(self.DEN, []):
        try:
            os.remove(os.path.join(self.opts[u'pki_dir'], self.DEN, key))
            eload = {u'result': True, u'act': u'delete', u'id': key}
            self.event.fire_event(eload, salt.utils.event.tagify(prefix=u'key'))
        except (OSError, IOError):
            pass
    self.check_minion_cache()
    return self.list_keys()

def delete_all(self):
    '''
    Delete all keys
    '''
    for status, keys in six.iteritems(self.list_keys()):
        for key in keys:
            try:
                os.remove(os.path.join(self.opts[u'pki_dir'], status, key))
                eload = {u'result': True, u'act': u'delete', u'id': key}
                self.event.fire_event(eload, salt.utils.event.tagify(prefix=u'key'))
            except (OSError, IOError):
                pass
    self.check_minion_cache()
    if self.opts.get(u'rotate_aes_key'):
        salt.crypt.dropfile(self.opts[u'cachedir'], self.opts[u'user'])
    return self.list_keys()

def reject(self, match=None, match_dict=None, include_accepted=False, include_denied=False):
    '''
    Reject public keys. If "match" is passed, it is evaluated as a glob.
    Pre-gathered matches can also be passed via "match_dict".
    '''
    if match is not None:
        matches = self.name_match(match)
    elif match_dict is not None and isinstance(match_dict, dict):
        matches = match_dict
    else:
        matches = {}
    keydirs = [self.PEND]
    if include_accepted:
        keydirs.append(self.ACC)
    if include_denied:
        keydirs.append(self.DEN)
    for keydir in keydirs:
        for key in matches.get(keydir, []):
            try:
                shutil.move(
                    os.path.join(self.opts[u'pki_dir'], keydir, key),
                    os.path.join(self.opts[u'pki_dir'], self.REJ, key))
                eload = {u'result': True, u'act': u'reject', u'id': key}
                self.event.fire_event(eload, salt.utils.event.tagify(prefix=u'key'))
            except (IOError, OSError):
                pass
    self.check_minion_cache()
    if self.opts.get(u'rotate_aes_key'):
        salt.crypt.dropfile(self.opts[u'cachedir'], self.opts[u'user'])
    return self.name_match(match) if match is not None else self.dict_match(matches)

def reject_all(self):
    '''
    Reject all keys in pre
    '''
    keys = self.list_keys()
    for key in keys[self.PEND]:
        try:
            shutil.move(
                os.path.join(self.opts[u'pki_dir'], self.PEND, key),
                os.path.join(self.opts[u'pki_dir'], self.REJ, key))
            eload = {u'result': True, u'act': u'reject', u'id': key}
            self.event.fire_event(eload, salt.utils.event.tagify(prefix=u'key'))
        except (IOError, OSError):
            pass
    self.check_minion_cache()
    if self.opts.get(u'rotate_aes_key'):
        salt.crypt.dropfile(self.opts[u'cachedir'], self.opts[u'user'])
    return self.list_keys()

def finger(self, match, hash_type=None):
    '''
    Return the fingerprint for a specified key
    '''
    if hash_type is None:
        hash_type = __opts__[u'hash_type']
    matches = self.name_match(match, True)
    ret = {}
    for status, keys in six.iteritems(matches):
        ret[status] = {}
        for key in keys:
            if status == u'local':
                path = os.path.join(self.opts[u'pki_dir'], key)
            else:
                path = os.path.join(self.opts[u'pki_dir'], status, key)
            ret[status][key] = salt.utils.pem_finger(path, sum_type=hash_type)
    return ret

def finger_all(self, hash_type=None):
    '''
    Return fingerprints for all keys
    '''
    if hash_type is None:
        hash_type = __opts__[u'hash_type']
    ret = {}
    for status, keys in six.iteritems(self.all_keys()):
        ret[status] = {}
        for key in keys:
            if status == u'local':
                path = os.path.join(self.opts[u'pki_dir'], key)
            else:
                path = os.path.join(self.opts[u'pki_dir'], status, key)
            ret[status][key] = salt.utils.pem_finger(path, sum_type=hash_type)
    return ret

def _check_minions_directories(self):
    '''
    Return the minion keys directory paths
    '''
    accepted = os.path.join(self.opts[u'pki_dir'], self.ACC)
    pre = os.path.join(self.opts[u'pki_dir'], self.PEND)
    rejected = os.path.join(self.opts[u'pki_dir'], self.REJ)
    return accepted, pre, rejected, None

def check_minion_cache(self, preserve_minions=None):
    '''
    Check the minion cache to make sure that old minion data is cleared
    '''
    if preserve_minions is None:
        preserve_minions = []
    keys = self.list_keys()
    minions = []
    for key, val in six.iteritems(keys):
        minions.extend(val)
    m_cache = os.path.join(self.opts[u'cachedir'], u'minions')
    if os.path.isdir(m_cache):
        for minion in os.listdir(m_cache):
            if minion not in minions:
                shutil.rmtree(os.path.join(m_cache, minion))
        cache = salt.cache.factory(self.opts)
        clist = cache.ls(self.ACC)
        if clist:
            for minion in clist:
                if minion not in minions and minion not in preserve_minions:
                    cache.flush(u'{0}/{1}'.format(self.ACC, minion))
    kind = self.opts.get(u'__role', u'')
    if kind not in kinds.APPL_KINDS:
        emsg = u"Invalid application kind = '{0}'.".format(kind)
        log.error(emsg + u'\n')
        raise ValueError(emsg)
    role = self.opts.get(u'id', u'')
    if not role:
        emsg = u'Invalid id.'
        log.error(emsg + u'\n')
        raise ValueError(emsg)
    name = u'{0}_{1}'.format(role, kind)
    road_cache = os.path.join(self.opts[u'cachedir'], u'raet', name, u'remote')
    if os.path.isdir(road_cache):
        for road in os.listdir(road_cache):
            root, ext = os.path.splitext(road)
            if ext not in (u'.json', u'.msgpack'):
                continue
            prefix, sep, name = root.partition(u'.')
            if not name or prefix != u'estate':
                continue
            path = os.path.join(road_cache, road)
            with salt.utils.files.fopen(path, u'rb') as fp_:
                if ext == u'.json':
                    data = json.load(fp_)
                elif ext == u'.msgpack':
                    data = msgpack.load(fp_)
                if data[u'role'] not in minions:
                    os.remove(path)

def gen_keys(self, keydir=None, keyname=None, keysize=None, user=None):
    '''
    Use libnacl to generate and safely save a private key
    '''
    import libnacl.dual
    d_key = libnacl.dual.DualSecret()
    keydir, keyname, _, _ = self._get_key_attrs(keydir, keyname, keysize, user)
    path = u'{0}.key'.format(os.path.join(keydir, keyname))
    d_key.save(path, u'msgpack')

def check_master(self):
    '''
    Log if the master is not running.
    NOT YET IMPLEMENTED
    '''
    return True

def local_keys(self):
    '''
    Return a dict of local keys
    '''
    ret = {u'local': []}
    fn_ = os.path.join(self.opts[u'pki_dir'], u'local.key')
    if os.path.isfile(fn_):
        ret[u'local'].append(fn_)
    return ret

def status(self, minion_id, pub, verify):
    '''
    Accepts the minion id, device id, curve public and verify keys.
    If the key is not present, put it in pending and return "pending".
    If the key has been accepted, return "accepted".
    If the key should be rejected, return "rejected".
    '''
    acc, pre, rej, _ = self._check_minions_directories()
    acc_path = os.path.join(acc, minion_id)
    pre_path = os.path.join(pre, minion_id)
    rej_path = os.path.join(rej, minion_id)
    keydata = {u'minion_id': minion_id, u'pub': pub, u'verify': verify}
    if self.opts[u'open_mode']:
        with salt.utils.files.fopen(acc_path, u'w+b') as fp_:
            fp_.write(self.serial.dumps(keydata))
            return self.ACC
    if os.path.isfile(rej_path):
        log.debug(u'Rejection Reason: Keys already rejected.\n')
        return self.REJ
    elif os.path.isfile(acc_path):
        with salt.utils.files.fopen(acc_path, u'rb') as fp_:
            keydata = self.serial.loads(fp_.read())
            if keydata[u'pub'] == pub and keydata[u'verify'] == verify:
                return self.ACC
            else:
                log.debug(u'Rejection Reason: Keys not match prior accepted.\n')
                return self.REJ
    elif os.path.isfile(pre_path):
        auto_reject = self.auto_key.check_autoreject(minion_id)
        auto_sign = self.auto_key.check_autosign(minion_id)
        with salt.utils.files.fopen(pre_path, u'rb') as fp_:
            keydata = self.serial.loads(fp_.read())
            if keydata[u'pub'] == pub and keydata[u'verify'] == verify:
                if auto_reject:
                    self.reject(minion_id)
                    log.debug(u'Rejection Reason: Auto reject pended.\n')
                    return self.REJ
                elif auto_sign:
                    self.accept(minion_id)
                    return self.ACC
                return self.PEND
            else:
                log.debug(u'Rejection Reason: Keys not match prior pended.\n')
                return self.REJ
    auto_reject = self.auto_key.check_autoreject(minion_id)
    auto_sign = self.auto_key.check_autosign(minion_id)
    if self.opts[u'auto_accept']:
        w_path = acc_path
        ret = self.ACC
    elif auto_sign:
        w_path = acc_path
        ret = self.ACC
    elif auto_reject:
        w_path = rej_path
        log.debug(u'Rejection Reason: Auto reject new.\n')
        ret = self.REJ
    else:
        w_path = pre_path
        ret = self.PEND
    with salt.utils.files.fopen(w_path, u'w+b') as fp_:
        fp_.write(self.serial.dumps(keydata))
        return ret

def _get_key_str(self, minion_id, status):
    '''
    Return the key string in the form of:

    pub: <pub>
    verify: <verify>
    '''
    path = os.path.join(self.opts[u'pki_dir'], status, minion_id)
    with salt.utils.files.fopen(path, u'r') as fp_:
        keydata = self.serial.loads(fp_.read())
        return u'pub: {0}\nverify: {1}'.format(keydata[u'pub'], keydata[u'verify'])

def _get_key_finger(self, path):
    '''
    Return a sha256 fingerprint for the key
    '''
    with salt.utils.files.fopen(path, u'r') as fp_:
        keydata = self.serial.loads(fp_.read())
        key = u'pub: {0}\nverify: {1}'.format(keydata[u'pub'], keydata[u'verify'])
    return hashlib.sha256(key).hexdigest()

def key_str(self, match):
    '''
    Return the specified public key or keys based on a glob
    '''
    ret = {}
    for status, keys in six.iteritems(self.name_match(match)):
        ret[status] = {}
        for key in salt.utils.isorted(keys):
            ret[status][key] = self._get_key_str(key, status)
    return ret

def key_str_all(self):
    '''
    Return all managed key strings
    '''
    ret = {}
    for status, keys in six.iteritems(self.list_keys()):
        ret[status] = {}
        for key in salt.utils.isorted(keys):
            ret[status][key] = self._get_key_str(key, status)
    return ret

def accept(self, match=None, match_dict=None, include_rejected=False, include_denied=False):
    '''
    Accept public keys. If "match" is passed, it is evaluated as a glob.
    Pre-gathered matches can also be passed via "match_dict".
    '''
    if match is not None:
        matches = self.name_match(match)
    elif match_dict is not None and isinstance(match_dict, dict):
        matches = match_dict
    else:
        matches = {}
    keydirs = [self.PEND]
    if include_rejected:
        keydirs.append(self.REJ)
    if include_denied:
        keydirs.append(self.DEN)
    for keydir in keydirs:
        for key in matches.get(keydir, []):
            try:
                shutil.move(
                    os.path.join(self.opts[u'pki_dir'], keydir, key),
                    os.path.join(self.opts[u'pki_dir'], self.ACC, key))
            except (IOError, OSError):
                pass
    return self.name_match(match) if match is not None else self.dict_match(matches)

def accept_all(self):
    '''
    Accept all keys in pre
    '''
    keys = self.list_keys()
    for key in keys[self.PEND]:
        try:
            shutil.move(
                os.path.join(self.opts[u'pki_dir'], self.PEND, key),
                os.path.join(self.opts[u'pki_dir'], self.ACC, key))
        except (IOError, OSError):
            pass
    return self.list_keys()

def delete_key(self, match=None, match_dict=None, preserve_minions=False, revoke_auth=False):
    '''
    Delete public keys. If "match" is passed, it is evaluated as a glob.
    Pre-gathered matches can also be passed via "match_dict".
    '''
    if match is not None:
        matches = self.name_match(match)
    elif match_dict is not None and isinstance(match_dict, dict):
        matches = match_dict
    else:
        matches = {}
    for status, keys in six.iteritems(matches):
        for key in keys:
            if revoke_auth:
                if self.opts.get(u'rotate_aes_key') is False:
                    print(u'Immediate auth revocation specified but AES key rotation not allowed. Minion will not be disconnected until the master AES key is rotated.')
                else:
                    try:
                        client = salt.client.get_local_client(mopts=self.opts)
                        client.cmd_async(key, u'saltutil.revoke_auth')
                    except salt.exceptions.SaltClientError:
                        print(u"Cannot contact Salt master. Connection for {0} will remain up until master AES key is rotated or auth is revoked with 'saltutil.revoke_auth'.".format(key))
            try:
                os.remove(os.path.join(self.opts[u'pki_dir'], status, key))
            except (OSError, IOError):
                pass
    self.check_minion_cache(preserve_minions=matches.get(u'minions', []))
    return self.name_match(match) if match is not None else self.dict_match(matches)

def delete_all(self):
    '''
    Delete all keys
    '''
    for status, keys in six.iteritems(self.list_keys()):
        for key in keys:
            try:
                os.remove(os.path.join(self.opts[u'pki_dir'], status, key))
            except (OSError, IOError):
                pass
    self.check_minion_cache()
    return self.list_keys()

def reject(self, match=None, match_dict=None, include_accepted=False, include_denied=False):
    '''
    Reject public keys. If "match" is passed, it is evaluated as a glob.
    Pre-gathered matches can also be passed via "match_dict".
    '''
    if match is not None:
        matches = self.name_match(match)
    elif match_dict is not None and isinstance(match_dict, dict):
        matches = match_dict
    else:
        matches = {}
    keydirs = [self.PEND]
    if include_accepted:
        keydirs.append(self.ACC)
    if include_denied:
        keydirs.append(self.DEN)
    for keydir in keydirs:
        for key in matches.get(keydir, []):
            try:
                shutil.move(
                    os.path.join(self.opts[u'pki_dir'], keydir, key),
                    os.path.join(self.opts[u'pki_dir'], self.REJ, key))
            except (IOError, OSError):
                pass
    self.check_minion_cache()
    return self.name_match(match) if match is not None else self.dict_match(matches)

def reject_all(self):
    '''
    Reject all keys in pre
    '''
    keys = self.list_keys()
    for key in keys[self.PEND]:
        try:
            shutil.move(
                os.path.join(self.opts[u'pki_dir'], self.PEND, key),
                os.path.join(self.opts[u'pki_dir'], self.REJ, key))
        except (IOError, OSError):
            pass
    self.check_minion_cache()
    return self.list_keys()

def finger(self, match, hash_type=None):
    '''
    Return the fingerprint for a specified key
    '''
    if hash_type is None:
        hash_type = __opts__[u'hash_type']
    matches = self.name_match(match, True)
    ret = {}
    for status, keys in six.iteritems(matches):
        ret[status] = {}
        for key in keys:
            if status == u'local':
                path = os.path.join(self.opts[u'pki_dir'], key)
            else:
                path = os.path.join(self.opts[u'pki_dir'], status, key)
            ret[status][key] = self._get_key_finger(path)
    return ret

def finger_all(self, hash_type=None):
    '''
    Return fingerprints for all keys
    '''
    if hash_type is None:
        hash_type = __opts__[u'hash_type']
    ret = {}
    for status, keys in six.iteritems(self.list_keys()):
        ret[status] = {}
        for key in keys:
            if status == u'local':
                path = os.path.join(self.opts[u'pki_dir'], key)
            else:
                path = os.path.join(self.opts[u'pki_dir'], status, key)
            ret[status][key] = self._get_key_finger(path)
    return ret

def read_all_remote(self):
    '''
    Return a dict of all remote key data
    '''
    data = {}
    for status, mids in six.iteritems(self.list_keys()):
        for mid in mids:
            keydata = self.read_remote(mid, status)
            if keydata:
                keydata[u'acceptance'] = status
                data[mid] = keydata
    return data

def read_remote(self, minion_id, status=ACC):
    '''
    Read in a remote key of status
    '''
    path = os.path.join(self.opts[u'pki_dir'], status, minion_id)
    if not os.path.isfile(path):
        return {}
    with salt.utils.files.fopen(path, u'rb') as fp_:
        return self.serial.loads(fp_.read())

def read_local(self):
    '''
    Read in the local private keys, return an empty dict if the keys do not exist
    '''
    path = os.path.join(self.opts[u'pki_dir'], u'local.key')
    if not os.path.isfile(path):
        return {}
    with salt.utils.files.fopen(path, u'rb') as fp_:
        return self.serial.loads(fp_.read())

def write_local(self, priv, sign):
    '''
    Write the private key and the signing key to a file on disk
    '''
    keydata = {u'priv': priv, u'sign': sign}
    path = os.path.join(self.opts[u'pki_dir'], u'local.key')
    c_umask = os.umask(0o277)  # 191 decimal: strip all group/other access
    if os.path.exists(path):
        # Make sure the key file is writable so it can be rewritten
        os.chmod(path, stat.S_IWUSR | stat.S_IRUSR)
    with salt.utils.files.fopen(path, u'w+') as fp_:
        fp_.write(self.serial.dumps(keydata))
        os.chmod(path, stat.S_IRUSR)
    os.umask(c_umask)

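A plausible round trip over the two local-key methods above; priv and sign stand in for key strings produced elsewhere, and key_mgr is an illustrative instance name:

    key_mgr.write_local(priv, sign)   # file lands owner-read-only (0400)
    local = key_mgr.read_local()      # -> {u'priv': priv, u'sign': sign}
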
def delete_local(self):
    '''
    Delete the local private key file
    '''
    path = os.path.join(self.opts[u'pki_dir'], u'local.key')
    if os.path.isfile(path):
        os.remove(path)

def delete_pki_dir(self):
    '''
    Delete the private key directory
    '''
    path = self.opts[u'pki_dir']
    if os.path.exists(path):
        shutil.rmtree(path)

def _gen_back(self):
    '''
    Return a list of loaded roster backends
    '''
    back = set()
    if self.backends:
        for backend in self.backends:
            fun = '{0}.targets'.format(backend)
            if fun in self.rosters:
                back.add(backend)
        return back
    return sorted(back)

def targets(self, tgt, tgt_type):
    '''
    Return a dict of {'id': {'ipv4': <ipaddr>}} data sets to be used as
    targets given the passed tgt and tgt_type
    '''
    targets = {}
    for back in self._gen_back():
        f_str = '{0}.targets'.format(back)
        if f_str not in self.rosters:
            continue
        try:
            targets.update(self.rosters[f_str](tgt, tgt_type))
        except salt.exceptions.SaltRenderError as exc:
            log.error('Unable to render roster file: {0}'.format(exc))
        except IOError:
            pass
    log.debug('Matched minions: {0}'.format(targets))
    return targets

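A hedged usage sketch: the constructor arguments and the returned fields depend on which roster backends are configured, so the names below are illustrative:

    roster = Roster(opts)                  # assumed: opts lists roster backends
    tgts = roster.targets('web*', 'glob')
    # e.g. {'web1': {'host': '10.0.0.5', 'port': 22}, 'web2': {...}}
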
def targets(self):
    '''
    Execute the correct tgt_type routine and return
    '''
    try:
        routine = getattr(self, 'get_{0}'.format(self.tgt_type))
    except AttributeError:
        return {}
    return routine()

def get_glob(self):
    '''
    Return minions that match via glob
    '''
    ret = dict()
    for key, value in six.iteritems(self.groups):
        for host, info in six.iteritems(value):
            if fnmatch.fnmatch(host, self.tgt):
                ret[host] = info
    for nodegroup in self.groups:
        if fnmatch.fnmatch(nodegroup, self.tgt):
            ret.update(self.groups[nodegroup])
    for parent_nodegroup in self.parents:
        if fnmatch.fnmatch(parent_nodegroup, self.tgt):
            ret.update(self._get_parent(parent_nodegroup))
    return ret

def _get_parent(self, parent_nodegroup):
    '''
    Recursively resolve all [*:children] group blocks
    '''
    ret = dict()
    for nodegroup in self.parents[parent_nodegroup]:
        if nodegroup in self.parents:
            ret.update(self._get_parent(nodegroup))
        elif nodegroup in self.groups:
            ret.update(self.groups[nodegroup])
    return ret

def _parse_group_line(self, line, varname):
    '''
    Parse lines in the inventory file that are under the same group block
    '''
    line_args = salt.utils.args.shlex_split(line)
    name = line_args[0]
    host = {line_args[0]: dict()}
    for arg in line_args[1:]:
        key, value = arg.split('=')
        host[name][CONVERSION[key]] = value
    if 'sudo' in host[name]:
        host[name]['passwd'], host[name]['sudo'] = host[name]['sudo'], True
    if self.groups.get(varname, ''):
        self.groups[varname].update(host)
    else:
        self.groups[varname] = host

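For example, an inventory line under a [webservers] block parses as sketched below. The exact CONVERSION mapping is assumed here (e.g. ansible_ssh_host -> host, ansible_ssh_port -> port), so treat the field names as illustrative:

    # line = 'web1 ansible_ssh_host=10.0.0.5 ansible_ssh_port=2222 sudo=hunter2'
    # after self._parse_group_line(line, 'webservers'):
    # self.groups['webservers'] ==
    #     {'web1': {'host': '10.0.0.5', 'port': '2222',
    #               'passwd': 'hunter2', 'sudo': True}}
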
def _parse_hostvars_line(self, line, varname):
    '''
    Parse lines in the inventory file that are under the same [*:vars] block
    '''
    key, value = line.split('=')
    if varname not in self.hostvars:
        self.hostvars[varname] = dict()
    self.hostvars[varname][key] = value

def _parse_parents_line(self, line, varname):
    '''
    Parse lines in the inventory file that are under the same [*:children] block
    '''
    if varname not in self.parents:
        self.parents[varname] = []
    self.parents[varname].append(line)

def _parse_groups(self, key, value):
    '''
    Parse group data from inventory_file
    '''
    host = dict()
    if key not in self.groups:
        self.groups[key] = dict()
    for server in value:
        tmp = self.meta.get('hostvars', {}).get(server, False)
        if tmp is not False:
            if server not in host:
                host[server] = dict()
            for tmpkey, tmpval in six.iteritems(tmp):
                if tmpkey in CONVERSION:
                    host[server][CONVERSION[tmpkey]] = tmpval
            if 'sudo' in host[server]:
                host[server]['passwd'], host[server]['sudo'] = host[server]['sudo'], True
    self.groups[key].update(host)

def _parse_hostvars(self, key, value):
    '''
    Parse hostvars data from inventory_file
    '''
    if key not in self.hostvars:
        self.hostvars[key] = dict()
    self.hostvars[key] = value

def _parse_parents(self, key, value):
    '''
    Parse children data from inventory_file
    '''
    if key not in self.parents:
        self.parents[key] = []
    self.parents[key].extend(value)

def targets(self):
    '''
    Return ip addrs based on netmask, sitting in the "glob" spot because
    it is the default
    '''
    addrs = ()
    ret = {}
    ports = __opts__['ssh_scan_ports']
    if not isinstance(ports, list):
        ports = list(map(int, str(ports).split(',')))
    try:
        addrs = [ipaddress.ip_address(self.tgt)]
    except ValueError:
        try:
            addrs = ipaddress.ip_network(self.tgt).hosts()
        except ValueError:
            pass
    for addr in addrs:
        addr = str(addr)
        ret[addr] = __opts__.get('roster_defaults', {}).copy()
        log.trace('Scanning host: {0}'.format(addr))
        for port in ports:
            log.trace('Scanning port: {0}'.format(port))
            try:
                sock = salt.utils.network.get_socket(addr, socket.SOCK_STREAM)
                sock.settimeout(float(__opts__['ssh_scan_timeout']))
                sock.connect((addr, port))
                sock.shutdown(socket.SHUT_RDWR)
                sock.close()
                ret[addr].update({'host': addr, 'port': port})
            except socket.error:
                pass
    return ret

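The scan is driven entirely by configuration; a sketch of the relevant opts (the option keys appear in the code above, the values are illustrative):

    # __opts__['ssh_scan_ports'] = '22,2222'   # parsed into [22, 2222]
    # __opts__['ssh_scan_timeout'] = 0.01      # per-port connect timeout
    # With tgt = '10.0.0.0/30', every host address in the network is
    # probed, and the last port accepting a TCP connection is recorded.
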
def targets(self):
    '''
    Execute the correct tgt_type routine and return
    '''
    try:
        return getattr(self, 'ret_{0}_minions'.format(self.tgt_type))()
    except AttributeError:
        return {}

def ret_glob_minions(self):
    '''
    Return minions that match via glob
    '''
    minions = {}
    for minion in self.raw:
        if fnmatch.fnmatch(minion, self.tgt):
            data = self.get_data(minion)
            if data:
                minions[minion] = data
    return minions

def ret_pcre_minions(self):
    '''
    Return minions that match via pcre
    '''
    minions = {}
    for minion in self.raw:
        if re.match(self.tgt, minion):
            data = self.get_data(minion)
            if data:
                minions[minion] = data
    return minions

def ret_list_minions(self):
    '''
    Return minions that match via list
    '''
    minions = {}
    if not isinstance(self.tgt, list):
        self.tgt = self.tgt.split(',')
    for minion in self.raw:
        if minion in self.tgt:
            data = self.get_data(minion)
            if data:
                minions[minion] = data
    return minions

def ret_nodegroup_minions(self):
    '''
    Return minions which match the special list-only groups defined by
    ssh_list_nodegroups
    '''
    minions = {}
    nodegroup = __opts__.get('ssh_list_nodegroups', {}).get(self.tgt, [])
    if not isinstance(nodegroup, list):
        nodegroup = nodegroup.split(',')
    for minion in self.raw:
        if minion in nodegroup:
            data = self.get_data(minion)
            if data:
                minions[minion] = data
    return minions

def ret_range_minions(self):
    '''
    Return minions that are returned by a range query
    '''
    if HAS_RANGE is False:
        raise RuntimeError("Python lib 'seco.range' is not available")
    minions = {}
    range_hosts = _convert_range_to_list(self.tgt, __opts__['range_server'])
    for minion in self.raw:
        if minion in range_hosts:
            data = self.get_data(minion)
            if data:
                minions[minion] = data
    return minions

def get_data(self, minion):
    '''
    Return the configured ip
    '''
    # Copy the defaults so per-minion updates do not mutate the shared dict
    ret = __opts__.get('roster_defaults', {}).copy()
    if isinstance(self.raw[minion], string_types):
        ret.update({'host': self.raw[minion]})
        return ret
    elif isinstance(self.raw[minion], dict):
        ret.update(self.raw[minion])
        return ret
    return False

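Both roster entry shapes normalize onto the defaults; the data below is illustrative:

    # raw = {'web1': '10.0.0.5',
    #        'web2': {'host': '10.0.0.6', 'user': 'deploy'}}
    # get_data('web1') -> roster_defaults + {'host': '10.0.0.5'}
    # get_data('web2') -> roster_defaults + {'host': '10.0.0.6', 'user': 'deploy'}
    # any other shape (e.g. a list) -> False
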
def render_template(self, template, **kwargs):
    '''
    Enforce the states in a template
    '''
    high = compile_template(template,
                            self.rend,
                            self.opts[u'renderer'],
                            self.opts[u'renderer_blacklist'],
                            self.opts[u'renderer_whitelist'],
                            **kwargs)
    if not high:
        return high
    return self.pad_funcs(high)

def pad_funcs(self, high):
    '''
    Turns dot delimited function refs into function strings
    '''
    for name in high:
        if not isinstance(high[name], dict):
            if isinstance(high[name], six.string_types):
                if u'.' in high[name]:
                    comps = high[name].split(u'.')
                    if len(comps) >= 2:
                        comps[1] = u'.'.join(comps[1:len(comps)])
                    high[name] = {comps[0]: [comps[1]]}
                    continue
            continue
        skeys = set()
        for key in sorted(high[name]):
            if key.startswith(u'_'):
                continue
            if not isinstance(high[name][key], list):
                continue
            if u'.' in key:
                comps = key.split(u'.')
                if len(comps) >= 2:
                    comps[1] = u'.'.join(comps[1:len(comps)])
                if comps[0] in skeys:
                    continue
                high[name][comps[0]] = high[name].pop(key)
                high[name][comps[0]].append(comps[1])
                skeys.add(comps[0])
                continue
            skeys.add(key)
    return high

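A worked example of the padding (values chosen for illustration):

    # {'/tmp/managed': 'file.managed'}
    #     becomes {'/tmp/managed': {'file': ['managed']}}
    # and a dotted key whose value is a list,
    # {'/tmp/managed': {'file.managed': [{'source': 'salt://managed'}]}}
    #     becomes {'/tmp/managed': {'file': [{'source': 'salt://managed'}, 'managed']}}
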
def verify_high(self, high):
    '''
    Verify that the high data is viable and follows the data structure
    '''
    errors = []
    if not isinstance(high, dict):
        errors.append(u'High data is not a dictionary and is invalid')
    reqs = OrderedDict()
    for name, body in six.iteritems(high):
        if name.startswith(u'__'):
            continue
        if not isinstance(name, six.string_types):
            errors.append(u"ID '{0}' in SLS '{1}' is not formed as a string, but is a {2}".format(name, body[u'__sls__'], type(name).__name__))
        if not isinstance(body, dict):
            err = u'The type {0} in {1} is not formatted as a dictionary'.format(name, body)
            errors.append(err)
            continue
        for state in body:
            if state.startswith(u'__'):
                continue
            if not isinstance(body[state], list):
                errors.append(u"State '{0}' in SLS '{1}' is not formed as a list".format(name, body[u'__sls__']))
            else:
                fun = 0
                if u'.' in state:
                    fun += 1
                for arg in body[state]:
                    if isinstance(arg, six.string_types):
                        fun += 1
                        if u' ' in arg.strip():
                            errors.append(u'The function "{0}" in state "{1}" in SLS "{2}" has whitespace, a function with whitespace is not supported, perhaps this is an argument that is missing a ":"'.format(arg, name, body[u'__sls__']))
                    elif isinstance(arg, dict):
                        argfirst = next(iter(arg))
                        if argfirst in (u'require', u'watch', u'prereq', u'onchanges'):
                            if not isinstance(arg[argfirst], list):
                                errors.append(u"The {0} statement in state '{1}' in SLS '{2}' needs to be formed as a list".format(argfirst, name, body[u'__sls__']))
                            else:
                                reqs[name] = {u'state': state}
                                for req in arg[argfirst]:
                                    if isinstance(req, six.string_types):
                                        req = {u'id': req}
                                    if not isinstance(req, dict):
                                        err = u'Requisite declaration {0} in SLS {1} is not formed as a single key dictionary'.format(req, body[u'__sls__'])
                                        errors.append(err)
                                        continue
                                    req_key = next(iter(req))
                                    req_val = req[req_key]
                                    if u'.' in req_key:
                                        errors.append(u"Invalid requisite type '{0}' in state '{1}', in SLS '{2}'. Requisite types must not contain dots, did you mean '{3}'?".format(req_key, name, body[u'__sls__'], req_key[:req_key.find(u'.')]))
                                    if not ishashable(req_val):
                                        errors.append(u'Illegal requisite "{0}", is SLS {1}\n'.format(str(req_val), body[u'__sls__']))
                                        continue
                                    reqs[name][req_val] = req_key
                                    if req_val in reqs:
                                        if name in reqs[req_val]:
                                            if reqs[req_val][name] == state:
                                                if reqs[req_val][u'state'] == reqs[name][req_val]:
                                                    err = u'A recursive requisite was found, SLS "{0}" ID "{1}" ID "{2}"'.format(body[u'__sls__'], name, req_val)
                                                    errors.append(err)
                        if len(list(arg)) != 1:
                            errors.append(u"Multiple dictionaries defined in argument of state '{0}' in SLS '{1}'".format(name, body[u'__sls__']))
                if not fun:
                    if state == u'require' or state == u'watch':
                        continue
                    errors.append(u"No function declared in state '{0}' in SLS '{1}'".format(state, body[u'__sls__']))
                elif fun > 1:
                    errors.append(u"Too many functions declared in state '{0}' in SLS '{1}'".format(state, body[u'__sls__']))
    return errors

def order_chunks(self, chunks):
    '''
    Sort the chunk list verifying that the chunks follow the order
    specified in the order options.
    '''
    cap = 1
    for chunk in chunks:
        if u'order' in chunk:
            if not isinstance(chunk[u'order'], int):
                continue
            chunk_order = chunk[u'order']
            if chunk_order > cap - 1 and chunk_order > 0:
                cap = chunk_order + 100
    for chunk in chunks:
        if u'order' not in chunk:
            chunk[u'order'] = cap
            continue
        if not isinstance(chunk[u'order'], (int, float)):
            if chunk[u'order'] == u'last':
                chunk[u'order'] = cap + 1000000
            elif chunk[u'order'] == u'first':
                chunk[u'order'] = 0
            else:
                chunk[u'order'] = cap
        if u'name_order' in chunk:
            chunk[u'order'] = chunk[u'order'] + chunk.pop(u'name_order') / 10000.0
        if chunk[u'order'] < 0:
            chunk[u'order'] = cap + 1000000 + chunk[u'order']
        chunk[u'name'] = sdecode(chunk[u'name'])
    chunks.sort(key=lambda chunk: (chunk[u'order'], u'{0[state]}{0[name]}{0[fun]}'.format(chunk)))
    return chunks

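The net effect, sketched on trimmed chunks (only the order values are shown; state/name/fun merely break ties in the final sort):

    # input orders:  1, 'first', 'last', -1, (absent)
    # cap = 101 (highest explicit positive int order + 100)
    # resolved:      1, 0, cap + 1000000, cap + 1000000 - 1, cap
    # 'name_order' adds n / 10000.0 so 'names' entries keep their listed order
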
'"Compile" the high data as it is retrieved from the CLI or YAML into the individual state executor structures'
def compile_high_data(self, high):
chunks = [] for (name, body) in six.iteritems(high): if name.startswith(u'__'): continue for (state, run) in six.iteritems(body): funcs = set() names = [] if state.startswith(u'__'): continue chunk = {u'state': state, u'name': name} if (u'__sls__' in body): chunk[u'__sls__'] = body[u'__sls__'] if (u'__env__' in body): chunk[u'__env__'] = body[u'__env__'] chunk[u'__id__'] = name for arg in run: if isinstance(arg, six.string_types): funcs.add(arg) continue if isinstance(arg, dict): for (key, val) in six.iteritems(arg): if (key == u'names'): for _name in val: if (_name not in names): names.append(_name) continue else: chunk.update(arg) if names: name_order = 1 for entry in names: live = copy.deepcopy(chunk) if isinstance(entry, dict): low_name = next(six.iterkeys(entry)) live[u'name'] = low_name list(map(live.update, entry[low_name])) else: live[u'name'] = entry live[u'name_order'] = name_order name_order = (name_order + 1) for fun in funcs: live[u'fun'] = fun chunks.append(live) else: live = copy.deepcopy(chunk) for fun in funcs: live[u'fun'] = fun chunks.append(live) chunks = self.order_chunks(chunks) return chunks
def apply_exclude(self, high):
    '''
    Read in the __exclude__ list and remove all excluded objects from the
    high data
    '''
    if u'__exclude__' not in high:
        return high
    ex_sls = set()
    ex_id = set()
    exclude = high.pop(u'__exclude__')
    for exc in exclude:
        if isinstance(exc, six.string_types):
            ex_sls.add(exc)
        if isinstance(exc, dict):
            if len(exc) != 1:
                continue
            key = next(six.iterkeys(exc))
            if key == u'sls':
                ex_sls.add(exc[u'sls'])
            elif key == u'id':
                ex_id.add(exc[u'id'])
    if ex_sls:
        for name, body in six.iteritems(high):
            if name.startswith(u'__'):
                continue
            if body.get(u'__sls__', u'') in ex_sls:
                ex_id.add(name)
    for id_ in ex_id:
        if id_ in high:
            high.pop(id_)
    return high

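The exclude structure it consumes mixes bare SLS names with single-key dicts; illustrative data:

    # high['__exclude__'] = ['unwanted_sls',
    #                        {'sls': 'other_sls'},
    #                        {'id': 'some_state_id'}]
    # Every ID whose '__sls__' is excluded, plus every explicitly excluded
    # ID, is popped from the high data before compilation.
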
def _gather_pillar(self):
    '''
    Whenever a state run starts, gather the pillar data fresh
    '''
    if self._pillar_override:
        if self._pillar_enc:
            try:
                self._pillar_override = salt.utils.crypt.decrypt(
                    self._pillar_override,
                    self._pillar_enc,
                    translate_newlines=True,
                    renderers=getattr(self, u'rend', None),
                    opts=self.opts,
                    valid_rend=self.opts[u'decrypt_pillar_renderers'])
            except Exception as exc:
                log.error(u'Failed to decrypt pillar override: %s', exc)
        if isinstance(self._pillar_override, six.string_types):
            try:
                self._pillar_override = yamlloader.load(
                    self._pillar_override,
                    Loader=yamlloader.SaltYamlSafeLoader)
            except Exception as exc:
                log.error(u'Failed to load CLI pillar override')
                log.exception(exc)
        if not isinstance(self._pillar_override, dict):
            log.error(u'Pillar override was not passed as a dictionary')
            self._pillar_override = None
    pillar = salt.pillar.get_pillar(
        self.opts,
        self.opts[u'grains'],
        self.opts[u'id'],
        self.opts[u'environment'],
        pillar_override=self._pillar_override,
        pillarenv=self.opts.get(u'pillarenv'))
    return pillar.compile_pillar()

def _mod_init(self, low):
    '''
    Check the module initialization function, if this is the first run of
    a state package that has a mod_init function, then execute the
    mod_init function in the state module.
    '''
    try:
        self.states[u'{0}.{1}'.format(low[u'state'], low[u'fun'])]
    except KeyError:
        return
    minit = u'{0}.mod_init'.format(low[u'state'])
    if low[u'state'] not in self.mod_init:
        if minit in self.states._dict:
            mret = self.states[minit](low)
            if not mret:
                return
            self.mod_init.add(low[u'state'])

def _mod_aggregate(self, low, running, chunks):
    '''
    Execute the aggregation systems to runtime modify the low chunk
    '''
    agg_opt = self.functions[u'config.option'](u'state_aggregate')
    if u'aggregate' in low:
        agg_opt = low[u'aggregate']
    if agg_opt is True:
        agg_opt = [low[u'state']]
    elif not isinstance(agg_opt, list):
        return low
    if low[u'state'] in agg_opt and not low.get(u'__agg__'):
        agg_fun = u'{0}.mod_aggregate'.format(low[u'state'])
        if agg_fun in self.states:
            try:
                low = self.states[agg_fun](low, chunks, running)
                low[u'__agg__'] = True
            except TypeError:
                log.error(u'Failed to execute aggregate for state %s', low[u'state'])
    return low

def _run_check(self, low_data):
    '''
    Check that unless doesn't return 0, and that onlyif returns a 0.
    '''
    ret = {u'result': False}
    cmd_opts = {}
    if u'shell' in self.opts[u'grains']:
        cmd_opts[u'shell'] = self.opts[u'grains'].get(u'shell')
    if u'onlyif' in low_data:
        if not isinstance(low_data[u'onlyif'], list):
            low_data_onlyif = [low_data[u'onlyif']]
        else:
            low_data_onlyif = low_data[u'onlyif']
        for entry in low_data_onlyif:
            if not isinstance(entry, six.string_types):
                ret.update({u'comment': u'onlyif execution failed, bad type passed', u'result': False})
                return ret
            cmd = self.functions[u'cmd.retcode'](entry, ignore_retcode=True, python_shell=True, **cmd_opts)
            log.debug(u'Last command return code: %s', cmd)
            if cmd != 0 and ret[u'result'] is False:
                ret.update({u'comment': u'onlyif execution failed', u'skip_watch': True, u'result': True})
                return ret
            elif cmd == 0:
                ret.update({u'comment': u'onlyif execution succeeded', u'result': False})
        return ret
    if u'unless' in low_data:
        if not isinstance(low_data[u'unless'], list):
            low_data_unless = [low_data[u'unless']]
        else:
            low_data_unless = low_data[u'unless']
        for entry in low_data_unless:
            if not isinstance(entry, six.string_types):
                ret.update({u'comment': u'unless execution failed, bad type passed', u'result': False})
                return ret
            cmd = self.functions[u'cmd.retcode'](entry, ignore_retcode=True, python_shell=True, **cmd_opts)
            log.debug(u'Last command return code: %s', cmd)
            if cmd == 0 and ret[u'result'] is False:
                ret.update({u'comment': u'unless execution succeeded', u'skip_watch': True, u'result': True})
            elif cmd != 0:
                ret.update({u'comment': u'unless execution failed', u'result': False})
                return ret
        return ret
    return ret

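Hypothetical low data exercising both gates (the commands are illustrative):

    # low_data = {'name': 'deploy',
    #             'onlyif': ['test -f /etc/app.conf'],
    #             'unless': 'grep -q done /var/log/app.log'}
    # Any onlyif command exiting non-zero skips the state (skip_watch=True);
    # any unless command exiting 0 skips it the same way.
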
def _run_check_cmd(self, low_data):
    '''
    Alter the way a successful state run is determined
    '''
    ret = {u'result': False}
    cmd_opts = {}
    if u'shell' in self.opts[u'grains']:
        cmd_opts[u'shell'] = self.opts[u'grains'].get(u'shell')
    for entry in low_data[u'check_cmd']:
        cmd = self.functions[u'cmd.retcode'](entry, ignore_retcode=True, python_shell=True, **cmd_opts)
        log.debug(u'Last command return code: %s', cmd)
        if cmd == 0 and ret[u'result'] is False:
            ret.update({u'comment': u'check_cmd determined the state succeeded', u'result': True})
        elif cmd != 0:
            ret.update({u'comment': u'check_cmd determined the state failed', u'result': False})
            return ret
    return ret

def reset_run_num(self):
    '''
    Reset the run_num value to 0
    '''
    self.__run_num = 0

def _load_states(self):
    '''
    Read the state loader value and load up the correct states subsystem
    '''
    if self.states_loader == u'thorium':
        self.states = salt.loader.thorium(self.opts, self.functions, {})
    else:
        self.states = salt.loader.states(self.opts, self.functions, self.utils,
                                         self.serializers, proxy=self.proxy)

def load_modules(self, data=None, proxy=None):
    '''
    Load the modules into the state
    '''
    log.info(u'Loading fresh modules for state activity')
    self.utils = salt.loader.utils(self.opts)
    self.functions = salt.loader.minion_mods(self.opts, self.state_con,
                                             utils=self.utils, proxy=self.proxy)
    if isinstance(data, dict):
        if data.get(u'provider', False):
            if isinstance(data[u'provider'], six.string_types):
                providers = [{data[u'state']: data[u'provider']}]
            elif isinstance(data[u'provider'], list):
                providers = data[u'provider']
            else:
                providers = {}
            for provider in providers:
                for mod in provider:
                    funcs = salt.loader.raw_mod(self.opts, provider[mod], self.functions)
                    if funcs:
                        for func in funcs:
                            f_key = u'{0}{1}'.format(mod, func[func.rindex(u'.'):])
                            self.functions[f_key] = funcs[func]
    self.serializers = salt.loader.serializers(self.opts)
    self._load_states()
    self.rend = salt.loader.render(self.opts, self.functions,
                                   states=self.states, proxy=self.proxy)

def module_refresh(self):
    '''
    Refresh all the modules
    '''
    log.debug(u'Refreshing modules...')
    if self.opts[u'grains'].get(u'os') != u'MacOS':
        try:
            reload_module(site)
        except RuntimeError:
            log.error(u'Error encountered during module reload. Modules were not reloaded.')
        except TypeError:
            log.error(u'Error encountered during module reload. Modules were not reloaded.')
    self.load_modules()
    if not self.opts.get(u'local', False) and self.opts.get(u'multiprocessing', True):
        self.functions[u'saltutil.refresh_modules']()

def check_refresh(self, data, ret):
    '''
    Check to see if the modules for this state instance need to be updated,
    only update if the state is a file or a package and if it changed
    something. If the file function is managed check to see if the file is a
    possible module type, e.g. a python, pyx, or .so. Always refresh if the
    function is recurse, since that can lay down anything.
    '''
    _reload_modules = False
    if data.get(u'reload_grains', False):
        log.debug(u'Refreshing grains...')
        self.opts[u'grains'] = salt.loader.grains(self.opts)
        _reload_modules = True
    if data.get(u'reload_pillar', False):
        log.debug(u'Refreshing pillar...')
        self.opts[u'pillar'] = self._gather_pillar()
        _reload_modules = True
    if not ret[u'changes']:
        if data.get(u'force_reload_modules', False):
            self.module_refresh()
        return
    if data.get(u'reload_modules', False) or _reload_modules:
        self.module_refresh()
        return
    if data[u'state'] == u'file':
        if data[u'fun'] == u'managed':
            if data[u'name'].endswith((u'.py', u'.pyx', u'.pyo', u'.pyc', u'.so')):
                self.module_refresh()
        elif data[u'fun'] == u'recurse':
            self.module_refresh()
        elif data[u'fun'] == u'symlink':
            if u'bin' in data[u'name']:
                self.module_refresh()
    elif data[u'state'] in (u'pkg', u'ports'):
        self.module_refresh()

def verify_ret(self, ret):
    '''
    Verify the state return data
    '''
    if not isinstance(ret, dict):
        raise SaltException(u'Malformed state return, return must be a dict')
    bad = []
    for val in [u'name', u'result', u'changes', u'comment']:
        if val not in ret:
            bad.append(val)
    if bad:
        raise SaltException(u'The following keys were not present in the state return: {0}'.format(u','.join(bad)))

def verify_data(self, data):
    '''
    Verify the data, return an error statement if something is wrong
    '''
    errors = []
    if u'state' not in data:
        errors.append(u'Missing "state" data')
    if u'fun' not in data:
        errors.append(u'Missing "fun" data')
    if u'name' not in data:
        errors.append(u'Missing "name" data')
    if data[u'name'] and not isinstance(data[u'name'], six.string_types):
        errors.append(u"ID '{0}' {1}is not formed as a string, but is a {2}".format(
            data[u'name'],
            u"in SLS '{0}' ".format(data[u'__sls__']) if u'__sls__' in data else u'',
            type(data[u'name']).__name__))
    if errors:
        return errors
    full = data[u'state'] + u'.' + data[u'fun']
    if full not in self.states:
        if u'__sls__' in data:
            errors.append(u"State '{0}' was not found in SLS '{1}'".format(full, data[u'__sls__']))
            reason = self.states.missing_fun_string(full)
            if reason:
                errors.append(u'Reason: {0}'.format(reason))
        else:
            errors.append(u"Specified state '{0}' was not found".format(full))
    else:
        aspec = salt.utils.args.get_function_argspec(self.states[full])
        arglen = 0
        deflen = 0
        if isinstance(aspec.args, list):
            arglen = len(aspec.args)
        if isinstance(aspec.defaults, tuple):
            deflen = len(aspec.defaults)
        for ind in range(arglen - deflen):
            if aspec.args[ind] not in data:
                errors.append(u'Missing parameter {0} for state {1}'.format(aspec.args[ind], full))
    reqdec = u''
    if u'require' in data:
        reqdec = u'require'
    if u'watch' in data:
        if u'{0}.mod_watch'.format(data[u'state']) not in self.states:
            if u'require' in data:
                data[u'require'].extend(data.pop(u'watch'))
            else:
                data[u'require'] = data.pop(u'watch')
            reqdec = u'require'
        else:
            reqdec = u'watch'
    if reqdec:
        for req in data[reqdec]:
            reqfirst = next(iter(req))
            if data[u'state'] == reqfirst:
                if fnmatch.fnmatch(data[u'name'], req[reqfirst]) or fnmatch.fnmatch(data[u'__id__'], req[reqfirst]):
                    err = u'Recursive require detected in SLS {0} for require {1} in ID {2}'.format(
                        data[u'__sls__'], req, data[u'__id__'])
                    errors.append(err)
    return errors

def verify_high(self, high):
    '''
    Verify that the high data is viable and follows the data structure
    '''
    errors = []
    if not isinstance(high, dict):
        errors.append(u'High data is not a dictionary and is invalid')
    reqs = OrderedDict()
    for name, body in six.iteritems(high):
        try:
            if name.startswith(u'__'):
                continue
        except AttributeError:
            pass
        if not isinstance(name, six.string_types):
            errors.append(u"ID '{0}' in SLS '{1}' is not formed as a string, but is a {2}. It may need to be quoted.".format(name, body[u'__sls__'], type(name).__name__))
        if not isinstance(body, dict):
            err = u'The type {0} in {1} is not formatted as a dictionary'.format(name, body)
            errors.append(err)
            continue
        for state in body:
            if state.startswith(u'__'):
                continue
            if body[state] is None:
                errors.append(u"ID '{0}' in SLS '{1}' contains a short declaration ({2}) with a trailing colon. When not passing any arguments to a state, the colon must be omitted.".format(name, body[u'__sls__'], state))
                continue
            if not isinstance(body[state], list):
                errors.append(u"State '{0}' in SLS '{1}' is not formed as a list".format(name, body[u'__sls__']))
            else:
                fun = 0
                if u'.' in state:
                    fun += 1
                for arg in body[state]:
                    if isinstance(arg, six.string_types):
                        fun += 1
                        if u' ' in arg.strip():
                            errors.append(u'The function "{0}" in state "{1}" in SLS "{2}" has whitespace, a function with whitespace is not supported, perhaps this is an argument that is missing a ":"'.format(arg, name, body[u'__sls__']))
                    elif isinstance(arg, dict):
                        argfirst = next(iter(arg))
                        if argfirst == u'names':
                            if not isinstance(arg[argfirst], list):
                                errors.append(u"The 'names' argument in state '{0}' in SLS '{1}' needs to be formed as a list".format(name, body[u'__sls__']))
                        if argfirst in (u'require', u'watch', u'prereq', u'onchanges'):
                            if not isinstance(arg[argfirst], list):
                                errors.append(u"The {0} statement in state '{1}' in SLS '{2}' needs to be formed as a list".format(argfirst, name, body[u'__sls__']))
                            else:
                                reqs[name] = OrderedDict(state=state)
                                for req in arg[argfirst]:
                                    if isinstance(req, six.string_types):
                                        req = {u'id': req}
                                    if not isinstance(req, dict):
                                        err = u'Requisite declaration {0} in SLS {1} is not formed as a single key dictionary'.format(req, body[u'__sls__'])
                                        errors.append(err)
                                        continue
                                    req_key = next(iter(req))
                                    req_val = req[req_key]
                                    if u'.' in req_key:
                                        errors.append(u"Invalid requisite type '{0}' in state '{1}', in SLS '{2}'. Requisite types must not contain dots, did you mean '{3}'?".format(req_key, name, body[u'__sls__'], req_key[:req_key.find(u'.')]))
                                    if not ishashable(req_val):
                                        errors.append(u'Illegal requisite "{0}", please check your syntax.\n'.format(req_val))
                                        continue
                                    reqs[name][req_val] = req_key
                                    if req_val in reqs:
                                        if name in reqs[req_val]:
                                            if reqs[req_val][name] == state:
                                                if reqs[req_val][u'state'] == reqs[name][req_val]:
                                                    err = u'A recursive requisite was found, SLS "{0}" ID "{1}" ID "{2}"'.format(body[u'__sls__'], name, req_val)
                                                    errors.append(err)
                        if len(list(arg)) != 1:
                            errors.append(u"Multiple dictionaries defined in argument of state '{0}' in SLS '{1}'".format(name, body[u'__sls__']))
                if not fun:
                    if state == u'require' or state == u'watch':
                        continue
                    errors.append(u"No function declared in state '{0}' in SLS '{1}'".format(state, body[u'__sls__']))
                elif fun > 1:
                    errors.append(u"Too many functions declared in state '{0}' in SLS '{1}'".format(state, body[u'__sls__']))
    return errors

def verify_chunks(self, chunks):
    '''
    Verify the chunks in a list of low data structures
    '''
    err = []
    for chunk in chunks:
        err += self.verify_data(chunk)
    return err

def order_chunks(self, chunks):
    '''
    Sort the chunk list verifying that the chunks follow the order
    specified in the order options.
    '''
    cap = 1
    for chunk in chunks:
        if u'order' in chunk:
            if not isinstance(chunk[u'order'], int):
                continue
            chunk_order = chunk[u'order']
            if chunk_order > cap - 1 and chunk_order > 0:
                cap = chunk_order + 100
    for chunk in chunks:
        if u'order' not in chunk:
            chunk[u'order'] = cap
            continue
        if not isinstance(chunk[u'order'], (int, float)):
            if chunk[u'order'] == u'last':
                chunk[u'order'] = cap + 1000000
            elif chunk[u'order'] == u'first':
                chunk[u'order'] = 0
            else:
                chunk[u'order'] = cap
        if u'name_order' in chunk:
            chunk[u'order'] = chunk[u'order'] + chunk.pop(u'name_order') / 10000.0
        if chunk[u'order'] < 0:
            chunk[u'order'] = cap + 1000000 + chunk[u'order']
    chunks.sort(key=lambda chunk: (chunk[u'order'], u'{0[state]}{0[name]}{0[fun]}'.format(chunk)))
    return chunks

'"Compile" the high data as it is retrieved from the CLI or YAML into the individual state executor structures'
def compile_high_data(self, high, orchestration_jid=None):
chunks = [] for (name, body) in six.iteritems(high): if name.startswith(u'__'): continue for (state, run) in six.iteritems(body): funcs = set() names = [] if state.startswith(u'__'): continue chunk = {u'state': state, u'name': name} if (orchestration_jid is not None): chunk[u'__orchestration_jid__'] = orchestration_jid if (u'__sls__' in body): chunk[u'__sls__'] = body[u'__sls__'] if (u'__env__' in body): chunk[u'__env__'] = body[u'__env__'] chunk[u'__id__'] = name for arg in run: if isinstance(arg, six.string_types): funcs.add(arg) continue if isinstance(arg, dict): for (key, val) in six.iteritems(arg): if (key == u'names'): for _name in val: if (_name not in names): names.append(_name) elif (key == u'state'): continue elif ((key == u'name') and (not isinstance(val, six.string_types))): chunk[key] = name else: chunk[key] = val if names: name_order = 1 for entry in names: live = copy.deepcopy(chunk) if isinstance(entry, dict): low_name = next(six.iterkeys(entry)) live[u'name'] = low_name list(map(live.update, entry[low_name])) else: live[u'name'] = entry live[u'name_order'] = name_order name_order += 1 for fun in funcs: live[u'fun'] = fun chunks.append(live) else: live = copy.deepcopy(chunk) for fun in funcs: live[u'fun'] = fun chunks.append(live) chunks = self.order_chunks(chunks) return chunks
def reconcile_extend(self, high):
    '''
    Pull the extend data and add it to the respective high data
    '''
    errors = []
    if u'__extend__' not in high:
        return high, errors
    ext = high.pop(u'__extend__')
    for ext_chunk in ext:
        for name, body in six.iteritems(ext_chunk):
            if name not in high:
                state_type = next(x for x in body if not x.startswith(u'__'))
                ids = find_name(name, state_type, high)
                if len(ids) != 1:
                    errors.append(u"Cannot extend ID '{0}' in '{1}:{2}'. It is not part of the high state.\nThis is likely due to a missing include statement or an incorrectly typed ID.\nEnsure that a state with an ID of '{0}' is available\nin environment '{1}' and to SLS '{2}'".format(name, body.get(u'__env__', u'base'), body.get(u'__sls__', u'base')))
                    continue
                else:
                    name = ids[0][0]
            for state, run in six.iteritems(body):
                if state.startswith(u'__'):
                    continue
                if state not in high[name]:
                    high[name][state] = run
                    continue
                for arg in run:
                    update = False
                    for hind in range(len(high[name][state])):
                        if isinstance(arg, six.string_types) and isinstance(high[name][state][hind], six.string_types):
                            high[name][state].pop(hind)
                            high[name][state].insert(hind, arg)
                            update = True
                            continue
                        if isinstance(arg, dict) and isinstance(high[name][state][hind], dict):
                            argfirst = next(iter(arg))
                            if argfirst == next(iter(high[name][state][hind])):
                                if argfirst in STATE_REQUISITE_KEYWORDS:
                                    high[name][state][hind][argfirst].extend(arg[argfirst])
                                else:
                                    high[name][state][hind] = arg
                                update = True
                            if argfirst == u'name' and next(iter(high[name][state][hind])) == u'names':
                                high[name][state][hind] = arg
                    if not update:
                        high[name][state].append(arg)
    return high, errors

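A sketch of the extend data this consumes, as an SLS 'extend:' block would produce it (the IDs and paths are illustrative):

    # high['__extend__'] = [
    #     {'apache': {'__sls__': 'www', '__env__': 'base',
    #                 'service': [{'watch': [{'file': '/etc/httpd/conf/httpd.conf'}]}]}}]
    # Requisite lists (watch, require, ...) are merged onto the existing ID;
    # other arguments overwrite, and unknown IDs yield the "Cannot extend ID" error.
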
def apply_exclude(self, high):
    '''
    Read in the __exclude__ list and remove all excluded objects from the
    high data
    '''
    if u'__exclude__' not in high:
        return high
    ex_sls = set()
    ex_id = set()
    exclude = high.pop(u'__exclude__')
    for exc in exclude:
        if isinstance(exc, six.string_types):
            ex_sls.add(exc)
        if isinstance(exc, dict):
            if len(exc) != 1:
                continue
            key = next(six.iterkeys(exc))
            if key == u'sls':
                ex_sls.add(exc[u'sls'])
            elif key == u'id':
                ex_id.add(exc[u'id'])
    if ex_sls:
        for name, body in six.iteritems(high):
            if name.startswith(u'__'):
                continue
            sls = body.get(u'__sls__', u'')
            if not sls:
                continue
            for ex_ in ex_sls:
                if fnmatch.fnmatch(sls, ex_):
                    ex_id.add(name)
    for id_ in ex_id:
        if id_ in high:
            high.pop(id_)
    return high

def requisite_in(self, high):
    '''
    Extend the data reference with requisite_in arguments
    '''
    req_in = set([u'require_in', u'watch_in', u'onfail_in', u'onchanges_in', u'use', u'use_in', u'prereq', u'prereq_in'])
    req_in_all = req_in.union(set([u'require', u'watch', u'onfail', u'onfail_stop', u'onchanges']))
    extend = {}
    errors = []
    for id_, body in six.iteritems(high):
        if not isinstance(body, dict):
            continue
        for state, run in six.iteritems(body):
            if state.startswith(u'__'):
                continue
            for arg in run:
                if isinstance(arg, dict):
                    if len(arg) < 1:
                        continue
                    key = next(iter(arg))
                    if key not in req_in:
                        continue
                    rkey = key.split(u'_')[0]
                    items = arg[key]
                    if isinstance(items, dict):
                        for _state, name in six.iteritems(items):
                            found = False
                            if name not in extend:
                                extend[name] = OrderedDict()
                            if u'.' in _state:
                                errors.append(u"Invalid requisite in {0}: {1} for {2}, in SLS '{3}'. Requisites must not contain dots, did you mean '{4}'?".format(rkey, _state, name, body[u'__sls__'], _state[:_state.find(u'.')]))
                                _state = _state.split(u'.')[0]
                            if _state not in extend[name]:
                                extend[name][_state] = []
                            extend[name][u'__env__'] = body[u'__env__']
                            extend[name][u'__sls__'] = body[u'__sls__']
                            for ind in range(len(extend[name][_state])):
                                if next(iter(extend[name][_state][ind])) == rkey:
                                    extend[name][_state][ind][rkey].append({state: id_})
                                    found = True
                            if found:
                                continue
                            extend[name][_state].append({rkey: [{state: id_}]})
                    if isinstance(items, list):
                        hinges = []
                        for ind in items:
                            if not isinstance(ind, dict):
                                continue
                            if len(ind) < 1:
                                continue
                            pstate = next(iter(ind))
                            pname = ind[pstate]
                            if pstate == u'sls':
                                hinges = find_sls_ids(pname, high)
                            else:
                                hinges.append((pname, pstate))
                            if u'.' in pstate:
                                errors.append(u"Invalid requisite in {0}: {1} for {2}, in SLS '{3}'. Requisites must not contain dots, did you mean '{4}'?".format(rkey, pstate, pname, body[u'__sls__'], pstate[:pstate.find(u'.')]))
                                pstate = pstate.split(u'.')[0]
                            for tup in hinges:
                                name, _state = tup
                                if key == u'prereq_in':
                                    if id_ not in extend:
                                        extend[id_] = OrderedDict()
                                    if state not in extend[id_]:
                                        extend[id_][state] = []
                                    extend[id_][state].append({u'prerequired': [{_state: name}]})
                                if key == u'prereq':
                                    ext_ids = find_name(name, _state, high)
                                    for ext_id, _req_state in ext_ids:
                                        if ext_id not in extend:
                                            extend[ext_id] = OrderedDict()
                                        if _req_state not in extend[ext_id]:
                                            extend[ext_id][_req_state] = []
                                        extend[ext_id][_req_state].append({u'prerequired': [{state: id_}]})
                                    continue
                                if key == u'use_in':
                                    ext_ids = find_name(name, _state, high)
                                    for ext_id, _req_state in ext_ids:
                                        if not ext_id:
                                            continue
                                        ext_args = state_args(ext_id, _state, high)
                                        if ext_id not in extend:
                                            extend[ext_id] = OrderedDict()
                                        if _req_state not in extend[ext_id]:
                                            extend[ext_id][_req_state] = []
                                        ignore_args = req_in_all.union(ext_args)
                                        for arg in high[id_][state]:
                                            if not isinstance(arg, dict):
                                                continue
                                            if len(arg) != 1:
                                                continue
                                            if next(iter(arg)) in ignore_args:
                                                continue
                                            if next(six.iterkeys(arg)) == u'name':
                                                continue
                                            if next(six.iterkeys(arg)) == u'names':
                                                continue
                                            extend[ext_id][_req_state].append(arg)
                                    continue
                                if key == u'use':
                                    ext_ids = find_name(name, _state, high)
                                    for ext_id, _req_state in ext_ids:
                                        if not ext_id:
                                            continue
                                        loc_args = state_args(id_, state, high)
                                        if id_ not in extend:
                                            extend[id_] = OrderedDict()
                                        if state not in extend[id_]:
                                            extend[id_][state] = []
                                        ignore_args = req_in_all.union(loc_args)
                                        for arg in high[ext_id][_req_state]:
                                            if not isinstance(arg, dict):
                                                continue
                                            if len(arg) != 1:
                                                continue
                                            if next(iter(arg)) in ignore_args:
                                                continue
                                            if next(six.iterkeys(arg)) == u'name':
                                                continue
                                            if next(six.iterkeys(arg)) == u'names':
                                                continue
                                            extend[id_][state].append(arg)
                                    continue
                                found = False
                                if name not in extend:
                                    extend[name] = OrderedDict()
                                if _state not in extend[name]:
                                    extend[name][_state] = []
                                extend[name][u'__env__'] = body[u'__env__']
                                extend[name][u'__sls__'] = body[u'__sls__']
                                for ind in range(len(extend[name][_state])):
                                    if next(iter(extend[name][_state][ind])) == rkey:
                                        extend[name][_state][ind][rkey].append({state: id_})
                                        found = True
                                if found:
                                    continue
                                extend[name][_state].append({rkey: [{state: id_}]})
    high[u'__extend__'] = []
    for key, val in six.iteritems(extend):
        high[u'__extend__'].append({key: val})
    req_in_high, req_in_errors = self.reconcile_extend(high)
    errors.extend(req_in_errors)
    return req_in_high, errors

def _call_parallel_target(self, cdata, low):
    '''
    The target function to call that will create the parallel thread/process
    '''
    tag = _gen_tag(low)
    try:
        ret = self.states[cdata[u'full']](*cdata[u'args'], **cdata[u'kwargs'])
    except Exception:
        trb = traceback.format_exc()
        if len(cdata[u'args']) > 0:
            name = cdata[u'args'][0]
        elif u'name' in cdata[u'kwargs']:
            name = cdata[u'kwargs'][u'name']
        else:
            name = low.get(u'name', low.get(u'__id__'))
        ret = {
            u'result': False,
            u'name': name,
            u'changes': {},
            u'comment': u'An exception occurred in this state: {0}'.format(trb)}
    troot = os.path.join(self.opts[u'cachedir'], self.jid)
    tfile = os.path.join(troot, tag)
    if not os.path.isdir(troot):
        try:
            os.makedirs(troot)
        except OSError:
            pass
    with salt.utils.files.fopen(tfile, u'wb+') as fp_:
        fp_.write(msgpack.dumps(ret))

'Call the state defined in the given cdata in parallel'
def call_parallel(self, cdata, low):
proc = salt.utils.process.MultiprocessingProcess(target=self._call_parallel_target, args=(cdata, low)) proc.start() ret = {u'name': cdata[u'args'][0], u'result': None, u'changes': {}, u'comment': u'Started in a separate process', u'proc': proc} return ret
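A minimal analogue using the standard library directly, with multiprocessing.Process standing in for Salt's MultiprocessingProcess wrapper (a sketch under that assumption, not the project's API):

import multiprocessing

def run_parallel(target, cdata, low):
    proc = multiprocessing.Process(target=target, args=(cdata, low))
    proc.start()
    # 'result' stays None until reconcile_procs collects the cached return
    return {'name': cdata['args'][0], 'result': None, 'changes': {},
            'comment': 'Started in a separate process', 'proc': proc}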
'Call a state directly with the low data structure, verifying data before processing.'
def call(self, low, chunks=None, running=None, retries=1):
utc_start_time = datetime.datetime.utcnow() local_start_time = (utc_start_time - (datetime.datetime.utcnow() - datetime.datetime.now())) log.info(u'Running state [%s] at time %s', (low[u'name'].strip() if isinstance(low[u'name'], six.string_types) else low[u'name']), local_start_time.time().isoformat()) errors = self.verify_data(low) if errors: ret = {u'result': False, u'name': low[u'name'], u'changes': {}, u'comment': u''} for err in errors: ret[u'comment'] += u'{0}\n'.format(err) ret[u'__run_num__'] = self.__run_num self.__run_num += 1 format_log(ret) self.check_refresh(low, ret) return ret else: ret = {u'result': False, u'name': low[u'name'], u'changes': {}} self.state_con[u'runas'] = low.get(u'runas', None) if ((low[u'state'] == u'cmd') and (u'password' in low)): self.state_con[u'runas_password'] = low[u'password'] else: self.state_con[u'runas_password'] = low.get(u'runas_password', None) if (not low.get(u'__prereq__')): log.info(u'Executing state %s.%s for [%s]', low[u'state'], low[u'fun'], (low[u'name'].strip() if isinstance(low[u'name'], six.string_types) else low[u'name'])) if (u'provider' in low): self.load_modules(low) state_func_name = u'{0[state]}.{0[fun]}'.format(low) cdata = salt.utils.format_call(self.states[state_func_name], low, initial_ret={u'full': state_func_name}, expected_extra_kws=STATE_INTERNAL_KEYWORDS) inject_globals = {u'__low__': immutabletypes.freeze(low), u'__running__': (immutabletypes.freeze(running) if running else {}), u'__instance_id__': self.instance_id, u'__lowstate__': (immutabletypes.freeze(chunks) if chunks else {})} if self.inject_globals: inject_globals.update(self.inject_globals) if low.get(u'__prereq__'): test = sys.modules[self.states[cdata[u'full']].__module__].__opts__[u'test'] sys.modules[self.states[cdata[u'full']].__module__].__opts__[u'test'] = True try: if (((u'unless' in low) and (u'{0[state]}.mod_run_check'.format(low) not in self.states)) or ((u'onlyif' in low) and (u'{0[state]}.mod_run_check'.format(low) not in self.states))): ret.update(self._run_check(low)) if (u'saltenv' in low): inject_globals[u'__env__'] = six.text_type(low[u'saltenv']) elif isinstance(cdata[u'kwargs'].get(u'env', None), six.string_types): inject_globals[u'__env__'] = six.text_type(cdata[u'kwargs'][u'env']) elif (u'__env__' in low): inject_globals[u'__env__'] = six.text_type(low[u'__env__']) else: inject_globals[u'__env__'] = u'base' if (u'__orchestration_jid__' in low): inject_globals[u'__orchestration_jid__'] = low[u'__orchestration_jid__'] if ((u'result' not in ret) or (ret[u'result'] is False)): self.states.inject_globals = inject_globals if self.mocked: ret = mock_ret(cdata) elif ((not low.get(u'__prereq__')) and low.get(u'parallel')): ret = self.call_parallel(cdata, low) else: ret = self.states[cdata[u'full']](*cdata[u'args'], **cdata[u'kwargs']) self.states.inject_globals = {} if ((u'check_cmd' in low) and (u'{0[state]}.mod_run_check_cmd'.format(low) not in self.states)): ret.update(self._run_check_cmd(low)) self.verify_ret(ret) except Exception: trb = traceback.format_exc() if (len(cdata[u'args']) > 0): name = cdata[u'args'][0] elif (u'name' in cdata[u'kwargs']): name = cdata[u'kwargs'][u'name'] else: name = low.get(u'name', low.get(u'__id__')) ret = {u'result': False, u'name': name, u'changes': {}, u'comment': u'An exception occurred in this state: {0}'.format(trb)} finally: if low.get(u'__prereq__'): sys.modules[self.states[cdata[u'full']].__module__].__opts__[u'test'] = test self.state_con.pop('runas') self.state_con.pop('runas_password') if 
(u'warnings' in cdata): ret.setdefault(u'warnings', []).extend(cdata[u'warnings']) if (u'provider' in low): self.load_modules() if low.get(u'__prereq__'): low[u'__prereq__'] = False return ret ret[u'__sls__'] = low.get(u'__sls__') ret[u'__run_num__'] = self.__run_num self.__run_num += 1 format_log(ret) self.check_refresh(low, ret) utc_finish_time = datetime.datetime.utcnow() timezone_delta = (datetime.datetime.utcnow() - datetime.datetime.now()) local_finish_time = (utc_finish_time - timezone_delta) local_start_time = (utc_start_time - timezone_delta) ret[u'start_time'] = local_start_time.time().isoformat() delta = (utc_finish_time - utc_start_time) duration = (((delta.seconds * 1000000) + delta.microseconds) / 1000.0) ret[u'duration'] = duration ret[u'__id__'] = low[u'__id__'] log.info(u'Completed state [%s] at time %s (duration_in_ms=%s)', (low[u'name'].strip() if isinstance(low[u'name'], six.string_types) else low[u'name']), local_finish_time.time().isoformat(), duration) if (u'retry' in low): low[u'retry'] = self.verify_retry_data(low[u'retry']) if (not sys.modules[self.states[cdata[u'full']].__module__].__opts__[u'test']): if (low[u'retry'][u'until'] != ret[u'result']): if (low[u'retry'][u'attempts'] > retries): interval = low[u'retry'][u'interval'] if (low[u'retry'][u'splay'] != 0): interval = (interval + random.randint(0, low[u'retry'][u'splay'])) log.info(u'State result does not match retry until value, state will be re-run in %s seconds', interval) self.functions[u'test.sleep'](interval) retry_ret = self.call(low, chunks, running, retries=(retries + 1)) orig_ret = ret ret = retry_ret ret[u'comment'] = u'\n'.join([u'Attempt {0}: Returned a result of "{1}", with the following comment: "{2}"'.format(retries, orig_ret[u'result'], orig_ret[u'comment']), (u'' if (not ret[u'comment']) else ret[u'comment'])]) ret[u'duration'] = ((ret[u'duration'] + orig_ret[u'duration']) + (interval * 1000)) if (retries == 1): ret[u'start_time'] = orig_ret[u'start_time'] else: ret[u'comment'] = u' '.join([(u'' if (not ret[u'comment']) else ret[u'comment']), u'The state would be retried every {1} seconds (with a splay of up to {3} seconds) a maximum of {0} times or until a result of {2} is returned'.format(low[u'retry'][u'attempts'], low[u'retry'][u'interval'], low[u'retry'][u'until'], low[u'retry'][u'splay'])]) return ret
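The retry timing in the block above reduces to a small computation: the base interval plus a random splay of up to 'splay' seconds. A standalone sketch:

import random

def retry_interval(retry):
    # 'retry' is the dict shape produced by verify_retry_data, e.g.
    # {'until': True, 'attempts': 2, 'splay': 10, 'interval': 30}
    interval = retry['interval']
    if retry['splay'] != 0:
        interval += random.randint(0, retry['splay'])
    return interval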
'Verify the specified retry data'
def verify_retry_data(self, retry_data):
retry_defaults = {u'until': True, u'attempts': 2, u'splay': 0, u'interval': 30} expected_data = {u'until': bool, u'attempts': int, u'interval': int, u'splay': int} validated_retry_data = {} if isinstance(retry_data, dict): for (expected_key, value_type) in six.iteritems(expected_data): if (expected_key in retry_data): if isinstance(retry_data[expected_key], value_type): validated_retry_data[expected_key] = retry_data[expected_key] else: log.warning(u"An invalid value was passed for the retry %s, using default value '%s'", expected_key, retry_defaults[expected_key]) validated_retry_data[expected_key] = retry_defaults[expected_key] else: validated_retry_data[expected_key] = retry_defaults[expected_key] else: log.warning(u'State is set to retry, but a valid dict for retry configuration was not found. Using retry defaults') validated_retry_data = retry_defaults return validated_retry_data
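A standalone sketch of the same validation logic, useful for seeing the fallback behavior; the function name is illustrative, not Salt's API:

def validate_retry(retry_data):
    defaults = {'until': True, 'attempts': 2, 'splay': 0, 'interval': 30}
    expected = {'until': bool, 'attempts': int, 'interval': int, 'splay': int}
    if not isinstance(retry_data, dict):
        # Anything other than a dict falls back to the defaults wholesale
        return dict(defaults)
    validated = {}
    for key, value_type in expected.items():
        value = retry_data.get(key, defaults[key])
        # A value of the wrong type is replaced by its default
        validated[key] = value if isinstance(value, value_type) else defaults[key]
    return validated

# An invalid 'attempts' falls back to the default of 2:
print(validate_retry({'attempts': 'five', 'interval': 10}))
# -> {'until': True, 'attempts': 2, 'interval': 10, 'splay': 0}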
'Iterate over a list of chunks and call them, checking for requires.'
def call_chunks(self, chunks):
disabled = {} if (u'state_runs_disabled' in self.opts[u'grains']): for low in chunks[:]: state_ = u'{0}.{1}'.format(low[u'state'], low[u'fun']) for pat in self.opts[u'grains'][u'state_runs_disabled']: if fnmatch.fnmatch(state_, pat): comment = u'The state function "{0}" is currently disabled by "{1}"; to re-enable, run state.enable {1}.'.format(state_, pat) _tag = _gen_tag(low) disabled[_tag] = {u'changes': {}, u'result': False, u'comment': comment, u'__run_num__': self.__run_num, u'__sls__': low[u'__sls__']} self.__run_num += 1 chunks.remove(low) break running = {} for low in chunks: if (u'__FAILHARD__' in running): running.pop(u'__FAILHARD__') return running tag = _gen_tag(low) if (tag not in running): running = self.call_chunk(low, running, chunks) if self.check_failhard(low, running): return running self.active = set() while True: if self.reconcile_procs(running): break time.sleep(0.01) ret = dict((list(disabled.items()) + list(running.items()))) return ret
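The disable check at the top of the method is a plain glob match of 'state.fun' against each pattern in the state_runs_disabled grain; a minimal sketch:

import fnmatch

def is_disabled(low, patterns):
    state_ = '{0}.{1}'.format(low['state'], low['fun'])
    return any(fnmatch.fnmatch(state_, pat) for pat in patterns)

# is_disabled({'state': 'pkg', 'fun': 'installed'}, ['pkg.*']) -> True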
'Check if the low data chunk should send a failhard signal'
def check_failhard(self, low, running):
tag = _gen_tag(low) if self.opts.get(u'test', False): return False if ((low.get(u'failhard', False) or self.opts[u'failhard']) and (tag in running)): if (running[tag][u'result'] is None): return False return (not running[tag][u'result']) return False
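Equivalently, as a standalone predicate (a sketch; both test mode and a still-pending None result suppress failhard):

def should_failhard(low, running, tag, global_failhard=False, test=False):
    if test:
        return False
    if (low.get('failhard', False) or global_failhard) and tag in running:
        result = running[tag]['result']
        # None means the state has not produced a definitive result yet
        return result is not None and not result
    return False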
'Check the running dict for processes and resolve them'
def reconcile_procs(self, running):
retset = set() for tag in running: proc = running[tag].get(u'proc') if proc: if (not proc.is_alive()): ret_cache = os.path.join(self.opts[u'cachedir'], self.jid, tag) if (not os.path.isfile(ret_cache)): ret = {u'result': False, u'comment': u'Parallel process failed to return', u'name': running[tag][u'name'], u'changes': {}} try: with salt.utils.files.fopen(ret_cache, u'rb') as fp_: ret = msgpack.loads(fp_.read()) except (OSError, IOError): ret = {u'result': False, u'comment': u'Parallel cache failure', u'name': running[tag][u'name'], u'changes': {}} running[tag].update(ret) running[tag].pop(u'proc') else: retset.add(False) return (False not in retset)
'Look into the running data to check the status of all requisite states'
def check_requisite(self, low, running, chunks, pre=False):
present = False if (u'watch' in low): if (u'{0}.mod_watch'.format(low[u'state']) not in self.states): if (u'require' in low): low[u'require'].extend(low.pop(u'watch')) else: low[u'require'] = low.pop(u'watch') else: present = True if (u'require' in low): present = True if (u'prerequired' in low): present = True if (u'prereq' in low): present = True if (u'onfail' in low): present = True if (u'onchanges' in low): present = True if (not present): return (u'met', ()) self.reconcile_procs(running) reqs = {u'require': [], u'watch': [], u'prereq': [], u'onfail': [], u'onchanges': []} if pre: reqs[u'prerequired'] = [] for r_state in reqs: if ((r_state in low) and (low[r_state] is not None)): for req in low[r_state]: if isinstance(req, six.string_types): req = {u'id': req} req = trim_req(req) found = False for chunk in chunks: req_key = next(iter(req)) req_val = req[req_key] if (req_val is None): continue if (req_key == u'sls'): if fnmatch.fnmatch(chunk[u'__sls__'], req_val): found = True reqs[r_state].append(chunk) continue try: if (fnmatch.fnmatch(chunk[u'name'], req_val) or fnmatch.fnmatch(chunk[u'__id__'], req_val)): if ((req_key == u'id') or (chunk[u'state'] == req_key)): found = True reqs[r_state].append(chunk) except KeyError as exc: raise SaltRenderError(u'Could not locate requisite of [{0}] present in state with name [{1}]'.format(req_key, chunk[u'name'])) except TypeError: raise SaltRenderError(u'Could not locate requisite of [{0}] present in state with name [{1}]'.format(req_key, chunk[u'name'])) if (not found): return (u'unmet', ()) fun_stats = set() for (r_state, chunks) in six.iteritems(reqs): if (r_state == u'prereq'): run_dict = self.pre else: run_dict = running for chunk in chunks: tag = _gen_tag(chunk) if (tag not in run_dict): fun_stats.add(u'unmet') continue if run_dict[tag].get(u'proc'): time.sleep(0.01) return self.check_requisite(low, running, chunks, pre) if (r_state == u'onfail'): if (run_dict[tag][u'result'] is True): fun_stats.add(u'onfail') continue elif (run_dict[tag][u'result'] is False): fun_stats.add(u'fail') continue if (r_state == u'onchanges'): if (not run_dict[tag][u'changes']): fun_stats.add(u'onchanges') else: fun_stats.add(u'onchangesmet') continue if ((r_state == u'watch') and run_dict[tag][u'changes']): fun_stats.add(u'change') continue if ((r_state == u'prereq') and (run_dict[tag][u'result'] is None)): fun_stats.add(u'premet') if ((r_state == u'prereq') and (not (run_dict[tag][u'result'] is None))): fun_stats.add(u'pre') else: fun_stats.add(u'met') if (u'unmet' in fun_stats): status = u'unmet' elif (u'fail' in fun_stats): status = u'fail' elif (u'pre' in fun_stats): if (u'premet' in fun_stats): status = u'met' else: status = u'pre' elif ((u'onfail' in fun_stats) and (u'met' not in fun_stats)): status = u'onfail' elif ((u'onchanges' in fun_stats) and (u'onchangesmet' not in fun_stats)): status = u'onchanges' elif (u'change' in fun_stats): status = u'change' else: status = u'met' return (status, reqs)
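The closing cascade can be isolated as a pure function over the collected fun_stats set, mirroring the branch order above (a sketch):

def resolve_status(fun_stats):
    if 'unmet' in fun_stats:
        return 'unmet'
    if 'fail' in fun_stats:
        return 'fail'
    if 'pre' in fun_stats:
        return 'met' if 'premet' in fun_stats else 'pre'
    if 'onfail' in fun_stats and 'met' not in fun_stats:
        return 'onfail'
    if 'onchanges' in fun_stats and 'onchangesmet' not in fun_stats:
        return 'onchanges'
    if 'change' in fun_stats:
        return 'change'
    return 'met'

# resolve_status({'change', 'met'}) -> 'change'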
'Fire an event on the master bus. If `fire_event` is set to True, an event will be sent with the chunk name in the tag and the chunk result in the event data. If `fire_event` is set to a string such as `mystate/is/finished`, an event will be sent with the string added to the tag and the chunk result in the event data. If `state_events` is set to True in the config, then after the chunk is evaluated an event will be sent up to the master with the results.'
def event(self, chunk_ret, length, fire_event=False):
if ((not self.opts.get(u'local')) and (self.opts.get(u'state_events', True) or fire_event)): if (not self.opts.get(u'master_uri')): ev_func = (lambda ret, tag, preload=None: salt.utils.event.get_master_event(self.opts, self.opts[u'sock_dir'], listen=False).fire_event(ret, tag)) else: ev_func = self.functions[u'event.fire_master'] ret = {u'ret': chunk_ret} if (fire_event is True): tag = salt.utils.event.tagify([self.jid, self.opts[u'id'], str(chunk_ret[u'name'])], u'state_result') elif isinstance(fire_event, six.string_types): tag = salt.utils.event.tagify([self.jid, self.opts[u'id'], str(fire_event)], u'state_result') else: tag = salt.utils.event.tagify([self.jid, u'prog', self.opts[u'id'], str(chunk_ret[u'__run_num__'])], u'job') ret[u'len'] = length preload = {u'jid': self.jid} ev_func(ret, tag, preload=preload)
'Check if a chunk has any requires, execute the requires and then the chunk'
def call_chunk(self, low, running, chunks):
low = self._mod_aggregate(low, running, chunks) self._mod_init(low) tag = _gen_tag(low) if (not low.get(u'prerequired')): self.active.add(tag) requisites = [u'require', u'watch', u'prereq', u'onfail', u'onchanges'] if (not low.get(u'__prereq__')): requisites.append(u'prerequired') (status, reqs) = self.check_requisite(low, running, chunks, pre=True) else: (status, reqs) = self.check_requisite(low, running, chunks) if (status == u'unmet'): lost = {} reqs = [] for requisite in requisites: lost[requisite] = [] if (requisite not in low): continue for req in low[requisite]: if isinstance(req, six.string_types): req = {u'id': req} req = trim_req(req) found = False req_key = next(iter(req)) req_val = req[req_key] for chunk in chunks: if (req_val is None): continue if (req_key == u'sls'): if fnmatch.fnmatch(chunk[u'__sls__'], req_val): if (requisite == u'prereq'): chunk[u'__prereq__'] = True reqs.append(chunk) found = True continue if (fnmatch.fnmatch(chunk[u'name'], req_val) or fnmatch.fnmatch(chunk[u'__id__'], req_val)): if ((req_key == u'id') or (chunk[u'state'] == req_key)): if (requisite == u'prereq'): chunk[u'__prereq__'] = True elif (requisite == u'prerequired'): chunk[u'__prerequired__'] = True reqs.append(chunk) found = True if (not found): lost[requisite].append(req) if (lost[u'require'] or lost[u'watch'] or lost[u'prereq'] or lost[u'onfail'] or lost[u'onchanges'] or lost.get(u'prerequired')): comment = u'The following requisites were not found:\n' for (requisite, lreqs) in six.iteritems(lost): if (not lreqs): continue comment += u'{0}{1}:\n'.format((u' ' * 19), requisite) for lreq in lreqs: req_key = next(iter(lreq)) req_val = lreq[req_key] comment += u'{0}{1}: {2}\n'.format((u' ' * 23), req_key, req_val) running[tag] = {u'changes': {}, u'result': False, u'comment': comment, u'__run_num__': self.__run_num, u'__sls__': low[u'__sls__']} self.__run_num += 1 self.event(running[tag], len(chunks), fire_event=low.get(u'fire_event')) return running for chunk in reqs: ctag = _gen_tag(chunk) if (ctag not in running): if (ctag in self.active): if chunk.get(u'__prerequired__'): if (tag not in self.pre): low[u'__prereq__'] = True self.pre[ctag] = self.call(low, chunks, running) return running else: return running elif (ctag not in running): log.error(u'Recursive requisite found') running[tag] = {u'changes': {}, u'result': False, u'comment': u'Recursive requisite found', u'__run_num__': self.__run_num, u'__sls__': low[u'__sls__']} self.__run_num += 1 self.event(running[tag], len(chunks), fire_event=low.get(u'fire_event')) return running running = self.call_chunk(chunk, running, chunks) if self.check_failhard(chunk, running): running[u'__FAILHARD__'] = True return running if low.get(u'__prereq__'): (status, reqs) = self.check_requisite(low, running, chunks) self.pre[tag] = self.call(low, chunks, running) if ((not self.pre[tag][u'changes']) and (status == u'change')): self.pre[tag][u'changes'] = {u'watch': u'watch'} self.pre[tag][u'result'] = None else: running = self.call_chunk(low, running, chunks) if self.check_failhard(chunk, running): running[u'__FAILHARD__'] = True return running elif (status == u'met'): if low.get(u'__prereq__'): self.pre[tag] = self.call(low, chunks, running) else: running[tag] = self.call(low, chunks, running) elif (status == u'fail'): if (tag in self.pre): running[tag] = self.pre[tag] running[tag][u'__run_num__'] = self.__run_num running[tag][u'__sls__'] = low[u'__sls__'] else: failed_requisites = set() for req_lows in six.itervalues(reqs): for req_low in req_lows: req_tag = 
_gen_tag(req_low) req_ret = self.pre.get(req_tag, running.get(req_tag)) if (req_ret is None): continue if (req_ret[u'result'] is False): key = u'{sls}.{_id}'.format(sls=req_low[u'__sls__'], _id=req_low[u'__id__']) failed_requisites.add(key) _cmt = u'One or more requisites failed: {0}'.format(u', '.join((str(i) for i in failed_requisites))) running[tag] = {u'changes': {}, u'result': False, u'comment': _cmt, u'__run_num__': self.__run_num, u'__sls__': low[u'__sls__']} self.__run_num += 1 elif ((status == u'change') and (not low.get(u'__prereq__'))): ret = self.call(low, chunks, running) if ((not ret[u'changes']) and (not ret.get(u'skip_watch', False))): low = low.copy() low[u'sfun'] = low[u'fun'] low[u'fun'] = u'mod_watch' low[u'__reqs__'] = reqs ret = self.call(low, chunks, running) running[tag] = ret elif (status == u'pre'): pre_ret = {u'changes': {}, u'result': True, u'comment': u'No changes detected', u'__run_num__': self.__run_num, u'__sls__': low[u'__sls__']} running[tag] = pre_ret self.pre[tag] = pre_ret self.__run_num += 1 elif (status == u'onfail'): running[tag] = {u'changes': {}, u'result': True, u'comment': u'State was not run because onfail req did not change', u'__run_num__': self.__run_num, u'__sls__': low[u'__sls__']} self.__run_num += 1 elif (status == u'onchanges'): running[tag] = {u'changes': {}, u'result': True, u'comment': u'State was not run because none of the onchanges reqs changed', u'__run_num__': self.__run_num, u'__sls__': low[u'__sls__']} self.__run_num += 1 elif low.get(u'__prereq__'): self.pre[tag] = self.call(low, chunks, running) else: running[tag] = self.call(low, chunks, running) if (tag in running): self.event(running[tag], len(chunks), fire_event=low.get(u'fire_event')) return running
'Find all of the listen routines and call the associated mod_watch runs'
def call_listen(self, chunks, running):
listeners = [] crefs = {} for chunk in chunks: crefs[(chunk[u'state'], chunk[u'name'])] = chunk crefs[(chunk[u'state'], chunk[u'__id__'])] = chunk if (u'listen' in chunk): listeners.append({(chunk[u'state'], chunk[u'__id__']): chunk[u'listen']}) if (u'listen_in' in chunk): for l_in in chunk[u'listen_in']: for (key, val) in six.iteritems(l_in): listeners.append({(key, val): [{chunk[u'state']: chunk[u'__id__']}]}) mod_watchers = [] errors = {} for l_dict in listeners: for (key, val) in six.iteritems(l_dict): for listen_to in val: if (not isinstance(listen_to, dict)): continue for (lkey, lval) in six.iteritems(listen_to): if ((lkey, lval) not in crefs): rerror = {_l_tag(lkey, lval): {u'comment': u'Referenced state {0}: {1} does not exist'.format(lkey, lval), u'name': u'listen_{0}:{1}'.format(lkey, lval), u'result': False, u'changes': {}}} errors.update(rerror) continue to_tag = _gen_tag(crefs[(lkey, lval)]) if (to_tag not in running): continue if running[to_tag][u'changes']: if (key not in crefs): rerror = {_l_tag(key[0], key[1]): {u'comment': u'Referenced state {0}: {1} does not exist'.format(key[0], key[1]), u'name': u'listen_{0}:{1}'.format(key[0], key[1]), u'result': False, u'changes': {}}} errors.update(rerror) continue chunk = crefs[key] low = chunk.copy() low[u'sfun'] = chunk[u'fun'] low[u'fun'] = u'mod_watch' low[u'__id__'] = u'listener_{0}'.format(low[u'__id__']) for req in STATE_REQUISITE_KEYWORDS: if (req in low): low.pop(req) mod_watchers.append(low) ret = self.call_chunks(mod_watchers) running.update(ret) for err in errors: errors[err][u'__run_num__'] = self.__run_num self.__run_num += 1 running.update(errors) return running
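The first loop above builds two structures: an index of chunks keyed by (state, name) and (state, __id__), and a list of listener declarations with listen_in inverted into listen form. A standalone sketch of that collection step:

def collect_listeners(chunks):
    listeners, crefs = [], {}
    for chunk in chunks:
        crefs[(chunk['state'], chunk['name'])] = chunk
        crefs[(chunk['state'], chunk['__id__'])] = chunk
        if 'listen' in chunk:
            listeners.append({(chunk['state'], chunk['__id__']): chunk['listen']})
        if 'listen_in' in chunk:
            # Invert listen_in: the declaring chunk becomes the thing listened to
            for l_in in chunk['listen_in']:
                for key, val in l_in.items():
                    listeners.append({(key, val): [{chunk['state']: chunk['__id__']}]})
    return listeners, crefs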
'Process a high data call and ensure the defined states.'
def call_high(self, high, orchestration_jid=None):
errors = [] (high, ext_errors) = self.reconcile_extend(high) errors += ext_errors errors += self.verify_high(high) if errors: return errors (high, req_in_errors) = self.requisite_in(high) errors += req_in_errors high = self.apply_exclude(high) if errors: return errors chunks = self.compile_high_data(high, orchestration_jid) if errors: return errors ret = self.call_chunks(chunks) ret = self.call_listen(chunks, ret) def _cleanup_accumulator_data(): accum_data_path = os.path.join(salt.utils.get_accumulator_dir(self.opts[u'cachedir']), self.instance_id) try: os.remove(accum_data_path) log.debug(u'Deleted accumulator data file %s', accum_data_path) except OSError: log.debug(u'File %s does not exist, no need to cleanup', accum_data_path) _cleanup_accumulator_data() return ret
'Enforce the states in a template'
def call_template(self, template):
high = compile_template(template, self.rend, self.opts[u'renderer'], self.opts[u'renderer_blacklist'], self.opts[u'renderer_whitelist']) if (not high): return high (high, errors) = self.render_template(high, template) if errors: return errors return self.call_high(high)
'Enforce the states in a template, passing the template as a string'
def call_template_str(self, template):
high = compile_template_str(template, self.rend, self.opts[u'renderer'], self.opts[u'renderer_blacklist'], self.opts[u'renderer_whitelist']) if (not high): return high (high, errors) = self.render_template(high, u'<template-str>') if errors: return errors return self.call_high(high)
'Gather the lists of available sls data from the master'
def __gather_avail(self):
avail = {} for saltenv in self._get_envs(): avail[saltenv] = self.client.list_states(saltenv) return avail
'The options used by the High State object are derived from options on the minion and the master, or just the minion if the high state call is entirely local.'
def __gen_opts(self, opts):
if (u'local_state' in opts): if opts[u'local_state']: return opts mopts = self.client.master_opts() if (not isinstance(mopts, dict)): opts[u'renderer'] = u'yaml_jinja' opts[u'failhard'] = False opts[u'state_top'] = salt.utils.url.create(u'top.sls') opts[u'nodegroups'] = {} opts[u'file_roots'] = {u'base': [syspaths.BASE_FILE_ROOTS_DIR]} else: opts[u'renderer'] = mopts[u'renderer'] opts[u'failhard'] = mopts.get(u'failhard', False) if mopts[u'state_top'].startswith(u'salt://'): opts[u'state_top'] = mopts[u'state_top'] elif mopts[u'state_top'].startswith(u'/'): opts[u'state_top'] = salt.utils.url.create(mopts[u'state_top'][1:]) else: opts[u'state_top'] = salt.utils.url.create(mopts[u'state_top']) opts[u'state_top_saltenv'] = mopts.get(u'state_top_saltenv', None) opts[u'nodegroups'] = mopts.get(u'nodegroups', {}) opts[u'state_auto_order'] = mopts.get(u'state_auto_order', opts[u'state_auto_order']) opts[u'file_roots'] = mopts[u'file_roots'] opts[u'top_file_merging_strategy'] = mopts.get(u'top_file_merging_strategy', opts.get(u'top_file_merging_strategy')) opts[u'env_order'] = mopts.get(u'env_order', opts.get(u'env_order', [])) opts[u'default_top'] = mopts.get(u'default_top', opts.get(u'default_top')) opts[u'state_events'] = mopts.get(u'state_events') opts[u'state_aggregate'] = mopts.get(u'state_aggregate', opts.get(u'state_aggregate', False)) opts[u'jinja_lstrip_blocks'] = mopts.get(u'jinja_lstrip_blocks', False) opts[u'jinja_trim_blocks'] = mopts.get(u'jinja_trim_blocks', False) return opts
'Pull the file server environments out of the master options'
def _get_envs(self):
envs = [u'base'] if (u'file_roots' in self.opts): envs.extend([x for x in list(self.opts[u'file_roots']) if (x not in envs)]) env_order = self.opts.get(u'env_order', []) members = set() env_order = [env for env in env_order if (not ((env in members) or members.add(env)))] client_envs = self.client.envs() if (env_order and client_envs): return [env for env in env_order if (env in client_envs)] elif env_order: return env_order else: envs.extend([env for env in client_envs if (env not in envs)]) return envs
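The env_order dedup above leans on set.add returning None, so the membership test and the insertion fit in one comprehension while preserving first-seen order; a standalone demonstration:

def unique_in_order(seq):
    seen = set()
    # 'x in seen' short-circuits; otherwise seen.add(x) returns None (falsy)
    return [x for x in seq if not (x in seen or seen.add(x))]

print(unique_in_order(['base', 'dev', 'base', 'qa', 'dev']))  # ['base', 'dev', 'qa']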
'Gather the top files'
def get_tops(self):
tops = DefaultOrderedDict(list) include = DefaultOrderedDict(list) done = DefaultOrderedDict(list) found = 0 merging_strategy = self.opts[u'top_file_merging_strategy'] if ((merging_strategy == u'same') and (not self.opts[u'environment'])): if (not self.opts[u'default_top']): raise SaltRenderError(u"top_file_merging_strategy set to 'same', but no default_top configuration option was set") if self.opts[u'environment']: contents = self.client.cache_file(self.opts[u'state_top'], self.opts[u'environment']) if contents: found = 1 tops[self.opts[u'environment']] = [compile_template(contents, self.state.rend, self.state.opts[u'renderer'], self.state.opts[u'renderer_blacklist'], self.state.opts[u'renderer_whitelist'], saltenv=self.opts[u'environment'])] else: tops[self.opts[u'environment']] = [{}] else: found = 0 state_top_saltenv = self.opts.get(u'state_top_saltenv', False) if (state_top_saltenv and (not isinstance(state_top_saltenv, six.string_types))): state_top_saltenv = str(state_top_saltenv) for saltenv in ([state_top_saltenv] if state_top_saltenv else self._get_envs()): contents = self.client.cache_file(self.opts[u'state_top'], saltenv) if contents: found = (found + 1) tops[saltenv].append(compile_template(contents, self.state.rend, self.state.opts[u'renderer'], self.state.opts[u'renderer_blacklist'], self.state.opts[u'renderer_whitelist'], saltenv=saltenv)) else: tops[saltenv].append({}) log.debug(u"No contents loaded for saltenv '%s'", saltenv) if ((found > 1) and (merging_strategy == u'merge') and (not self.opts.get(u'env_order', None))): log.warning(u"top_file_merging_strategy is set to '%s' and multiple top files were found. Merging order is not deterministic, it may be desirable to either set top_file_merging_strategy to 'same' or use the 'env_order' configuration parameter to specify the merging order.", merging_strategy) if (found == 0): log.debug(u"No contents found in top file. If this is not expected, verify that the 'file_roots' specified in 'etc/master' are accessible. The 'file_roots' configuration is: %s", repr(self.state.opts[u'file_roots'])) for (saltenv, ctops) in six.iteritems(tops): for ctop in ctops: if (u'include' not in ctop): continue for sls in ctop[u'include']: include[saltenv].append(sls) ctop.pop(u'include') while include: pops = [] for (saltenv, states) in six.iteritems(include): pops.append(saltenv) if (not states): continue for sls_match in states: for sls in fnmatch.filter(self.avail[saltenv], sls_match): if (sls in done[saltenv]): continue tops[saltenv].append(compile_template(self.client.get_state(sls, saltenv).get(u'dest', False), self.state.rend, self.state.opts[u'renderer'], self.state.opts[u'renderer_blacklist'], self.state.opts[u'renderer_whitelist'], saltenv)) done[saltenv].append(sls) for saltenv in pops: if (saltenv in include): include.pop(saltenv) return tops
'Cleanly merge the top files'
def merge_tops(self, tops):
merging_strategy = self.opts[u'top_file_merging_strategy'] try: merge_attr = u'_merge_tops_{0}'.format(merging_strategy) merge_func = getattr(self, merge_attr) if (not hasattr(merge_func, u'__call__')): msg = u"'{0}' is not callable".format(merge_attr) log.error(msg) raise TypeError(msg) except (AttributeError, TypeError): log.warning(u"Invalid top_file_merging_strategy '%s', falling back to 'merge'", merging_strategy) merge_func = self._merge_tops_merge return merge_func(tops)
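The dispatch pattern above, resolving a method named '_merge_tops_<strategy>' at runtime and falling back to the merge strategy, can be sketched in isolation (the class and strategies here are illustrative, and getattr's default argument replaces the original's try/except):

class TopMerger(object):
    def _merge_tops_merge(self, tops):
        return 'merged: {0}'.format(tops)

    def _merge_tops_same(self, tops):
        return 'same-env only: {0}'.format(tops)

    def merge_tops(self, tops, strategy):
        # Unknown strategies fall back to the default merge implementation
        func = getattr(self, '_merge_tops_{0}'.format(strategy), self._merge_tops_merge)
        return func(tops)

print(TopMerger().merge_tops(['top1'], 'same'))   # same-env only: ['top1']
print(TopMerger().merge_tops(['top1'], 'bogus'))  # merged: ['top1']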