Columns: desc (string, 3 to 26.7k chars) | decl (string, 11 to 7.89k chars) | bodies (string, 8 to 553k chars)
'Purge trigger instances which match the criteria defined in the config.'
def _purge_trigger_instances(self):
LOG.info('Performing garbage collection for trigger instances')

utc_now = get_datetime_utc_now()
timestamp = utc_now - datetime.timedelta(days=self._trigger_instances_ttl)

# Sanity check so we never delete objects younger than the minimum TTL
if timestamp > (utc_now - datetime.timedelta(days=MINIMUM_TTL_DAYS)):
    raise ValueError('Calculated timestamp would violate the minimum TTL constraint')

timestamp_str = isotime.format(dt=timestamp)
LOG.info('Deleting trigger instances older than: %s' % timestamp_str)

assert timestamp < utc_now

try:
    purge_trigger_instances(logger=LOG, timestamp=timestamp)
except Exception as e:
    LOG.exception('Failed to purge trigger instances: %s' % str(e))

return True
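For reference, the cutoff arithmetic above can be exercised standalone; this sketch uses only the stdlib, and MINIMUM_TTL_DAYS plus the configured TTL are hypothetical stand-ins, not the real constants:

import datetime

MINIMUM_TTL_DAYS = 7            # stand-in for the real minimum TTL constant
trigger_instances_ttl = 30      # hypothetical configured TTL in days

utc_now = datetime.datetime.utcnow()
cutoff = utc_now - datetime.timedelta(days=trigger_instances_ttl)

# Refuse to compute a cutoff newer than (now - MINIMUM_TTL_DAYS)
if cutoff > utc_now - datetime.timedelta(days=MINIMUM_TTL_DAYS):
    raise ValueError('TTL of %s days is below the minimum of %s days' %
                     (trigger_instances_ttl, MINIMUM_TTL_DAYS))

print('Would purge trigger instances older than %s' % cutoff.isoformat())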
':param sensor_service: Sensor Service instance. :type sensor_service: :class:`st2reactor.container.sensor_wrapper.SensorService` :keyword config: Sensor config. :type config: ``dict`` or None'
def __init__(self, sensor_service, config=None):
self._sensor_service = sensor_service
# Both the private and the public attribute names are assigned
self.sensor_service = sensor_service
self._config = config or {}
self.config = self._config
'Run the sensor initialization / setup code (if any).'
@abc.abstractmethod
def setup(self):
pass
'Run the sensor.'
@abc.abstractmethod
def run(self):
pass
'Run the sensor cleanup code (if any).'
@abc.abstractmethod
def cleanup(self):
pass
'Runs when trigger is created'
@abc.abstractmethod
def add_trigger(self, trigger):
pass
'Runs when trigger is updated'
@abc.abstractmethod
def update_trigger(self, trigger):
pass
'Runs when trigger is deleted'
@abc.abstractmethod
def remove_trigger(self, trigger):
pass
'Poll 3rd party system for new information.'
@abc.abstractmethod
def poll(self):
pass
'Retrieve current poll interval. :return: Current poll interval. :rtype: ``float``'
def get_poll_interval(self):
return self._poll_interval
'Set the poll interval. :param poll_interval: Poll interval to use. :type poll_interval: ``float``'
def set_poll_interval(self, poll_interval):
self._poll_interval = poll_interval
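For orientation, a minimal concrete sensor built on this interface might look like the sketch below; the base class name, trigger ref, and dispatch payload are assumptions, not taken from this excerpt:

class CounterSensor(PollingSensor):  # assumes the ABC above is named PollingSensor

    def setup(self):
        self._count = 0

    def run(self):
        pass

    def cleanup(self):
        pass

    def poll(self):
        # Dispatch a hypothetical trigger on every poll
        self._count += 1
        self.sensor_service.dispatch(trigger='examples.counter',
                                     payload={'count': self._count})

    def add_trigger(self, trigger):
        pass

    def update_trigger(self, trigger):
        pass

    def remove_trigger(self, trigger):
        pass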
'Creates a RuleEnforcer for each matching rule. This method is trigger_instance specific; if creation of one RuleEnforcer fails, it is likely that all will be broken.'
def create_rule_enforcers(self, trigger_instance, matching_rules):
enforcers = []
for matching_rule in matching_rules:
    enforcers.append(RuleEnforcer(trigger_instance, matching_rule))
return enforcers
':param rule_file_path: Path to the file containing rule definition. :type rule_file_path: ``str`` :param trigger_instance_file_path: Path to the file containing trigger instance definition. :type trigger_instance_file_path: ``str``'
def __init__(self, rule_file_path=None, rule_ref=None, trigger_instance_file_path=None, trigger_instance_id=None):
self._rule_file_path = rule_file_path
self._rule_ref = rule_ref
self._trigger_instance_file_path = trigger_instance_file_path
self._trigger_instance_id = trigger_instance_id
self._meta_loader = MetaLoader()
'Evaluate trigger instance against the rule. :return: ``True`` if the rule matches, ``False`` otherwise. :rtype: ``boolean``'
def evaluate(self):
rule_db = self._get_rule_db()
trigger_instance_db, trigger_db = self._get_trigger_instance_db()

if rule_db.trigger != trigger_db.ref:
    LOG.info('rule.trigger "%s" and trigger.ref "%s" do not match.',
             rule_db.trigger, trigger_db.ref)
    return False

matcher = RulesMatcher(trigger_instance=trigger_instance_db, trigger=trigger_db,
                       rules=[rule_db], extra_info=True)
matching_rules = matcher.get_matching_rules()

if len(matching_rules) < 1:
    return False

try:
    enforcer = RuleEnforcer(trigger_instance=trigger_instance_db, rule=rule_db)
    params = enforcer.get_resolved_parameters()

    LOG.info('Action parameters resolved to:')
    for param in six.iteritems(params):
        LOG.info('\t%s: %s', param[0], param[1])
    return True
except (UndefinedError, ValueError) as e:
    LOG.error('Failed to resolve parameters\n\tOriginal error: %s', str(e))
    return False
except Exception:
    LOG.exception('Failed to resolve parameters.')
    return False
'A TriggerInstance is created from the message prior to acknowledging the message. This gives us a way to not acknowledge messages that fail processing.'
def pre_ack_process(self, message):
trigger = message['trigger']
payload = message['payload']
trigger_instance = container_utils.create_trigger_instance(
    trigger, payload or {}, date_utils.get_datetime_utc_now(),
    raise_on_no_trigger=True)
return self._compose_pre_ack_process_response(trigger_instance, message)
'Codify response of the pre_ack_process method.'
@staticmethod
def _compose_pre_ack_process_response(trigger_instance, message):
return {'trigger_instance': trigger_instance, 'message': message}
'Break-down response of pre_ack_process into constituents for simpler consumption.'
@staticmethod
def _decompose_pre_ack_process_response(response):
return (response.get('trigger_instance', None), response.get('message', None))
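A quick round-trip of the compose/decompose pair above, with stand-in values:

trigger_instance = object()  # stand-in for a TriggerInstance DB object
message = {'trigger': 'pack.trigger', 'payload': {'k': 'v'}}

response = {'trigger_instance': trigger_instance, 'message': message}  # compose
ti = response.get('trigger_instance', None)                            # decompose
msg = response.get('message', None)
assert ti is trigger_instance and msg is message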
'Return the trace_context as a dict; could be None. :rtype: ``dict`` or None'
def _update_trace(self):
trace_db = None
try:
    trace_db = trace_service.get_trace_db_by_trigger_instance(self.trigger_instance)
except Exception:
    LOG.exception('No Trace found for TriggerInstance %s.', self.trigger_instance.id)
    return None

# A missing trace_db at this point would signify a coding error
assert trace_db

trace_db = trace_service.add_or_update_given_trace_db(
    trace_db=trace_db,
    rules=[trace_service.get_trace_component_for_rule(self.rule, self.trigger_instance)])
return vars(TraceContext(id_=str(trace_db.id), trace_tag=trace_db.trace_tag))
'Schedule an action execution. :type action_exec_spec: :class:`ActionExecutionSpecDB` :param params: Parameters to execute the action with. :type params: ``dict`` :rtype: :class:`LiveActionDB` on successful scheduling, None otherwise.'
@staticmethod
def _invoke_action(action_exec_spec, params, context=None):
action_ref = action_exec_spec['ref']
params = action_param_utils.cast_params(action_ref, params)
liveaction = LiveActionDB(action=action_ref, context=context, parameters=params)
liveaction, execution = action_service.request(liveaction)
return execution
':param trigger_instance: TriggerInstance DB object. :type trigger_instance: :class:`TriggerInstanceDB` :param trigger: Trigger DB object. :type trigger: :class:`TriggerDB` :param rule: Rule DB object. :type rule: :class:`RuleDB`'
def __init__(self, trigger_instance, trigger, rule, extra_info=False):
self.trigger_instance = trigger_instance
self.trigger = trigger
self.rule = rule
self.extra_info = extra_info
self._base_logger_context = {'rule': self.rule,
                             'trigger': self.trigger,
                             'trigger_instance': self.trigger_instance}
'Return true if the rule is applicable to the provided trigger instance. :rtype: ``bool``'
def filter(self):
LOG.info('Validating rule %s for %s.', self.rule.ref, self.trigger['name'],
         extra=self._base_logger_context)

if not self.rule.enabled:
    if self.extra_info:
        LOG.info('Validation failed for rule %s as it is disabled.', self.rule.ref)
    return False

criteria = self.rule.criteria
is_rule_applicable = True

if criteria and not self.trigger_instance.payload:
    return False

payload_lookup = PayloadLookup(self.trigger_instance.payload)

LOG.debug('Trigger payload: %s', self.trigger_instance.payload,
          extra=self._base_logger_context)

for criterion_k in criteria.keys():
    criterion_v = criteria[criterion_k]
    is_rule_applicable, payload_value, criterion_pattern = self._check_criterion(
        criterion_k, criterion_v, payload_lookup)

    if not is_rule_applicable:
        if self.extra_info:
            criteria_extra_info = '\n'.join([
                '  key: %s' % criterion_k,
                '  pattern: %s' % criterion_pattern,
                '  type: %s' % criterion_v['type'],
                '  payload: %s' % payload_value
            ])
            LOG.info('Validation for rule %s failed on criteria -\n%s',
                     self.rule.ref, criteria_extra_info,
                     extra=self._base_logger_context)
        break

if not is_rule_applicable:
    LOG.debug('Rule %s not applicable for %s.', self.rule.id, self.trigger['name'],
              extra=self._base_logger_context)

return is_rule_applicable
':param trigger_instance: TriggerInstance DB object. :type trigger_instance: :class:`TriggerInstanceDB` :param trigger: Trigger DB object. :type trigger: :class:`TriggerDB` :param rule: Rule DB object. :type rule: :class:`RuleDB` :param first_pass_matched: Rules that matched in the first pass. :type first_pass_matched: ``list``'
def __init__(self, trigger_instance, trigger, rule, first_pass_matched):
super(SecondPassRuleFilter, self).__init__(trigger_instance, trigger, rule)
self.first_pass_matched = first_pass_matched
'Splits the rules in the Matcher into first_pass and second_pass collections. Since the'
def _split_rules_into_passes(self):
first_pass = []
second_pass = []

for rule in self.rules:
    if self._is_first_pass_rule(rule):
        first_pass.append(rule)
    else:
        second_pass.append(rule)

return (first_pass, second_pass)
'Edge cases to test:
 - default branch is master, ref is pack version
 - default branch is master, ref is branch name
 - default branch is master, ref is default branch name
 - default branch is not master, ref is pack version
 - default branch is not master, ref is branch name
 - default branch is not master, ref is default branch name'
def test_run_pack_download_edge_cases(self):
def side_effect(ref):
    if ref[0] != 'v':
        raise BadName()
    return mock.MagicMock(hexsha='abcdeF')

self.repo_instance.commit.side_effect = side_effect

# Note: A dict literal would silently collapse the duplicate keys, so the
# edge cases are kept as (default_branch, ref) tuples.
edge_cases = [
    ('master', '1.2.3'),
    ('master', 'some-branch'),
    ('master', 'default-branch'),
    ('master', None),
    ('default-branch', '1.2.3'),
    ('default-branch', 'some-branch'),
    ('default-branch', 'default-branch'),
    ('default-branch', None),
]

for default_branch, ref in edge_cases:
    self.repo_instance.git = mock.MagicMock(branch=(lambda *args: default_branch),
                                            checkout=(lambda *args: True))
    self.repo_instance.active_branch.name = default_branch
    self.repo_instance.active_branch.object = 'aBcdef'
    self.repo_instance.head.commit = 'aBcdef'

    gitref = mock.MagicMock(hexsha='abcDef')

    # Fake commit lookup which only succeeds for the ref under test
    def fake_commit(arg_ref):
        if arg_ref == ref:
            return gitref
        else:
            raise BadName()

    self.repo_instance.commit = fake_commit
    self.repo_instance.active_branch.object = gitref

    action = self.get_action_instance()

    if ref:
        packs = ['test=%s' % ref]
    else:
        packs = ['test']

    result = action.run(packs=packs, abs_repo_base=self.repo_base)
    self.assertEqual(result, {'test': 'Success.'})
'Run the provided client method and format the result. :param method: Client method to run. :type method: ``func`` :param method_kwargs: Keyword arguments passed to the client method. :type method_kwargs: ``dict`` :param format_func: Function for formatting the result. :type format_func: ``func`` :rtype: ``list`` of ``dict``'
def _run_client_method(self, method, method_kwargs, format_func, format_kwargs=None):
method_kwargs = filter_none_values(method_kwargs)
method_name = method.__name__

self.logger.debug('Calling client method "%s" with kwargs "%s"' %
                  (method_name, method_kwargs))

result = method(**method_kwargs)
result = format_func(result, **(format_kwargs or {}))
return result
':param pack: Installed Pack Name to get info about :type pack: ``str``'
def run(self, pack):
packs_base_paths = get_packs_base_paths()

pack_path = None
metadata_file = None
for packs_base_path in packs_base_paths:
    pack_path = os.path.join(packs_base_path, pack)
    pack_yaml_path = os.path.join(pack_path, MANIFEST_FILE_NAME)

    if os.path.isfile(pack_yaml_path):
        metadata_file = pack_yaml_path
        break

# Pack doesn't exist; finish execution normally with empty metadata
if not os.path.isdir(pack_path):
    return {'pack': None, 'git_status': None}

if not metadata_file:
    error = 'Pack "%s" doesn\'t contain pack.yaml file.' % pack
    raise Exception(error)

try:
    details = self._parse_yaml_file(metadata_file)
except Exception as e:
    error = 'Pack "%s" doesn\'t contain a valid pack.yaml file: %s' % (pack, str(e))
    raise Exception(error)

try:
    repo = Repo(pack_path)
    git_status = 'Status:\n%s\n\nRemotes:\n%s' % (
        repo.git.status().split('\n')[0],
        '\n'.join([remote.url for remote in repo.remotes]))

    ahead_behind = repo.git.rev_list('--left-right', '--count',
                                     'HEAD...origin/master').split()
    # Only display the counts if the repo is not in sync with origin/master
    if ahead_behind != [u'0', u'0']:
        git_status += '\n\n'
        git_status += ('%s commits ahead ' % ahead_behind[0]) if ahead_behind[0] != u'0' else ''
        git_status += 'and ' if u'0' not in ahead_behind else ''
        git_status += ('%s commits behind ' % ahead_behind[1]) if ahead_behind[1] != u'0' else ''
        git_status += 'origin/master.'
except InvalidGitRepositoryError:
    git_status = None

return {'pack': details, 'git_status': git_status}
'Will recursively apply permissions 775 to the pack and its contents.'
def _apply_pack_permissions(self, pack_path):
pack_group = utils.get_pack_group()
if pack_group:
    shell.run_command(['sudo', 'chgrp', '-R', pack_group, pack_path])

# rwx for user and group, rx for others (0775)
mode = stat.S_IRWXU | stat.S_IRWXG | stat.S_IROTH | stat.S_IXOTH
os.chmod(pack_path, mode)

for root, dirs, files in os.walk(pack_path):
    for d in dirs:
        os.chmod(os.path.join(root, d), mode)
    for f in files:
        os.chmod(os.path.join(root, f), mode)
'Allow passing short GitHub style URLs'
@staticmethod
def _eval_repo_url(repo_url):
if not repo_url:
    raise Exception('No valid repo_url provided or could be inferred.')
if repo_url.startswith('file://'):
    return repo_url
else:
    if len(repo_url.split('/')) == 2 and 'git@' not in repo_url:
        url = 'https://github.com/{}'.format(repo_url)
    else:
        url = repo_url
    return url
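Illustrating the expansion rules above (the repo names are made up; the method is assumed reachable on DownloadGitRepoAction):

# 'owner/repo' shorthand expands to a GitHub HTTPS URL
assert (DownloadGitRepoAction._eval_repo_url('StackStorm-Exchange/stackstorm-libcloud') ==
        'https://github.com/StackStorm-Exchange/stackstorm-libcloud')

# full URLs and file:// paths pass through unchanged
assert (DownloadGitRepoAction._eval_repo_url('file:///opt/packs/my_pack') ==
        'file:///opt/packs/my_pack')
assert (DownloadGitRepoAction._eval_repo_url('git@github.com:foo/bar.git') ==
        'git@github.com:foo/bar.git')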
'Read pack name from the metadata file and sanitize it.'
@staticmethod
def _get_pack_ref(pack_dir):
metadata = DownloadGitRepoAction._get_pack_metadata(pack_dir=pack_dir)
pack_ref = get_pack_ref_from_metadata(metadata=metadata, pack_directory_name=None)
return pack_ref
'Method which returns valid versions for a particular repo (pack). It does so by introspecting available tags. :rtype: ``list`` of ``str``'
@staticmethod
def _get_valid_versions_for_repo(repo):
valid_versions = []
for tag in repo.tags:
    if tag.name.startswith('v') and re.match(PACK_VERSION_REGEX, tag.name[1:]):
        # Strip the leading "v" from the tag name
        valid_versions.append(tag.name[1:])
return valid_versions
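A standalone sketch of the tag filtering; the regex below is an assumed x.y.z stand-in for PACK_VERSION_REGEX and the Tag namedtuple mimics GitPython tag objects:

import re
from collections import namedtuple

PACK_VERSION_REGEX = r'^\d+\.\d+\.\d+$'  # stand-in pattern, not the real constant

Tag = namedtuple('Tag', ['name'])
tags = [Tag('v1.0.0'), Tag('v2.1.3'), Tag('not-a-version'), Tag('v2.x')]

valid_versions = [tag.name[1:] for tag in tags
                  if tag.name.startswith('v') and re.match(PACK_VERSION_REGEX, tag.name[1:])]
print(valid_versions)  # ['1.0.0', '2.1.3']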
':param query: A word or a phrase to search for :type query: ``str``'
def run(self, query):
self.logger.debug('Proxy config: %s', self.proxy_config)
return search_pack_index(query, proxy_config=self.proxy_config)
':param packs_status: Result from packs.download action. :type packs_status: ``dict``'
def run(self, packs_status):
packs = []
for pack_name, status in six.iteritems(packs_status):
    if 'success' in status.lower():
        packs.append(pack_name)
return packs
':param packs: A list of packs to create the environment for. :type packs: ``list`` :param update: True to update dependencies inside the virtual environment. :type update: ``bool``'
def run(self, packs, update=False):
for pack_name in packs:
    setup_pack_virtualenv(pack_name=pack_name, update=update, logger=self.logger,
                          proxy_config=self.proxy_config)

message = ('Successfully set up virtualenv for the following packs: %s' %
           ', '.join(packs))
return message
':param pack: Pack Name to get info about :type pack: ``str``'
def run(self, pack):
return {'pack': get_pack_from_index(pack)}
':param timeout: Action execution timeout in seconds. :type timeout: ``int``'
def __init__(self, runner_id, timeout=WINDOWS_RUNNER_DEFAULT_ACTION_TIMEOUT):
super(WindowsCommandRunner, self).__init__(runner_id=runner_id)
self._timeout = timeout
'Serialize inputs dictionary as YAML and write it in a temporary file. :param inputs: Inputs dictionary. :type inputs: ``dict`` :return: Path to the temporary file. :rtype: ``str``'
def _write_inputs_to_a_temp_file(self, inputs):
if not inputs:
    return None

LOG.debug('Inputs dict: %s', inputs)

inputs_file = tempfile.NamedTemporaryFile(delete=False)
inputs_file_path = inputs_file.name

yaml_inputs = yaml.safe_dump(inputs, default_flow_style=False)

with open(inputs_file_path, 'w') as fp:
    fp.write(yaml_inputs)

LOG.debug('YAML serialized inputs: %s', yaml_inputs)
return inputs_file_path
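The serialization step round-trips cleanly; a standalone sketch with illustrative inputs:

import tempfile
import yaml

inputs = {'cmd': 'Get-Process', 'timeout': 60}  # hypothetical action inputs

with tempfile.NamedTemporaryFile(mode='w', suffix='.yaml', delete=False) as fp:
    yaml.safe_dump(inputs, fp, default_flow_style=False)
    path = fp.name

with open(path) as fp:
    assert yaml.safe_load(fp) == inputs  # written YAML reads back unchanged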
':param timeout: Action execution timeout in seconds. :type timeout: ``int``'
def __init__(self, runner_id, timeout=PYTHON_RUNNER_DEFAULT_ACTION_TIMEOUT):
super(PythonRunner, self).__init__(runner_id=runner_id)
self._timeout = timeout
'Return sanitized output values. :return: Tuple with status, output and None :rtype: ``tuple``'
def _get_output_values(self, exit_code, stdout, stderr, timed_out):
if timed_out:
    error = 'Action failed to complete in %s seconds' % self._timeout
else:
    error = None

if exit_code == PYTHON_RUNNER_INVALID_ACTION_STATUS_EXIT_CODE:
    raise ValueError(stderr)

if ACTION_OUTPUT_RESULT_DELIMITER in stdout:
    split = stdout.split(ACTION_OUTPUT_RESULT_DELIMITER)
    assert len(split) == 3
    action_result = split[1].strip()
    stdout = split[0] + split[2]
else:
    action_result = None

# Parse the serialized action result object (if any)
try:
    action_result = json.loads(action_result)
except Exception as e:
    LOG.warning('Failed to de-serialize result "%s": %s' %
                (str(action_result), str(e)))

if action_result:
    if isinstance(action_result, dict):
        result = action_result.get('result', None)
        status = action_result.get('status', None)
    else:
        # De-serialization failed; fall back to a raw string match
        match = re.search("'result': (.*?)$", action_result or '')

        if match:
            action_result = match.groups()[0]

        result = action_result
        status = None
else:
    result = 'None'
    status = None

output = {'stdout': stdout, 'stderr': stderr, 'exit_code': exit_code, 'result': result}

if error:
    output['error'] = error

status = self._get_final_status(action_status=status, timed_out=timed_out,
                                exit_code=exit_code)
return (status, output, None)
'Return final status based on action\'s status, time out value and exit code. Example: succeeded, failed, timeout. :return: status :rtype: ``str``'
def _get_final_status(self, action_status, timed_out, exit_code):
if action_status is not None:
    if exit_code == 0 and action_status is True:
        status = LIVEACTION_STATUS_SUCCEEDED
    elif exit_code == 0 and action_status is False:
        status = LIVEACTION_STATUS_FAILED
    else:
        status = LIVEACTION_STATUS_FAILED
elif exit_code == 0:
    status = LIVEACTION_STATUS_SUCCEEDED
else:
    status = LIVEACTION_STATUS_FAILED

if timed_out:
    status = LIVEACTION_STATUS_TIMED_OUT

return status
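The branching above reduces to: a timeout always wins, otherwise a non-zero exit code or an explicit False action status means failure. A standalone sketch with stand-in status constants:

SUCCEEDED, FAILED, TIMED_OUT = 'succeeded', 'failed', 'timeout'  # stand-ins

def final_status(action_status, timed_out, exit_code):
    if timed_out:
        return TIMED_OUT
    if exit_code != 0 or action_status is False:
        return FAILED
    return SUCCEEDED

assert final_status(None, False, 0) == SUCCEEDED
assert final_status(True, False, 0) == SUCCEEDED
assert final_status(False, False, 0) == FAILED
assert final_status(None, False, 1) == FAILED
assert final_status(True, True, 0) == TIMED_OUT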
'Return sanitized environment variables which will be used when launching a subprocess. :rtype: ``dict``'
def _get_env_vars(self):
env_vars = {}

if self._env:
    env_vars.update(self._env)

# Collect and remove blacklisted environment variables
to_delete = []
for key, value in env_vars.items():
    if key.lower() in BLACKLISTED_ENV_VARS:
        to_delete.append(key)

for key in to_delete:
    LOG.debug('User specified environment variable "%s" which is being ignored...' %
              key)
    del env_vars[key]

return env_vars
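The same blacklist filtering as an isolated snippet (the blacklist contents are assumed; st2 keeps the real list elsewhere):

BLACKLISTED_ENV_VARS = ['pythonpath']  # assumed entry; comparison is lowercase

user_env = {'PYTHONPATH': '/tmp/evil', 'FOO': 'bar'}
env_vars = {key: value for key, value in user_env.items()
            if key.lower() not in BLACKLISTED_ENV_VARS}
print(env_vars)  # {'FOO': 'bar'}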
'Return environment variables which make datastore access from actions possible using the client from st2client. This is done to be compatible with sensors. :rtype: ``dict``'
def _get_datastore_access_env_vars(self):
env_vars = {}

if self.auth_token:
    env_vars[AUTH_TOKEN_ENV_VARIABLE_NAME] = self.auth_token.token

env_vars[API_URL_ENV_VARIABLE_NAME] = get_full_public_api_url()

return env_vars
'Return mock action object. Pack gets set to the system pack so the action doesn\'t require a separate virtualenv.'
def _get_mock_action_obj(self):
action = mock.Mock()
action.pack = SYSTEM_PACK_NAME
action.entry_point = 'foo.py'
return action
':param timeout: Action execution timeout in seconds. :type timeout: ``int``'
def __init__(self, runner_id, timeout=WINDOWS_RUNNER_DEFAULT_ACTION_TIMEOUT):
super(WindowsScriptRunner, self).__init__(runner_id=runner_id)
self._timeout = timeout
':param script_path: Full path to the script on the remote server. :type script_path: ``str`` :param arguments: The arguments to pass to the script. :type arguments: ``str``'
def _run_script(self, script_path, arguments=None):
if arguments is not None:
    command = '%s %s %s' % (POWERSHELL_COMMAND, quote_windows(script_path), arguments)
else:
    command = '%s %s' % (POWERSHELL_COMMAND, quote_windows(script_path))

args = self._get_winexe_command_args(host=self._host, username=self._username,
                                     password=self._password, command=command)

LOG.debug('Running script "%s"' % script_path)

# stdin is an unused pipe rather than the parent's stdin
exit_code, stdout, stderr, timed_out = run_command(cmd=args, stdin=subprocess.PIPE,
                                                   stdout=subprocess.PIPE,
                                                   stderr=subprocess.PIPE, shell=False,
                                                   timeout=self._timeout)

extra = {'exit_code': exit_code, 'stdout': stdout, 'stderr': stderr}
LOG.debug('Command returned', extra=extra)

return exit_code, stdout, stderr, timed_out
'Builds a string of named and positional arguments in PowerShell format, which are passed to the script. :param named_args: Dictionary with named arguments :type named_args: ``dict``. :param positional_args: List of positional arguments :type positional_args: ``str`` :rtype: ``str``'
def _get_script_arguments(self, named_args=None, positional_args=None):
cmd_parts = []

if positional_args:
    cmd_parts.append(positional_args)

if named_args:
    for arg, value in six.iteritems(named_args):
        arg = quote_windows(arg)

        if value is None or (isinstance(value, six.string_types) and len(value) < 1):
            LOG.debug('Ignoring arg %s as its value is %s.', arg, value)
            continue

        if isinstance(value, bool):
            if value:
                cmd_parts.append('-%s' % arg)
            else:
                cmd_parts.append('-%s:$false' % arg)
        elif isinstance(value, (list, tuple)) or hasattr(value, '__iter__'):
            cmd_parts.append('-%s %s' % (arg, ','.join(value)))
        else:
            cmd_parts.append('-%s %s' % (arg, quote_windows(str(value))))

return ' '.join(cmd_parts)
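For a sense of the output shape, here is the core of the formatting logic run against a hypothetical argument dict, with quote_windows stubbed out as identity:

def quote_windows(value):  # stub; the real helper quotes for Windows shells
    return value

named_args = {'Name': 'worker', 'Force': True, 'Debug': False, 'Tags': ['a', 'b']}

cmd_parts = []
for arg, value in named_args.items():
    if isinstance(value, bool):
        cmd_parts.append('-%s' % arg if value else '-%s:$false' % arg)
    elif isinstance(value, (list, tuple)):
        cmd_parts.append('-%s %s' % (arg, ','.join(value)))
    else:
        cmd_parts.append('-%s %s' % (arg, quote_windows(str(value))))

print(' '.join(cmd_parts))  # -Name worker -Force -Debug:$false -Tags a,b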
'Upload provided file to the remote server in a temporary directory. :param local_path: Local path to the file to upload. :type local_path: ``str`` :param base_path: Absolute base path for the share. :type base_path: ``str``'
def _upload_file(self, local_path, base_path):
file_name = os.path.basename(local_path)

temporary_directory_name = str(uuid.uuid4())
command = 'mkdir %s' % quote_windows(temporary_directory_name)

# 1. Create a temporary directory on the share
args = self._get_smbclient_command_args(host=self._host, username=self._username,
                                        password=self._password, command=command,
                                        share=self._share)

LOG.debug('Creating temp directory "%s"' % temporary_directory_name)

exit_code, stdout, stderr, timed_out = run_command(cmd=args, stdout=subprocess.PIPE,
                                                   stderr=subprocess.PIPE, shell=False,
                                                   timeout=CREATE_DIRECTORY_TIMEOUT)

extra = {'exit_code': exit_code, 'stdout': stdout, 'stderr': stderr}
LOG.debug('Directory created', extra=extra)

# 2. Upload the file to the temporary directory
remote_path = PATH_SEPARATOR.join([temporary_directory_name, file_name])

values = {'local_path': quote_windows(local_path),
          'remote_path': quote_windows(remote_path)}
command = 'put %(local_path)s %(remote_path)s' % values
args = self._get_smbclient_command_args(host=self._host, username=self._username,
                                        password=self._password, command=command,
                                        share=self._share)

extra = {'local_path': local_path, 'remote_path': remote_path}
LOG.debug('Uploading file to "%s"' % remote_path)

exit_code, stdout, stderr, timed_out = run_command(cmd=args, stdout=subprocess.PIPE,
                                                   stderr=subprocess.PIPE, shell=False,
                                                   timeout=UPLOAD_FILE_TIMEOUT)

extra = {'exit_code': exit_code, 'stdout': stdout, 'stderr': stderr}
LOG.debug('File uploaded to "%s"' % remote_path, extra=extra)

full_remote_file_path = base_path + '\\' + remote_path
full_temporary_directory_path = base_path + '\\' + temporary_directory_name

return full_remote_file_path, full_temporary_directory_path
'Retrieve full absolute path for a share with the provided name. :param share: Share name. :type share: ``str``'
def _get_share_absolute_path(self, share):
command = 'net share %s' % quote_windows(share)
args = self._get_winexe_command_args(host=self._host, username=self._username,
                                     password=self._password, command=command)

LOG.debug('Retrieving full absolute path for share "%s"' % share)
exit_code, stdout, stderr, timed_out = run_command(cmd=args, stdout=subprocess.PIPE,
                                                   stderr=subprocess.PIPE, shell=False,
                                                   timeout=self._timeout)

if exit_code != 0:
    msg = 'Failed to retrieve absolute path for share "%s"' % share
    raise Exception(msg)

share_info = self._parse_share_information(stdout=stdout)
share_path = share_info.get('path', None)

if not share_path:
    msg = 'Failed to retrieve absolute path for share "%s"' % share
    raise Exception(msg)

return share_path
'Parse share information retrieved using "net share <share name>". :rtype: ``dict``'
def _parse_share_information(self, stdout):
lines = stdout.split('\n')

result = {}
for line in lines:
    line = line.strip()
    split = re.split('\\s{3,}', line)

    if len(split) not in [1, 2]:
        continue

    key = split[0]
    key = key.lower().replace(' ', '_')

    if len(split) == 2:
        value = split[1].strip()
    else:
        value = None

    result[key] = value

return result
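A hedged example of what this parses; the `net share` output below is typical but abbreviated:

import re

stdout = ('Share name   C$\n'
          'Path         C:\\\n'
          'Remark       Default share\n')

result = {}
for line in stdout.split('\n'):
    line = line.strip()
    split = re.split(r'\s{3,}', line)
    if len(split) not in [1, 2]:
        continue
    key = split[0].lower().replace(' ', '_')
    result[key] = split[1].strip() if len(split) == 2 else None

print(result['path'])    # C:\
print(result['remark'])  # Default share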
'Function which performs a simple compile time validation. Keep in mind that some variables are only resolved during run time which means we can perform only simple validation during compile / create time.'
def validate(self):
all_nodes = self._get_all_nodes(action_chain=self.actionchain)

for node in self.actionchain.chain:
    on_success_node_name = node.on_success
    on_failure_node_name = node.on_failure

    # Check the "on-success" path
    valid_name = self._is_valid_node_name(all_node_names=all_nodes,
                                          node_name=on_success_node_name)
    if not valid_name:
        msg = ('Unable to find node with name "%s" referenced in "on-success" '
               'in task "%s".' % (on_success_node_name, node.name))
        raise ValueError(msg)

    # Check the "on-failure" path
    valid_name = self._is_valid_node_name(all_node_names=all_nodes,
                                          node_name=on_failure_node_name)
    if not valid_name:
        msg = ('Unable to find node with name "%s" referenced in "on-failure" '
               'in task "%s".' % (on_failure_node_name, node.name))
        raise ValueError(msg)

# Check the default node name
if self.actionchain.default:
    valid_name = self._is_valid_node_name(all_node_names=all_nodes,
                                          node_name=self.actionchain.default)
    if not valid_name:
        msg = ('Unable to find node with name "%s" referenced in "default".' %
               self.actionchain.default)
        raise ValueError(msg)

return True
'Return names for all the nodes in the chain.'
@staticmethod
def _get_all_nodes(action_chain):
all_nodes = [node.name for node in action_chain.chain]
return all_nodes
'Return names for all the tasks referenced in "on-success".'
@staticmethod
def _get_all_on_success_nodes(action_chain):
on_success_nodes = set([node.on_success for node in action_chain.chain])
return on_success_nodes
'Return names for all the tasks referenced in "on-failure".'
@staticmethod
def _get_all_on_failure_nodes(action_chain):
on_failure_nodes = set([node.on_failure for node in action_chain.chain])
return on_failure_nodes
'Function which validates that the provided node name is defined in the workflow definition and it\'s valid. Keep in mind that we can only perform validation for task names which don\'t include jinja expressions since those are rendered at run time.'
def _is_valid_node_name(self, all_node_names, node_name):
if not node_name:
    return True

is_jinja_expression = jinja_utils.is_jinja_expression(value=node_name)
if is_jinja_expression:
    # Jinja expressions are rendered at run time so they can't be validated here
    return True

return node_name in all_node_names
'If no output is specified on the action_node the output is the entire execution_result. If any output is specified then only those variables are published as output of an execution of this action_node. The output variable can refer to a variable from the execution_result, previous_execution_results or chain_vars.'
@staticmethod
def _render_publish_vars(action_node, action_parameters, execution_result,
                         previous_execution_results, chain_vars):
if not action_node.publish:
    return {}

context = {}
context.update(action_parameters)
context.update({action_node.name: execution_result})
context.update(previous_execution_results)
context.update(chain_vars)
context.update({RESULTS_KEY: previous_execution_results})
context.update({SYSTEM_SCOPE: KeyValueLookup(scope=SYSTEM_SCOPE)})
context.update({DATASTORE_PARENT_SCOPE: {
    SYSTEM_SCOPE: KeyValueLookup(scope=FULL_SYSTEM_SCOPE)
}})

try:
    rendered_result = jinja_utils.render_values(mapping=action_node.publish,
                                                context=context)
except Exception as e:
    key = getattr(e, 'key', None)
    value = getattr(e, 'value', None)
    msg = ('Failed rendering value for publish parameter "%s" in task "%s" '
           '(template string=%s): %s' % (key, action_node.name, value, str(e)))
    raise ParameterRenderingFailedException(msg)

return rendered_result
':param sleep_delay: Number of seconds to wait during "is completed" polls. :type sleep_delay: ``float``'
def _run_action(self, liveaction, wait_for_completion=True, sleep_delay=1.0):
try:
    liveaction, _ = action_service.request(liveaction)
except Exception as e:
    liveaction.status = LIVEACTION_STATUS_FAILED
    LOG.exception('Failed to schedule liveaction.')
    raise e

while (wait_for_completion and
       liveaction.status not in LIVEACTION_COMPLETED_STATES):
    eventlet.sleep(sleep_delay)
    liveaction = action_db_util.get_liveaction_by_id(liveaction.id)

return liveaction
'Format ActionExecution result so it can be used in the final action result output. :rtype: ``dict``'
def _format_action_exec_result(self, action_node, liveaction_db, created_at, updated_at, error=None):
assert isinstance(created_at, datetime.datetime)
assert isinstance(updated_at, datetime.datetime)

result = {}

execution_db = None
if liveaction_db:
    execution_db = ActionExecution.get(liveaction__id=str(liveaction_db.id))

result['id'] = action_node.name
result['name'] = action_node.name
result['execution_id'] = str(execution_db.id) if execution_db else None
result['workflow'] = None

result['created_at'] = isotime.format(dt=created_at)
result['updated_at'] = isotime.format(dt=updated_at)

if error or not liveaction_db:
    result['state'] = LIVEACTION_STATUS_FAILED
else:
    result['state'] = liveaction_db.status

if error:
    result['result'] = error
else:
    result['result'] = liveaction_db.result

return result
':param body: Response body. :type body: ``str`` :return: (parsed body, flag which indicates if body has been parsed) :rtype: (``object``, ``bool``)'
def _parse_response_body(self, headers, body):
body = body or ''
headers = self._normalize_headers(headers=headers)
content_type = headers.get('content-type', None)
parsed = False

if not content_type:
    return (body, parsed)

# Strip the encoding-related part of the header value
content_type = content_type.split(';')[0]

parse_func = RESPONSE_BODY_PARSE_FUNCTIONS.get(content_type, None)

if not parse_func:
    return (body, parsed)

LOG.debug('Parsing body with content type: %s', content_type)

try:
    body = parse_func(body)
except Exception:
    LOG.exception('Failed to parse body')
else:
    parsed = True

return (body, parsed)
'Normalize the header keys by lowercasing all the keys.'
def _normalize_headers(self, headers):
result = {}
for key, value in headers.items():
    result[key.lower()] = value
return result
'Mistral workflow might be kicked off in st2 by a parent Mistral workflow. In that case, we need to make sure that the existing mistral \'context\' is moved as \'parent\' and the child workflow \'context\' is added.'
@staticmethod
def _build_mistral_context(parent, current):
parent = copy.deepcopy(parent)
context = dict()

if not parent:
    context['mistral'] = current
elif 'mistral' in parent.keys():
    orig_parent_context = parent.get('mistral', dict())
    actual_parent = dict()

    if 'workflow_name' in orig_parent_context.keys():
        actual_parent['workflow_name'] = orig_parent_context['workflow_name']
        del orig_parent_context['workflow_name']

    if 'workflow_execution_id' in orig_parent_context.keys():
        actual_parent['workflow_execution_id'] = orig_parent_context['workflow_execution_id']
        del orig_parent_context['workflow_execution_id']

    context['mistral'] = orig_parent_context
    context['mistral'].update(current)
    context['mistral']['parent'] = actual_parent
else:
    context['mistral'] = current

return context
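A worked example of the re-parenting above; the workflow names, IDs, and the auth_token key are made up:

import copy

parent = {'mistral': {'workflow_name': 'examples.parent-wf',
                      'workflow_execution_id': 'aaa-111',
                      'auth_token': 't0k3n'}}
current = {'workflow_name': 'examples.child-wf',
           'workflow_execution_id': 'bbb-222'}

# The same merge the method performs:
ctx = copy.deepcopy(parent)['mistral']
actual_parent = {'workflow_name': ctx.pop('workflow_name'),
                 'workflow_execution_id': ctx.pop('workflow_execution_id')}
ctx.update(current)
ctx['parent'] = actual_parent

print(ctx['workflow_name'])            # examples.child-wf
print(ctx['parent']['workflow_name'])  # examples.parent-wf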
'Queries mistral for workflow results using v2 APIs. :param execution_id: st2 execution_id (context to be used for logging/audit) :type execution_id: ``str`` :param query_context: context for the query to be made to mistral. This contains mistral execution id. :type query_context: ``object`` :param last_query_time: Timestamp of last query. :type last_query_time: ``float`` :rtype: (``str``, ``object``)'
@retrying.retry(retry_on_exception=utils.retry_on_exceptions,
                wait_exponential_multiplier=cfg.CONF.mistral.retry_exp_msec,
                wait_exponential_max=cfg.CONF.mistral.retry_exp_max_msec,
                stop_max_delay=cfg.CONF.mistral.retry_stop_max_msec)
def query(self, execution_id, query_context, last_query_time=None):
dt_last_query_time = (time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(last_query_time))
                      if last_query_time else None)

liveaction_db = action_utils.get_liveaction_by_id(execution_id)

mistral_exec_id = query_context.get('mistral', {}).get('execution_id', None)
if not mistral_exec_id:
    raise Exception('[%s] Missing mistral workflow execution ID in query context. %s' %
                    (execution_id, query_context))

try:
    wf_result = self._get_workflow_result(mistral_exec_id)

    wf_tasks_result = self._get_workflow_tasks(mistral_exec_id,
                                               last_query_time=dt_last_query_time)

    result = self._format_query_result(liveaction_db.result, wf_result,
                                       wf_tasks_result)
except exceptions.ReferenceNotFoundError as exc:
    LOG.exception('[%s] Unable to find reference.', execution_id)
    return (action_constants.LIVEACTION_STATUS_FAILED, exc.message)
except Exception:
    LOG.exception('[%s] Unable to fetch mistral workflow result and tasks. %s',
                  execution_id, query_context)
    raise

status = self._determine_execution_status(liveaction_db, result['extra']['state'],
                                          result['tasks'])

LOG.debug('[%s] mistral workflow execution status: %s' % (execution_id, status))
LOG.debug('[%s] mistral workflow execution result: %s' % (execution_id, result))

return (status, result)
'Returns the workflow status and output. Mistral workflow status will be converted to st2 action status. :param exec_id: Mistral execution ID :type exec_id: ``str`` :rtype: (``str``, ``dict``)'
def _get_workflow_result(self, exec_id):
try:
    jitter = random.uniform(0, self._jitter)
    eventlet.sleep(jitter)
    execution = self._client.executions.get(exec_id)
except mistralclient_base.APIException as mistral_exc:
    if 'not found' in mistral_exc.message:
        raise exceptions.ReferenceNotFoundError(mistral_exc.message)
    raise mistral_exc

result = jsonify.try_loads(execution.output) if execution.state in DONE_STATES else {}

result['extra'] = {
    'state': execution.state,
    'state_info': execution.state_info
}

return result
'Returns the list of tasks for a workflow execution. :param exec_id: Mistral execution ID :type exec_id: ``str`` :param last_query_time: Timestamp to filter tasks :type last_query_time: ``str`` :rtype: ``list``'
def _get_workflow_tasks(self, exec_id, last_query_time=None):
result = []

try:
    query_filters = {}

    if last_query_time:
        query_filters['updated_at'] = 'gte:%s' % last_query_time

    wf_tasks = self._client.tasks.list(workflow_execution_id=exec_id, **query_filters)

    for wf_task in wf_tasks:
        result.append(self._client.tasks.get(wf_task.id))

        # Space out the requests with a small random delay
        jitter = random.uniform(0, self._jitter)
        eventlet.sleep(jitter)
except mistralclient_base.APIException as mistral_exc:
    if 'not found' in mistral_exc.message:
        raise exceptions.ReferenceNotFoundError(mistral_exc.message)
    raise mistral_exc

return [self._format_task_result(task=entry.to_dict()) for entry in result]
'Format task result to follow the unified workflow result format.'
def _format_task_result(self, task):
result = {'id': task['id'],
          'name': task['name'],
          'workflow_execution_id': task.get('workflow_execution_id', None),
          'workflow_name': task['workflow_name'],
          'created_at': task.get('created_at', None),
          'updated_at': task.get('updated_at', None),
          'state': task.get('state', None),
          'state_info': task.get('state_info', None)}

for attr in ['result', 'input', 'published']:
    result[attr] = jsonify.try_loads(task.get(attr, None))

return result
'Validate that the sample role definition which we ship with default installation works.'
def test_load_sample_role_definition(self):
loader = RBACDefinitionsLoader()

file_path = os.path.join(get_fixtures_base_path(), 'rbac/roles/role_sample.yaml')
role_api = loader.load_role_definition_from_file(file_path=file_path)

self.assertEqual(role_api.name, 'sample')
self.assertFalse(role_api.enabled)
'Validate that the sample user role assignment definition which we ship with default installation works.'
def test_load_sample_user_role_assignment_definition(self):
loader = RBACDefinitionsLoader()

file_path = os.path.join(get_fixtures_base_path(), 'rbac/assignments/user_sample.yaml')
assignment_api = loader.load_user_role_assignments_from_file(file_path=file_path)

self.assertEqual(assignment_api.username, 'stackstorm_user')
self.assertFalse(assignment_api.enabled)
'Tests connectivity to the db server. Requires the db server to be running.'
def test_check_connect(self):
client = mongoengine.connection.get_connection()
expected_str = "host=['%s:%s']" % (cfg.CONF.database.host, cfg.CONF.database.port)
self.assertTrue(expected_str in str(client), 'Not connected to desired host.')
'This sensor has a misconfigured trigger type. We shouldn\'t explode.'
def test_add_trigger_type(self):
class FailTestSensor(object):
    started = False

    def setup(self):
        pass

    def start(self):
        FailTestSensor.started = True

    def stop(self):
        pass

    def get_trigger_types(self):
        return [{'description': "Ain't got no name"}]

try:
    trigger_service.add_trigger_models(FailTestSensor().get_trigger_types())
    self.assertTrue(False, "Trigger type doesn't have 'name' field. Should have thrown.")
except Exception:
    self.assertTrue(True)
'Test that INFO log entry does not go to the audit log.'
def test_log_info(self):
logging.setup(self.cfg_path)
log = logging.getLogger(__name__)

msg = uuid.uuid4().hex
log.info(msg)

info_log_entries = open(self.info_log_path).read()
self.assertIn(msg, info_log_entries)

audit_log_entries = open(self.audit_log_path).read()
self.assertNotIn(msg, audit_log_entries)
'Test that CRITICAL log entry does not go to the audit log.'
def test_log_critical(self):
logging.setup(self.cfg_path)
log = logging.getLogger(__name__)

msg = uuid.uuid4().hex
log.critical(msg)

info_log_entries = open(self.info_log_path).read()
self.assertIn(msg, info_log_entries)

audit_log_entries = open(self.audit_log_path).read()
self.assertNotIn(msg, audit_log_entries)
'Test that AUDIT log entry goes to the audit log.'
def test_log_audit(self):
logging.setup(self.cfg_path)
log = logging.getLogger(__name__)

msg = uuid.uuid4().hex
log.audit(msg)

info_log_entries = open(self.info_log_path).read()
self.assertIn(msg, info_log_entries)

audit_log_entries = open(self.audit_log_path).read()
self.assertIn(msg, audit_log_entries)
'Assert that the user has the provided permission.'
def assertUserHasPermission(self, resolver, user_db, permission_type):
self.assertTrue(isinstance(permission_type, six.string_types))

result = resolver.user_has_permission(user_db=user_db, permission_type=permission_type)

if not result:
    msg = ('Expected permission grant "%s" for user "%s" but no grant was found' %
           (permission_type, user_db.name))
    raise AssertionError(msg)

return True
'Assert that the user doesn\'t have the provided permission.'
def assertUserDoesntHavePermission(self, resolver, user_db, permission_type):
self.assertTrue(isinstance(permission_type, six.string_types))

result = resolver.user_has_permission(user_db=user_db, permission_type=permission_type)

if result:
    msg = ('Found permission grant "%s" for user "%s" which shouldn\'t exist' %
           (permission_type, user_db.name))
    raise AssertionError(msg)

return True
'Assert that the user has the provided permission on the provided resource.'
def assertUserHasResourceDbPermission(self, resolver, user_db, resource_db, permission_type):
self.assertTrue(isinstance(permission_type, six.string_types))

result = resolver.user_has_resource_db_permission(user_db=user_db,
                                                  resource_db=resource_db,
                                                  permission_type=permission_type)

if not result:
    msg = ('Expected permission grant "%s" for user "%s" on resource DB "%s", '
           'but no grant was found' % (permission_type, user_db.name,
                                       resource_db.get_uid()))
    raise AssertionError(msg)

return True
'Assert that the user doesn\'t have the provided permission on the provided resource.'
def assertUserDoesntHaveResourceDbPermission(self, resolver, user_db, resource_db, permission_type):
self.assertTrue(isinstance(permission_type, six.string_types))

result = resolver.user_has_resource_db_permission(user_db=user_db,
                                                  resource_db=resource_db,
                                                  permission_type=permission_type)

if result:
    msg = ('Found permission grant "%s" for user "%s" on resource DB "%s", '
           'which shouldn\'t exist' % (permission_type, user_db.name,
                                       resource_db.get_uid()))
    raise AssertionError(msg)

return True
'Assert that the user has all the specified permissions on the provided resource. If permission grant is not found, an AssertionError is thrown.'
def assertUserHasResourceDbPermissions(self, resolver, user_db, resource_db, permission_types):
self.assertTrue(isinstance(permission_types, (list, tuple)))
self.assertTrue(len(permission_types) > 1)

for permission_type in permission_types:
    self.assertUserHasResourceDbPermission(resolver=resolver, user_db=user_db,
                                           resource_db=resource_db,
                                           permission_type=permission_type)

return True
'Assert that the user doesn\'t have all the specified permissions on the provided resource. If a permission grant which shouldn\'t exist is found, an AssertionError is thrown.'
def assertUserDoesntHaveResourceDbPermissions(self, resolver, user_db, resource_db, permission_types):
self.assertTrue(isinstance(permission_types, (list, tuple)))
self.assertTrue(len(permission_types) > 1)

for permission_type in permission_types:
    self.assertUserDoesntHaveResourceDbPermission(resolver=resolver, user_db=user_db,
                                                  resource_db=resource_db,
                                                  permission_type=permission_type)

return True
'Process the object and mask secret values. :type value: ``dict`` :param value: Document dictionary. :rtype: ``dict``'
def mask_secrets(self, value):
return value
'Serialize object to a dictionary which can be serialized as JSON. :param mask_secrets: True to mask secrets in the resulting dict. :type mask_secrets: ``boolean`` :rtype: ``dict``'
def to_serializable_dict(self, mask_secrets=False):
raise NotImplementedError()
'Perform validation and return cleaned object on success. Note: This method doesn\'t mutate this object in place, but it returns a new one. :return: Cleaned / validated object.'
def validate(self):
from st2common.util import schema as util_schema

schema = getattr(self, 'schema', {})
attributes = vars(self)

cleaned = util_schema.validate(instance=attributes, schema=schema,
                               cls=util_schema.CustomValidator, use_default=True,
                               allow_default_none=True)

return type(self)(**cleaned)
'Create API model class instance for the provided DB model instance. :param model: DB model class instance. :type model: :class:`StormFoundationDB` :param mask_secrets: True to mask secrets in the resulting instance. :type mask_secrets: ``boolean``'
@classmethod
def from_model(cls, model, mask_secrets=False):
doc = cls._from_model(model=model, mask_secrets=mask_secrets)
attrs = {attr: value for attr, value in six.iteritems(doc) if value is not None}
return cls(**attrs)
'Create a model class instance for the provided MongoDB document. :param doc: MongoDB document.'
@classmethod
def to_model(cls, doc):
raise NotImplementedError()
'Delete objects by query and return number of deleted objects.'
def delete_by_query(self, **query):
qs = self.model.objects.filter(**query)
count = qs.delete()
log_query_and_profile_data_for_queryset(queryset=qs)
return count
'This method uses a very naive approach to determine if the provided string is a reference - it only checks if this string contains a separator. :param ref: The string to check. :type ref: ``str`` :rtype: ``bool``'
@classmethod
def is_reference(cls, ref):
return (ref and (cls.separator in ref) and (ref.index(cls.separator) > 0))
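With '.' assumed as the separator, the check behaves like this standalone version:

separator = '.'  # assumed pack/name separator

def is_reference(ref):
    return bool(ref and separator in ref and ref.index(separator) > 0)

assert is_reference('linux.check_loadavg') is True
assert is_reference('.hidden') is False  # separator must not lead
assert is_reference('nopack') is False
assert is_reference('') is False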
'Retrieve reference object for this model. :rtype: :class:`PolicyTypeReference`'
def get_reference(self):
return PolicyTypeReference(resource_type=self.resource_type, name=self.name)
'Retrieve roles assigned to that user. :param include_remote: True to also include remote role assignments. :type include_remote: ``bool`` :rtype: ``list`` of :class:`RoleDB`'
def get_roles(self, include_remote=True):
result = get_roles_for_user(user_db=self, include_remote=include_remote)
return result
'Process the model dictionary and mask secret values. :type value: ``dict`` :param value: Document dictionary. :rtype: ``dict``'
def mask_secrets(self, value):
return value
'Serialize database model to a dictionary. :param mask_secrets: True to mask secrets in the resulting dict. :type mask_secrets: ``boolean`` :rtype: ``dict``'
def to_serializable_dict(self, mask_secrets=False):
serializable_dict = {}
for k in sorted(six.iterkeys(self._fields)):
    v = getattr(self, k)
    v = str(v) if isinstance(v, JSON_UNFRIENDLY_TYPES) else v
    serializable_dict[k] = v

if mask_secrets and cfg.CONF.log.mask_secrets:
    serializable_dict = self.mask_secrets(value=serializable_dict)

return serializable_dict
'Return the UID of the pack this resource belongs to. :rtype: ``str``'
def get_pack_uid(self):
parts = [ResourceType.PACK, self.pack]
uid = UIDFieldMixin.UID_SEPARATOR.join(parts)
return uid
'Retrieve reference object for this model. :rtype: :class:`ResourceReference`'
def get_reference(self):
if getattr(self, 'ref', None):
    ref = ResourceReference.from_string_reference(ref=self.ref)
else:
    ref = ResourceReference(pack=self.pack, name=self.name)

return ref
'Return True if this action is a workflow, False otherwise. :rtype: ``bool``'
def is_workflow(self):
return (self.runner_type['name'] in WORKFLOW_RUNNER_TYPES)
'Return a list of all the supported format strings. :rtype: ``list`` of ``str``'
def get_format_strings(self):
result = []

formats = getattr(self, 'formats', [])
for format_string in formats:
    if isinstance(format_string, dict) and format_string.get('representation', None):
        result.extend(format_string['representation'])
    else:
        result.append(format_string)

return result
'Retrieve parameters with the secrets masked. :rtype: ``dict``'
def get_masked_parameters(self):
serializable_dict = self.to_serializable_dict(mask_secrets=True)
return serializable_dict['parameters']
'Process the model dictionary and mask secret values. :type value: ``dict`` :param value: Document dictionary. :rtype: ``dict``'
def mask_secrets(self, value):
result = copy.deepcopy(value)

config_schema = config_schema_access.get_by_pack(result['pack'])

secret_parameters = get_secret_parameters(parameters=config_schema.attributes)
result['values'] = mask_secret_parameters(parameters=result['values'],
                                          secret_parameters=secret_parameters)

return result
'Retrieve parameters with the secrets masked. :rtype: ``dict``'
def get_masked_parameters(self):
serializable_dict = self.to_serializable_dict(mask_secrets=True)
return serializable_dict['parameters']
'Match command against the format string and extract parameters from the command string. :rtype: ``dict``'
def get_extracted_param_value(self):
result = {}

param_stream = self._param_stream

# Regex snippets for the different parts of the format string
snippets = dict()
snippets['key'] = '\\s*(\\S+?)\\s*'
snippets['value'] = '""|\\\'\\\'|"(.+?)"|\\\'(.+?)\\\'|({.+?})|(\\S+)'
snippets['ext_value'] = '""|\\\'\\\'|"(.+?)"|\\\'(.+?)\\\'|({.+?})|(.+?)'
snippets['pairs'] = '(?:^|\\s+){key}=({value})'.format(**snippets)
snippets['ending'] = '.*?(({pairs}\\s*)*)$'.format(**snippets)
snippets['default'] = '\\s*=\\s*(?:{ext_value})\\s*'.format(**snippets)
snippets['optional'] = '{{' + snippets['key'] + snippets['default'] + '}}'
snippets['required'] = '{{' + snippets['key'] + '}}'

# 1. Match the arbitrary key=value pairs at the end of the command
ending_pairs = re.match(snippets['ending'], param_stream, re.DOTALL)
has_ending_pairs = ending_pairs and ending_pairs.group(1)
if has_ending_pairs:
    kv_pairs = re.findall(snippets['pairs'], ending_pairs.group(1), re.DOTALL)
    param_stream = param_stream.replace(ending_pairs.group(1), '')
param_stream = ' %s ' % param_stream

# 2. Find the optional parameters in the format string
optional = re.findall(snippets['optional'], self._format, re.DOTALL)

# 3. Convert the format string into a regular expression
param_match = '\\1["\\\']?(?P<\\2>(?:(?<=\\\').+?(?=\\\')|(?<=").+?(?=")|{.+?}|.+?))["\\\']?'
reg = re.sub('(\\s*)' + snippets['optional'], '(?:' + param_match + ')?', self._format)
reg = re.sub('(\\s*)' + snippets['required'], param_match, reg)
reg = '^\\s*' + reg + '\\s*$'

# 4. Match the command against the regular expression
matched_stream = re.match(reg, param_stream, re.DOTALL)

if not matched_stream:
    raise ParseException('Command "%s" doesn\'t match format string "%s"' %
                         (self._param_stream, self._format))

if matched_stream:
    result = matched_stream.groupdict()

# Fall back to default values for optional parameters which were not provided
for param in optional:
    matched_value = result[param[0]] if matched_stream else None
    matched_result = matched_value or ''.join(param[1:])
    if matched_result is not None:
        result[param[0]] = matched_result

# Merge in the arbitrary key=value pairs from the end of the stream
if has_ending_pairs:
    for pair in kv_pairs:
        result[pair[0]] = ''.join(pair[2:])

if self._format and not (self._param_stream.strip() or any(result.values())):
    raise ParseException('No value supplied and no default value found.')

return result
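The trailing key=value extraction above can be isolated into a runnable snippet; the command string is made up:

import re

key = r'\s*(\S+?)\s*'
value = r'""|\'\'|"(.+?)"|\'(.+?)\'|({.+?})|(\S+)'
pairs = r'(?:^|\s+)%s=(%s)' % (key, value)
ending = r'.*?((%s\s*)*)$' % pairs

stream = 'run uptime host=web01 user="svc account"'
matched = re.match(ending, stream, re.DOTALL)
kv = {g[0]: (g[2] or g[3] or g[4] or g[5])
      for g in re.findall(pairs, matched.group(1), re.DOTALL)}
print(kv)  # {'host': 'web01', 'user': 'svc account'}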
'This method uses a very naive approach to determine if the provided string is a resource reference - it only checks if this string contains a separator. :rtype ref: ``str``'
@staticmethod
def is_resource_reference(ref):
return (PACK_SEPARATOR in ref)
'Retrieve a list of flags which are passed to sudo on every invocation. :rtype: ``list``'
def _get_common_sudo_arguments(self):
flags = copy.copy(SUDO_COMMON_OPTIONS)
return flags
'Retrieve a list of flags which are passed to sudo when running as a different user and "-u" flag is used. :rtype: ``list``'
def _get_user_sudo_arguments(self, user):
flags = self._get_common_sudo_arguments()
flags += copy.copy(SUDO_DIFFERENT_USER_OPTIONS)
flags += ['-u', user]

return flags
'Escape the command arguments and form a command string. :type cmd: ``str`` :type args: ``list`` :rtype: ``str``'
def _get_command_string(self, cmd, args):
assert isinstance(args, (list, tuple))

args = [quote_unix(arg) for arg in args]
args = ' '.join(args)
result = '%s %s' % (cmd, args)

return result
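For example, with Python's stdlib shlex.quote standing in for quote_unix:

from shlex import quote  # stand-in for quote_unix

cmd = 'echo'
args = ['hello world', "it's"]
print('%s %s' % (cmd, ' '.join(quote(arg) for arg in args)))
# echo 'hello world' 'it'"'"'s'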
'Prepares a structured error result based on the exception retrieved from ``sys.exc_info()``. :rtype: ``dict``'
def _get_error_result(self):
_, exc_value, exc_traceback = sys.exc_info()

exc_value = str(exc_value)
exc_traceback = ''.join(traceback.format_tb(exc_traceback))

result = {}
result['failed'] = True
result['succeeded'] = False
result['error'] = exc_value
result['traceback'] = exc_traceback

return result
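Usage sketch: the same dict shape, populated inside an except block (the failing call is illustrative):

import sys
import traceback

def get_error_result():
    # Same shape as the method above
    _, exc_value, exc_traceback = sys.exc_info()
    return {'failed': True,
            'succeeded': False,
            'error': str(exc_value),
            'traceback': ''.join(traceback.format_tb(exc_traceback))}

try:
    1 / 0  # illustrative failure
except Exception:
    result = get_error_result()
    print(result['error'])  # division by zero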