'Create a Request object using environ.'
def run(self):
env = self.environ.get
local = httputil.Host('', int(env('SERVER_PORT', 80)), env('SERVER_NAME', ''))
remote = httputil.Host(env('REMOTE_ADDR', ''), int(env('REMOTE_PORT', -1)), env('REMOTE_HOST', ''))
scheme = env('wsgi.url_scheme')
sproto = env('ACTUAL_SERVER_PROTOCOL', 'HTTP/1.1')
request, resp = self.cpapp.get_serving(local, remote, scheme, sproto)

request.login = env('LOGON_USER') or env('REMOTE_USER') or None
request.multithread = self.environ['wsgi.multithread']
request.multiprocess = self.environ['wsgi.multiprocess']
request.wsgi_environ = self.environ
request.prev = env('cherrypy.previous_request', None)

meth = self.environ['REQUEST_METHOD']
path = httputil.urljoin(self.environ.get('SCRIPT_NAME', ''), self.environ.get('PATH_INFO', ''))
qs = self.environ.get('QUERY_STRING', '')
rproto = self.environ.get('SERVER_PROTOCOL')
headers = self.translate_headers(self.environ)
rfile = self.environ['wsgi.input']
request.run(meth, path, qs, rproto, headers, rfile)
'Translate CGI-environ header names to HTTP header names.'
def translate_headers(self, environ):
for cgiName in environ:
    if cgiName in self.headerNames:
        yield self.headerNames[cgiName], environ[cgiName]
    elif cgiName[:5] == 'HTTP_':
        translatedHeader = cgiName[5:].replace('_', '-')
        yield translatedHeader, environ[cgiName]
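For illustration, a minimal standalone sketch of the same CGI-to-HTTP header translation; the SPECIAL_HEADERS mapping and sample environ below are made up for the example and are not CherryPy's actual tables.

# Hypothetical sketch: translate CGI-style environ keys into HTTP header names.
SPECIAL_HEADERS = {'CONTENT_TYPE': 'Content-Type', 'CONTENT_LENGTH': 'Content-Length'}

def translate_headers(environ):
    for key, value in environ.items():
        if key in SPECIAL_HEADERS:
            yield SPECIAL_HEADERS[key], value
        elif key.startswith('HTTP_'):
            # Header names are recovered as uppercase words joined by dashes.
            yield key[5:].replace('_', '-'), value

sample_environ = {'HTTP_USER_AGENT': 'curl/8.0', 'CONTENT_TYPE': 'text/plain'}
print(dict(translate_headers(sample_environ)))
# {'USER-AGENT': 'curl/8.0', 'Content-Type': 'text/plain'}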
'WSGI application callable for the actual CherryPy application. You probably shouldn\'t call this; call self.__call__ instead, so that any WSGI middleware in self.pipeline can run first.'
def tail(self, environ, start_response):
return self.response_class(environ, start_response, self.cpapp)
'Config handler for the \'wsgi\' namespace.'
def namespace_handler(self, k, v):
if k == 'pipeline':
    self.pipeline.extend(v)
elif k == 'response_class':
    self.response_class = v
else:
    name, arg = k.split('.', 1)
    bucket = self.config.setdefault(name, {})
    bucket[arg] = v
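A quick standalone sketch of the dotted-key bucketing pattern used in the fallback branch above; the option names here are invented for the example.

config = {}

def set_option(config, key, value):
    # 'staticdir.root' is split into a bucket name and an argument name.
    name, arg = key.split('.', 1)
    bucket = config.setdefault(name, {})
    bucket[arg] = value

set_option(config, 'staticdir.root', '/srv/www')
set_option(config, 'staticdir.debug', True)
print(config)  # {'staticdir': {'root': '/srv/www', 'debug': True}}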
'Write data to file_path.'
@abc.abstractmethod
def write(self, data, file_path, replace=False):
pass
'All TriggerDB objects that have parameters.'
def _get_trigger_with_parameters(self):
return TriggerDB.objects((Q(parameters__exists=True) & Q(parameters__nin=[{}])))
'All rules that reference the supplied trigger_ref.'
def _get_rules_for_trigger(self, trigger_ref):
return Rule.get_all(**{'trigger': trigger_ref})
'Update the ref_count of a TriggerDB without publishing the change.'
def _update_trigger_ref_count(self, trigger_db, ref_count):
trigger_db.ref_count = ref_count
Trigger.add_or_update(trigger_db, publish=False, dispatch_trigger=False)
'Migrate all Triggers that should have a ref_count so that their ref_count is correct.'
def migrate(self):
trigger_dbs = self._get_trigger_with_parameters()
for trigger_db in trigger_dbs:
    trigger_ref = trigger_db.get_reference().ref
    rules = self._get_rules_for_trigger(trigger_ref=trigger_ref)
    ref_count = len(rules)
    print('Updating Trigger %s to ref_count %s' % (trigger_ref, ref_count))
    self._update_trigger_ref_count(trigger_db=trigger_db, ref_count=ref_count)
'Initialize a DebugInfoCollector object. :param include_logs: Include log files in generated archive. :type include_logs: ``bool`` :param include_configs: Include config files in generated archive. :type include_configs: ``bool`` :param include_content: Include pack contents in generated archive. :type include_content: ``bool`` :param include_system_info: Include system information in generated archive. :type include_system_info: ``bool`` :param include_shell_commands: Include shell command output in generated archive. :type include_shell_commands: ``bool`` :param user_info: User info to be included in generated archive. :type user_info: ``dict`` :param debug: Enable debug logging. :type debug: ``bool`` :param config_file: Values from config file to override defaults. :type config_file: ``dict`` :param output_path: Path to write output file to. (optional) :type output_path: ``str``'
def __init__(self, include_logs, include_configs, include_content, include_system_info, include_shell_commands=False, user_info=None, debug=False, config_file=None, output_path=None):
self.include_logs = include_logs
self.include_configs = include_configs
self.include_content = include_content
self.include_system_info = include_system_info
self.include_shell_commands = include_shell_commands
self.user_info = user_info
self.debug = debug
self.output_path = output_path

config_file = config_file or {}
self.st2_config_file_path = config_file.get('st2_config_file_path', ST2_CONFIG_FILE_PATH)
self.mistral_config_file_path = config_file.get('mistral_config_file_path', MISTRAL_CONFIG_FILE_PATH)
self.log_file_paths = config_file.get('log_file_paths', LOG_FILE_PATHS[:])
self.gpg_key = config_file.get('gpg_key', GPG_KEY)
self.gpg_key_fingerprint = config_file.get('gpg_key_fingerprint', GPG_KEY_FINGERPRINT)
self.s3_bucket_url = config_file.get('s3_bucket_url', S3_BUCKET_URL)
self.company_name = config_file.get('company_name', COMPANY_NAME)
self.shell_commands = config_file.get('shell_commands', SHELL_COMMANDS)

self.st2_config_file_name = os.path.basename(self.st2_config_file_path)
self.mistral_config_file_name = os.path.basename(self.mistral_config_file_path)
self.config_file_paths = [self.st2_config_file_path, self.mistral_config_file_path]
'Run the specified steps. :param encrypt: If true, encrypt the archive file. :type encrypt: ``bool`` :param upload: If true, upload the resulting file. :type upload: ``bool`` :param existing_file: Path to an existing archive file. If not specified, a new archive will be created. :type existing_file: ``str``'
def run(self, encrypt=False, upload=False, existing_file=None):
temp_files = []

try:
    if existing_file:
        working_file = existing_file
    else:
        working_file = self.create_archive()
        if not encrypt and not upload:
            LOG.info('Debug tarball successfully generated and can be reviewed at: %s' % working_file)
        else:
            temp_files.append(working_file)

    if encrypt:
        working_file = self.encrypt_archive(archive_file_path=working_file)
        if not upload:
            LOG.info('Encrypted debug tarball successfully generated at: %s' % working_file)
        else:
            temp_files.append(working_file)

    if upload:
        self.upload_archive(archive_file_path=working_file)
        tarball_name = os.path.basename(working_file)
        LOG.info('Debug tarball successfully uploaded to %s (name=%s)' % (self.company_name, tarball_name))
        LOG.info('When communicating with support, please let them know the tarball name - %s' % tarball_name)
finally:
    for temp_file in temp_files:
        assert temp_file.startswith('/tmp')
        remove_file(file_path=temp_file)
'Create an archive with debugging information. :return: Path to the generated archive. :rtype: ``str``'
def create_archive(self):
try:
    self._temp_dir_path = self.create_temp_directories()

    output_paths = {}
    for key, path in OUTPUT_PATHS.iteritems():
        output_paths[key] = os.path.join(self._temp_dir_path, path)

    LOG.info('Collecting files...')
    if self.include_logs:
        self.collect_logs(output_paths['logs'])
    if self.include_configs:
        self.collect_config_files(output_paths['configs'])
    if self.include_content:
        self.collect_pack_content(output_paths['content'])
    if self.include_system_info:
        self.add_system_information(output_paths['system_info'])
    if self.user_info:
        self.add_user_info(output_paths['user_info'])
    if self.include_shell_commands:
        self.add_shell_command_output(output_paths['commands'])

    return self.create_tarball(self._temp_dir_path)
except Exception as e:
    LOG.exception('Failed to generate tarball', exc_info=True)
    raise e
finally:
    assert self._temp_dir_path.startswith('/tmp')
    remove_dir(self._temp_dir_path)
'Encrypt archive with debugging information using our public key. :param archive_file_path: Path to the non-encrypted tarball file. :type archive_file_path: ``str`` :return: Path to the encrypted archive. :rtype: ``str``'
def encrypt_archive(self, archive_file_path):
try:
    assert archive_file_path.endswith('.tar.gz')

    LOG.info('Encrypting tarball...')
    gpg = gnupg.GPG(verbose=self.debug)

    import_result = gpg.import_keys(self.gpg_key)
    assert import_result.count == 1

    encrypted_archive_output_file_name = os.path.basename(archive_file_path) + '.asc'
    encrypted_archive_output_file_path = os.path.join('/tmp', encrypted_archive_output_file_name)
    with open(archive_file_path, 'rb') as fp:
        gpg.encrypt_file(file=fp, recipients=self.gpg_key_fingerprint, always_trust=True,
                         output=encrypted_archive_output_file_path)
    return encrypted_archive_output_file_path
except Exception as e:
    LOG.exception('Failed to encrypt archive', exc_info=True)
    raise e
'Upload the encrypted archive. :param archive_file_path: Path to the encrypted tarball file. :type archive_file_path: ``str``'
def upload_archive(self, archive_file_path):
try:
    assert archive_file_path.endswith('.asc')

    LOG.debug('Uploading tarball...')
    file_name = os.path.basename(archive_file_path)
    url = self.s3_bucket_url + file_name
    assert url.startswith('https://')

    with open(archive_file_path, 'rb') as fp:
        response = requests.put(url=url, files={'file': fp})
    assert response.status_code == httplib.OK
except Exception as e:
    LOG.exception('Failed to upload tarball to %s' % self.company_name, exc_info=True)
    raise e
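As a standalone illustration of the upload step, a minimal sketch using requests; the bucket URL below is a placeholder assumption, not the real upload target.

import os
import requests

# Hypothetical upload target; the real bucket URL comes from configuration.
BUCKET_URL = 'https://example-bucket.s3.amazonaws.com/'

def upload_file(path):
    # PUT the file to <bucket>/<basename> and fail loudly on a non-2xx response.
    url = BUCKET_URL + os.path.basename(path)
    with open(path, 'rb') as fp:
        response = requests.put(url, files={'file': fp})
    response.raise_for_status()
    return url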
'Copy log files to the output path. :param output_path: Path where log files will be copied to. :type output_path: ``str``'
def collect_logs(self, output_path):
LOG.debug('Including log files')
for file_path_glob in self.log_file_paths:
    log_file_list = get_full_file_list(file_path_glob=file_path_glob)
    copy_files(file_paths=log_file_list, destination=output_path)
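get_full_file_list and copy_files are project helpers; a minimal standalone equivalent of the same glob-and-copy step, using only the standard library, might look like this sketch (the example glob is an assumption).

import glob
import shutil

def copy_matching_files(file_path_glob, destination):
    # Expand the glob and copy each matching file into the destination directory.
    for file_path in glob.glob(file_path_glob):
        shutil.copy(file_path, destination)

# copy_matching_files('/var/log/st2/*.log', '/tmp/st2-debug-output/logs')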
'Copy config files to the output path. :param output_path: Path where config files will be copied to. :type output_path: ``str``'
def collect_config_files(self, output_path):
LOG.debug('Including config files')
copy_files(file_paths=self.config_file_paths, destination=output_path)

st2_config_path = os.path.join(output_path, self.st2_config_file_name)
process_st2_config(config_path=st2_config_path)

mistral_config_path = os.path.join(output_path, self.mistral_config_file_name)
process_mistral_config(config_path=mistral_config_path)
'Copy pack contents to the output path. :param output_path: Path where pack contents will be copied to. :type output_path: ``str``'
@staticmethod
def collect_pack_content(output_path):
LOG.debug('Including content')

packs_base_paths = get_packs_base_paths()
for index, packs_base_path in enumerate(packs_base_paths, 1):
    dst = os.path.join(output_path, 'dir-%s' % index)
    try:
        shutil.copytree(src=packs_base_path, dst=dst)
    except IOError:
        continue

base_pack_dirs = get_dirs_in_path(file_path=output_path)
for base_pack_dir in base_pack_dirs:
    pack_dirs = get_dirs_in_path(file_path=base_pack_dir)
    for pack_dir in pack_dirs:
        process_content_pack_dir(pack_dir=pack_dir)
'Collect and write system information to output path. :param output_path: Path where system information will be written to. :type output_path: ``str``'
def add_system_information(self, output_path):
LOG.debug('Including system info')
system_information = yaml.safe_dump(self.get_system_information(), default_flow_style=False)
with open(output_path, 'w') as fp:
    fp.write(system_information)
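A minimal standalone sketch of the same dump-to-YAML step, assuming PyYAML is installed; the data in the example is made up.

import socket
import yaml

info = {'hostname': socket.gethostname(), 'python': {'version': '3.8.10'}}
with open('/tmp/system-info.yaml', 'w') as fp:
    # safe_dump with default_flow_style=False produces block-style YAML.
    fp.write(yaml.safe_dump(info, default_flow_style=False))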
'Write user info to output path as YAML. :param output_path: Path where user info will be written. :type output_path: ``str``'
def add_user_info(self, output_path):
LOG.debug('Including user info')
user_info = yaml.safe_dump(self.user_info, default_flow_style=False)
with open(output_path, 'w') as fp:
    fp.write(user_info)
'Get the output of the required shell commands and redirect the output to files under the output path. :param output_path: Directory where output files will be written. :type output_path: ``str``'
def add_shell_command_output(self, output_path):
LOG.debug('Including the required shell commands output files')
for cmd in self.shell_commands:
    output_file = os.path.join(output_path, '%s.txt' % self.format_output_filename(cmd))
    exit_code, stdout, stderr = run_command(cmd=cmd, shell=True, cwd=output_path)
    with open(output_file, 'w') as fp:
        fp.write('[BEGIN STDOUT]\n')
        fp.write(stdout)
        fp.write('[END STDOUT]\n')
        fp.write('[BEGIN STDERR]\n')
        fp.write(stderr)
        fp.write('[END STDERR]')
'Create a tarball with the contents of temp_dir_path. The tarball is written to self.output_path, if set; otherwise it is written to /tmp with a name generated according to OUTPUT_FILENAME_TEMPLATE. :param temp_dir_path: Base directory to include in the tarball. :type temp_dir_path: ``str`` :return: Path to the created tarball. :rtype: ``str``'
def create_tarball(self, temp_dir_path):
LOG.info('Creating tarball...')
if self.output_path:
    output_file_path = self.output_path
else:
    date = date_utils.get_datetime_utc_now().strftime(DATE_FORMAT)
    values = {'hostname': socket.gethostname(), 'date': date}
    output_file_name = OUTPUT_FILENAME_TEMPLATE % values
    output_file_path = os.path.join('/tmp', output_file_name)

with tarfile.open(output_file_path, 'w:gz') as tar:
    tar.add(temp_dir_path, arcname='')

return output_file_path
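For reference, a standalone sketch of building a timestamped gzipped tarball with the standard library; the filename template here is an assumption for the example, not the project's OUTPUT_FILENAME_TEMPLATE.

import os
import socket
import tarfile
from datetime import datetime

def create_tarball(source_dir, output_dir='/tmp'):
    # Name the archive after the host and the current UTC timestamp.
    date = datetime.utcnow().strftime('%Y-%m-%d-%H%M%S')
    name = 'st2-debug-output-%s-%s.tar.gz' % (socket.gethostname(), date)
    output_path = os.path.join(output_dir, name)
    with tarfile.open(output_path, 'w:gz') as tar:
        tar.add(source_dir, arcname='')
    return output_path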
'Creates a new temp directory and creates the directory structure as defined by DIRECTORY_STRUCTURE. :return: Path to temp directory. :rtype: ``str``'
@staticmethod
def create_temp_directories():
temp_dir_path = tempfile.mkdtemp()
for directory_name in DIRECTORY_STRUCTURE:
    full_path = os.path.join(temp_dir_path, directory_name)
    os.mkdir(full_path)
return temp_dir_path
'Remove whitespace and special characters from a shell command. Used to create filename-safe representations of a shell command. :param cmd: Shell command. :type cmd: ``str`` :return: Formatted filename. :rtype: ``str``'
@staticmethod
def format_output_filename(cmd):
return cmd.translate(None, ' !@#$%^&*()[]{};:,./<>?\\|`~=+"\'')
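Note that str.translate(None, deletechars) is the Python 2 form of this call. A rough Python 3 equivalent of the same sanitisation, shown as an illustrative sketch:

def format_output_filename(cmd):
    # Python 3 str.translate takes a mapping; map each unwanted character to None.
    delete_chars = ' !@#$%^&*()[]{};:,./<>?\\|`~=+"\''
    return cmd.translate({ord(ch): None for ch in delete_chars})

print(format_output_filename('df -h'))  # df-h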
'Retrieve system information which is included in the report. :rtype: ``dict``'
@staticmethod
def get_system_information():
system_information = {'hostname': socket.gethostname(), 'operating_system': {},
                      'hardware': {'cpu': {}, 'memory': {}}, 'python': {},
                      'stackstorm': {}, 'mistral': {}}

system_information['operating_system']['system'] = platform.system()
system_information['operating_system']['release'] = platform.release()
system_information['operating_system']['operating_system'] = platform.platform()
system_information['operating_system']['platform'] = platform.system()
system_information['operating_system']['architecture'] = ' '.join(platform.architecture())

if platform.system().lower() == 'linux':
    distribution = ' '.join(platform.linux_distribution())
    system_information['operating_system']['distribution'] = distribution

system_information['python']['version'] = sys.version.split('\n')[0]

cpu_info = get_cpu_info()
if cpu_info:
    core_count = len(cpu_info)
    model = cpu_info[0]['model_name']
    system_information['hardware']['cpu'] = {'core_count': core_count, 'model_name': model}
else:
    system_information['hardware']['cpu'] = 'unsupported platform'

memory_info = get_memory_info()
if memory_info:
    total = memory_info['MemTotal'] / 1024
    free = memory_info['MemFree'] / 1024
    used = total - free
    system_information['hardware']['memory'] = {'total': total, 'used': used, 'free': free}
else:
    system_information['hardware']['memory'] = 'unsupported platform'

system_information['stackstorm']['version'] = st2_version

st2common_path = st2common.__file__
st2common_path = os.path.dirname(st2common_path)

if 'st2common/st2common' in st2common_path:
    base_install_path = st2common_path.replace('/st2common/st2common', '')
    revision_hash = get_repo_latest_revision_hash(repo_path=base_install_path)
    system_information['stackstorm']['installation_method'] = 'source'
    system_information['stackstorm']['revision_hash'] = revision_hash
else:
    package_list = get_package_list(name_startswith='st2')
    system_information['stackstorm']['installation_method'] = 'package'
    system_information['stackstorm']['packages'] = package_list

repo_path = '/opt/openstack/mistral'
revision_hash = get_repo_latest_revision_hash(repo_path=repo_path)
system_information['mistral']['installation_method'] = 'source'
system_information['mistral']['revision_hash'] = revision_hash

return system_information
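For a self-contained feel of what this collector gathers, a minimal sketch that pulls just the portable pieces from the standard library (platform.linux_distribution was removed in Python 3.8, so it is omitted here; this is an illustration, not the project's helper).

import platform
import socket
import sys

def get_basic_system_information():
    return {
        'hostname': socket.gethostname(),
        'operating_system': {
            'system': platform.system(),
            'release': platform.release(),
            'architecture': ' '.join(platform.architecture()),
        },
        'python': {'version': sys.version.split('\n')[0]},
    }

print(get_basic_system_information())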
'Parse query string for the provided request. :rtype: ``dict``'
def _parse_query_params(self, request):
query_string = request.query_string
query_params = dict(urlparse.parse_qsl(query_string))
return query_params
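A quick standalone illustration of the same parsing, using the Python 3 location of parse_qsl; the query string is an example value.

from urllib.parse import parse_qsl

query_string = 'limit=10&show_secrets=true'
print(dict(parse_qsl(query_string)))  # {'limit': '10', 'show_secrets': 'true'}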
'Return a value for the provided query param and optionally cast it for boolean types. If the requested query parameter is not provided, default value is returned instead. :param request: Request object. :param param_name: Name of the param to retrieve the value for. :type param_name: ``str`` :param param_type: Type of the query param (e.g. "bool"). :type param_type: ``str`` :param default_value: Value to return if query param is not provided. :type default_value: ``object``'
def _get_query_param_value(self, request, param_name, param_type, default_value=None):
query_params = self._parse_query_params(request=request)
value = query_params.get(param_name, default_value)
if param_type == 'bool' and isinstance(value, six.string_types):
    value = transform_to_bool(value)
return value
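transform_to_bool is an st2common helper; a hypothetical minimal version is sketched below purely for illustration and may not match the real implementation.

def transform_to_bool(value):
    # Interpret common truthy / falsy query string values; reject anything else.
    if value in ('true', 'True', '1'):
        return True
    if value in ('false', 'False', '0'):
        return False
    raise ValueError('Invalid bool representation: %s' % value)

print(transform_to_bool('true'), transform_to_bool('0'))  # True False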
'Return a value for mask_secrets which can be used when masking secret properties returned by any API. The default value comes from the config; however, admin users can override it by passing the special query parameter ?show_secrets=True. :rtype: ``bool``'
def _get_mask_secrets(self, requester_user, show_secrets=None):
mask_secrets = cfg.CONF.api.mask_secrets
if show_secrets and rbac_utils.user_is_admin(user_db=requester_user):
    mask_secrets = False
return mask_secrets
'Create a new rule. Handles requests: POST /rules/'
def post(self, rule, requester_user):
permission_type = PermissionType.RULE_CREATE rbac_utils.assert_user_has_resource_api_permission(user_db=requester_user, resource_api=rule, permission_type=permission_type) try: rule_db = RuleAPI.to_model(rule) LOG.debug('/rules/ POST verified RuleAPI and formulated RuleDB=%s', rule_db) assert_user_has_rule_trigger_and_action_permission(user_db=requester_user, rule_api=rule) rule_db = Rule.add_or_update(rule_db) increment_trigger_ref_count(rule_api=rule) except (ValidationError, ValueError) as e: LOG.exception('Validation failed for rule data=%s.', rule) abort(http_client.BAD_REQUEST, str(e)) return except (ValueValidationException, jsonschema.ValidationError) as e: LOG.exception('Validation failed for rule data=%s.', rule) abort(http_client.BAD_REQUEST, str(e)) return except TriggerDoesNotExistException as e: msg = ('Trigger "%s" defined in the rule does not exist in system or it\'s missing required "parameters" attribute' % rule.trigger['type']) LOG.exception(msg) abort(http_client.BAD_REQUEST, msg) return extra = {'rule_db': rule_db} LOG.audit(('Rule created. Rule.id=%s' % rule_db.id), extra=extra) rule_api = RuleAPI.from_model(rule_db) return Response(json=rule_api, status=exc.HTTPCreated.code)
'Delete a rule. Handles requests: DELETE /rules/1'
def delete(self, rule_ref_or_id, requester_user):
rule_db = self._get_by_ref_or_id(ref_or_id=rule_ref_or_id)

permission_type = PermissionType.RULE_DELETE
rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user, resource_db=rule_db, permission_type=permission_type)

LOG.debug('DELETE /rules/ lookup with id=%s found object: %s', rule_ref_or_id, rule_db)
try:
    Rule.delete(rule_db)
except Exception as e:
    LOG.exception('Database delete encountered exception during delete of id="%s".', rule_ref_or_id)
    abort(http_client.INTERNAL_SERVER_ERROR, str(e))
    return

cleanup_trigger_db_for_rule(rule_db)

extra = {'rule_db': rule_db}
LOG.audit('Rule deleted. Rule.id=%s.' % rule_db.id, extra=extra)

return Response(status=http_client.NO_CONTENT)
'List RuleType objects by id. Handle: GET /ruletypes/1'
def get_one(self, id):
ruletype_db = RuleTypesController.__get_by_id(id)
ruletype_api = RuleTypeAPI.from_model(ruletype_db)
return ruletype_api
'List all RuleType objects. Handles requests: GET /ruletypes/'
def get_all(self):
ruletype_dbs = RuleType.get_all()
ruletype_apis = [RuleTypeAPI.from_model(runnertype_db) for runnertype_db in ruletype_dbs]
return ruletype_apis
'Remove the trailing and leading "/" so that the hook URL and the URLs coming from trigger parameters end up being the same.'
def _get_normalized_url(self, trigger):
return trigger['parameters']['url'].strip('/')
'List all distinct filters. Handles requests: GET /executions/views/filters[?types=action,rule] :param types: Comma delimited string of filter types to output. :type types: ``str``'
def get_all(self, types=None):
filters = {}

for name, field in six.iteritems(SUPPORTED_FILTERS):
    if name not in IGNORE_FILTERS and (not types or name in types):
        if name not in FILTERS_WITH_VALID_NULL_VALUES:
            query = {field.replace('.', '__'): {'$ne': None}}
        else:
            query = {}
        filters[name] = ActionExecution.distinct(field=field, **query)

return filters
'Reduces the number of queries to be made to the DB by creating sets of Actions, Triggers and TriggerTypes.'
def _get_referenced_models(self, rules):
action_refs = set() trigger_refs = set() trigger_type_refs = set() for rule in rules: action_refs.add(rule['action']['ref']) trigger_refs.add(rule['trigger']['ref']) trigger_type_refs.add(rule['trigger']['type']) action_by_refs = {} trigger_by_refs = {} trigger_type_by_refs = {} def ref_query_args(ref): return {'ref': ref} def name_pack_query_args(ref): resource_ref = ResourceReference.from_string_reference(ref=ref) return {'name': resource_ref.name, 'pack': resource_ref.pack} action_dbs = self._get_entities(model_persistence=Action, refs=action_refs, query_args=ref_query_args) for action_db in action_dbs: action_by_refs[action_db.ref] = action_db trigger_dbs = self._get_entities(model_persistence=Trigger, refs=trigger_refs, query_args=name_pack_query_args) for trigger_db in trigger_dbs: trigger_by_refs[trigger_db.get_reference().ref] = trigger_db trigger_type_dbs = self._get_entities(model_persistence=TriggerType, refs=trigger_type_refs, query_args=name_pack_query_args) for trigger_type_db in trigger_type_dbs: trigger_type_by_refs[trigger_type_db.get_reference().ref] = trigger_type_db return (action_by_refs, trigger_by_refs, trigger_type_by_refs)
'Return all the entities for the supplied refs. model_persistence is the persistence object used to reach the correct query method, and query_args is a function that returns the ref-specific query arguments. This method is so narrowly specific that it is likely only useful in this context.'
def _get_entities(self, model_persistence, refs, query_args):
q = None
for ref in refs:
    if not q:
        q = Q(**query_args(ref))
    else:
        q |= Q(**query_args(ref))
if q:
    return model_persistence._get_impl().model.objects(q)
return []
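The OR-chaining of mongoengine Q objects above can also be written with functools.reduce; a hedged sketch, assuming mongoengine is installed and using a made-up document class purely for illustration.

import operator
from functools import reduce

from mongoengine import Document, StringField, Q

class TriggerDoc(Document):  # illustrative stand-in model, not a real st2 model
    name = StringField()
    pack = StringField()

refs = [('pack1', 'trig1'), ('pack2', 'trig2')]
# Build one combined OR query from the per-ref Q objects.
query = reduce(operator.or_, (Q(pack=pack, name=name) for pack, name in refs))
# TriggerDoc.objects(query) would run the combined query against a live database.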
'Retrieve configs for all the packs. Handles requests: GET /configs/'
def get_all(self, requester_user, sort=None, offset=0, limit=None, show_secrets=False, **raw_filters):
from_model_kwargs = {'mask_secrets': self._get_mask_secrets(requester_user, show_secrets=show_secrets)}
return super(PackConfigsController, self)._get_all(sort=sort, offset=offset, limit=limit, from_model_kwargs=from_model_kwargs, raw_filters=raw_filters)
'Retrieve config for a particular pack. Handles requests: GET /configs/<pack_ref>'
def get_one(self, pack_ref, requester_user, show_secrets=False):
from_model_kwargs = {'mask_secrets': self._get_mask_secrets(requester_user, show_secrets=show_secrets)}

try:
    instance = packs_service.get_pack_by_ref(pack_ref=pack_ref)
except StackStormDBObjectNotFoundError:
    msg = 'Unable to identify resource with pack_ref "%s".' % pack_ref
    abort(http_client.NOT_FOUND, msg)

rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user, resource_db=instance, permission_type=PermissionType.PACK_VIEW)

return self._get_one_by_pack_ref(pack_ref=pack_ref, from_model_kwargs=from_model_kwargs)
'Create a new config for a pack. Handles requests: POST /configs/<pack_ref>'
def put(self, pack_uninstall_request, pack_ref, requester_user, show_secrets=False):
try:
    config_api = ConfigAPI(pack=pack_ref, values=vars(pack_uninstall_request))
    config_api.validate(validate_against_schema=True)
except jsonschema.ValidationError as e:
    raise ValueValidationException(str(e))

self._dump_config_to_disk(config_api)
config_db = ConfigsRegistrar.save_model(config_api)

mask_secrets = self._get_mask_secrets(requester_user, show_secrets=show_secrets)
return ConfigAPI.from_model(config_db, mask_secrets=mask_secrets)
'Retrieve config schema for all the packs. Handles requests: GET /config_schema/'
def get_all(self, sort=None, offset=0, limit=None, **raw_filters):
return super(PackConfigSchemasController, self)._get_all(sort=sort, offset=offset, limit=limit, raw_filters=raw_filters)
'Retrieve config schema for a particular pack. Handles requests: GET /config_schema/<pack_ref>'
def get_one(self, pack_ref, requester_user):
packs_controller._get_one_by_ref_or_id(ref_or_id=pack_ref, requester_user=requester_user)
return self._get_one_by_pack_ref(pack_ref=pack_ref)
':param liveaction: LiveActionAPI object. :type liveaction: :class:`LiveActionAPI`'
def _handle_schedule_execution(self, liveaction_api, requester_user, context_string=None, show_secrets=False):
if (not requester_user): requester_user = UserDB(cfg.CONF.system_user.user) action_ref = liveaction_api.action action_db = action_utils.get_action_by_ref(action_ref) if (not action_db): message = ('Action "%s" cannot be found.' % action_ref) LOG.warning(message) abort(http_client.BAD_REQUEST, message) assert_user_has_resource_db_permission(user_db=requester_user, resource_db=action_db, permission_type=PermissionType.ACTION_EXECUTE) user = (liveaction_api.user or requester_user.name) assert_user_is_admin_if_user_query_param_is_provided(user_db=requester_user, user=user) try: return self._schedule_execution(liveaction=liveaction_api, requester_user=requester_user, user=user, context_string=context_string, show_secrets=show_secrets, pack=action_db.pack) except ValueError as e: LOG.exception('Unable to execute action.') abort(http_client.BAD_REQUEST, str(e)) except jsonschema.ValidationError as e: LOG.exception('Unable to execute action. Parameter validation failed.') abort(http_client.BAD_REQUEST, re.sub("u'([^']*)'", "'\\1'", e.message)) except TraceNotFoundException as e: abort(http_client.BAD_REQUEST, str(e)) except ValueValidationException as e: raise e except Exception as e: LOG.exception('Unable to execute action. Unexpected error encountered.') abort(http_client.INTERNAL_SERVER_ERROR, str(e))
'Retrieve result object for the provided action execution. :param id: Action execution ID. :type id: ``str`` :rtype: ``dict``'
def _get_result_object(self, id):
fields = ['result']
action_exec_db = self.access.impl.model.objects.filter(id=id).only(*fields).get()
return action_exec_db.result
'Retrieve children for the provided action execution. :rtype: ``list``'
def get_one(self, id, requester_user, depth=(-1), result_fmt=None, show_secrets=False):
instance = self._get_by_id(resource_id=id)

permission_type = PermissionType.EXECUTION_VIEW
rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user, resource_db=instance, permission_type=permission_type)

return self._get_children(id_=id, depth=depth, result_fmt=result_fmt, requester_user=requester_user, show_secrets=show_secrets)
'Retrieve a particular attribute for the provided action execution. Handles requests: GET /executions/<id>/attribute/<attribute name> :rtype: ``dict``'
def get(self, id, attribute, requester_user):
fields = [attribute, 'action__pack', 'action__uid']
fields = self._validate_exclude_fields(fields)
action_exec_db = self.access.impl.model.objects.filter(id=id).only(*fields).get()

permission_type = PermissionType.EXECUTION_VIEW
rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user, resource_db=action_exec_db, permission_type=permission_type)

result = getattr(action_exec_db, attribute, None)
return result
'Re-run the provided action execution optionally specifying override parameters. Handles requests: POST /executions/<id>/re_run'
def post(self, spec_api, id, requester_user, no_merge=False, show_secrets=False):
if ((spec_api.tasks or spec_api.reset) and spec_api.parameters): raise ValueError('Parameters override is not supported when re-running task(s) for a workflow.') if spec_api.parameters: assert isinstance(spec_api.parameters, dict) if spec_api.tasks: assert isinstance(spec_api.tasks, list) if spec_api.reset: assert isinstance(spec_api.reset, list) if list((set(spec_api.reset) - set(spec_api.tasks))): raise ValueError('List of tasks to reset does not match the tasks to rerun.') no_merge = cast_argument_value(value_type=bool, value=no_merge) existing_execution = self._get_one_by_id(id=id, exclude_fields=self.exclude_fields, requester_user=requester_user, permission_type=PermissionType.EXECUTION_VIEW) if (spec_api.tasks and (existing_execution.runner['name'] != 'mistral-v2')): raise ValueError('Task option is only supported for Mistral workflows.') new_parameters = {} if (not no_merge): new_parameters.update(getattr(existing_execution, 'parameters', {})) new_parameters.update(spec_api.parameters) action_ref = existing_execution.action['ref'] context = {'re-run': {'ref': id}} if spec_api.tasks: context['re-run']['tasks'] = spec_api.tasks if spec_api.reset: context['re-run']['reset'] = spec_api.reset trace = trace_service.get_trace_db_by_action_execution(action_execution_id=existing_execution.id) if trace: context['trace_context'] = {'id_': str(trace.id)} new_liveaction_api = LiveActionCreateAPI(action=action_ref, context=context, parameters=new_parameters, user=spec_api.user) return self._handle_schedule_execution(liveaction_api=new_liveaction_api, requester_user=requester_user, show_secrets=show_secrets)
'List all executions. Handles requests: GET /executions[?exclude_attributes=result,trigger_instance] :param exclude_attributes: Comma delimited string of attributes to exclude from the object. :type exclude_attributes: ``str``'
def get_all(self, requester_user, exclude_attributes=None, sort=None, offset=0, limit=None, show_secrets=False, **raw_filters):
if exclude_attributes:
    exclude_fields = exclude_attributes.split(',')
else:
    exclude_fields = None

exclude_fields = self._validate_exclude_fields(exclude_fields=exclude_fields)

query_options = None
if raw_filters.get('timestamp_lt', None) or raw_filters.get('sort_desc', None):
    query_options = {'sort': ['-start_timestamp', 'action.ref']}
elif raw_filters.get('timestamp_gt', None) or raw_filters.get('sort_asc', None):
    query_options = {'sort': ['+start_timestamp', 'action.ref']}

from_model_kwargs = {'mask_secrets': self._get_mask_secrets(requester_user, show_secrets=show_secrets)}
return self._get_action_executions(exclude_fields=exclude_fields, from_model_kwargs=from_model_kwargs, sort=sort, offset=offset, limit=limit, query_options=query_options, raw_filters=raw_filters)
'Retrieve a single execution. Handles requests: GET /executions/<id>[?exclude_attributes=result,trigger_instance] :param exclude_attributes: Comma delimited string of attributes to exclude from the object. :type exclude_attributes: ``str``'
def get_one(self, id, requester_user, exclude_attributes=None, show_secrets=False):
if exclude_attributes:
    exclude_fields = exclude_attributes.split(',')
else:
    exclude_fields = None

exclude_fields = self._validate_exclude_fields(exclude_fields=exclude_fields)

from_model_kwargs = {'mask_secrets': self._get_mask_secrets(requester_user, show_secrets=show_secrets)}
return self._get_one_by_id(id=id, exclude_fields=exclude_fields, requester_user=requester_user, from_model_kwargs=from_model_kwargs, permission_type=PermissionType.EXECUTION_VIEW)
'Updates a single execution. Handles requests: PUT /executions/<id>'
def put(self, id, liveaction_api, requester_user, show_secrets=False):
if (not requester_user): requester_user = UserDB(cfg.CONF.system_user.user) from_model_kwargs = {'mask_secrets': self._get_mask_secrets(requester_user, show_secrets=show_secrets)} execution_api = self._get_one_by_id(id=id, requester_user=requester_user, from_model_kwargs=from_model_kwargs, permission_type=PermissionType.EXECUTION_STOP) if (not execution_api): abort(http_client.NOT_FOUND, ('Execution with id %s not found.' % id)) liveaction_id = execution_api.liveaction['id'] if (not liveaction_id): abort(http_client.INTERNAL_SERVER_ERROR, ('Execution object missing link to liveaction %s.' % liveaction_id)) try: liveaction_db = LiveAction.get_by_id(liveaction_id) except: abort(http_client.INTERNAL_SERVER_ERROR, ('Execution object missing link to liveaction %s.' % liveaction_id)) if (liveaction_db.status in LIVEACTION_COMPLETED_STATES): abort(http_client.BAD_REQUEST, 'Execution is already in completed state.') try: liveaction_db = action_service.update_status(liveaction_db, liveaction_api.status, result=getattr(liveaction_api, 'result', None)) except Exception as e: LOG.exception('Failed updating liveaction %s. %s', liveaction_db.id, str(e)) abort(http_client.INTERNAL_SERVER_ERROR, 'Failed updating execution.') execution_api = self._get_one_by_id(id=id, requester_user=requester_user, from_model_kwargs=from_model_kwargs, permission_type=PermissionType.EXECUTION_STOP) return execution_api
'Stops a single execution. Handles requests: DELETE /executions/<id>'
def delete(self, id, requester_user, show_secrets=False):
if (not requester_user): requester_user = UserDB(cfg.CONF.system_user.user) from_model_kwargs = {'mask_secrets': self._get_mask_secrets(requester_user, show_secrets=show_secrets)} execution_api = self._get_one_by_id(id=id, requester_user=requester_user, from_model_kwargs=from_model_kwargs, permission_type=PermissionType.EXECUTION_STOP) if (not execution_api): abort(http_client.NOT_FOUND, ('Execution with id %s not found.' % id)) liveaction_id = execution_api.liveaction['id'] if (not liveaction_id): abort(http_client.INTERNAL_SERVER_ERROR, ('Execution object missing link to liveaction %s.' % liveaction_id)) try: liveaction_db = LiveAction.get_by_id(liveaction_id) except: abort(http_client.INTERNAL_SERVER_ERROR, ('Execution object missing link to liveaction %s.' % liveaction_id)) if (liveaction_db.status == LIVEACTION_STATUS_CANCELED): LOG.info(('Action %s already in "canceled" state; returning execution object.' % liveaction_db.id)) return execution_api if (liveaction_db.status not in LIVEACTION_CANCELABLE_STATES): abort(http_client.OK, ('Action cannot be canceled. State = %s.' % liveaction_db.status)) try: (liveaction_db, execution_db) = action_service.request_cancellation(liveaction_db, (requester_user.name or cfg.CONF.system_user.user)) except: LOG.exception('Failed requesting cancellation for liveaction %s.', liveaction_db.id) abort(http_client.INTERNAL_SERVER_ERROR, 'Failed canceling execution.') return ActionExecutionAPI.from_model(execution_db, mask_secrets=from_model_kwargs['mask_secrets'])
':param exclude_fields: A list of object fields to exclude. :type exclude_fields: ``list``'
def _get_action_executions(self, exclude_fields=None, sort=None, offset=0, limit=None, query_options=None, raw_filters=None, from_model_kwargs=None):
if limit is None:
    limit = self.default_limit

limit = int(limit)

LOG.debug('Retrieving all action executions with filters=%s', raw_filters)
return super(ActionExecutionsController, self)._get_all(exclude_fields=exclude_fields, from_model_kwargs=from_model_kwargs, sort=sort, offset=offset, limit=limit, query_options=query_options, raw_filters=raw_filters)
'List api keys. Handle: GET /apikeys/1'
def get_one(self, api_key_id_or_key, requester_user, show_secrets=None):
api_key_db = None
try:
    api_key_db = ApiKey.get_by_key_or_id(api_key_id_or_key)
except ApiKeyNotFoundError:
    msg = 'ApiKey matching %s for reference and id not found.' % api_key_id_or_key
    LOG.exception(msg)
    abort(http_client.NOT_FOUND, msg)

permission_type = PermissionType.API_KEY_VIEW
rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user, resource_db=api_key_db, permission_type=permission_type)

try:
    mask_secrets = self._get_mask_secrets(show_secrets=show_secrets, requester_user=requester_user)
    return ApiKeyAPI.from_model(api_key_db, mask_secrets=mask_secrets)
except (ValidationError, ValueError) as e:
    LOG.exception('Failed to serialize API key.')
    abort(http_client.INTERNAL_SERVER_ERROR, str(e))
'List all keys. Handles requests: GET /apikeys/'
def get_all(self, requester_user, show_secrets=None, limit=None, offset=0):
mask_secrets = self._get_mask_secrets(show_secrets=show_secrets, requester_user=requester_user) if (limit and (int(limit) > self.max_limit)): msg = ('Limit "%s" specified, maximum value is "%s"' % (limit, self.max_limit)) raise ValueError(msg) api_key_dbs = ApiKey.get_all(limit=limit, offset=offset) try: api_keys = [ApiKeyAPI.from_model(api_key_db, mask_secrets=mask_secrets) for api_key_db in api_key_dbs] except OverflowError: msg = ('Offset "%s" specified is more than 32 bit int' % offset) raise ValueError(msg) resp = Response(json=api_keys) resp.headers['X-Total-Count'] = str(api_key_dbs.count()) if limit: resp.headers['X-Limit'] = str(limit) return resp
'Create a new entry.'
def post(self, api_key_api, requester_user):
permission_type = PermissionType.API_KEY_CREATE rbac_utils.assert_user_has_resource_api_permission(user_db=requester_user, resource_api=api_key_api, permission_type=permission_type) api_key_db = None api_key = None try: if (not getattr(api_key_api, 'user', None)): if requester_user: api_key_api.user = requester_user.name else: api_key_api.user = cfg.CONF.system_user.user try: User.get_by_name(api_key_api.user) except StackStormDBObjectNotFoundError: user_db = UserDB(name=api_key_api.user) User.add_or_update(user_db) extra = {'username': api_key_api.user, 'user': user_db} LOG.audit(('Registered new user "%s".' % api_key_api.user), extra=extra) if (not getattr(api_key_api, 'key_hash', None)): (api_key, api_key_hash) = auth_util.generate_api_key_and_hash() api_key_api.key_hash = api_key_hash api_key_db = ApiKey.add_or_update(ApiKeyAPI.to_model(api_key_api)) except (ValidationError, ValueError) as e: LOG.exception('Validation failed for api_key data=%s.', api_key_api) abort(http_client.BAD_REQUEST, str(e)) extra = {'api_key_db': api_key_db} LOG.audit(('ApiKey created. ApiKey.id=%s' % api_key_db.id), extra=extra) api_key_create_response_api = ApiKeyCreateResponseAPI.from_model(api_key_db) api_key_create_response_api.key = api_key return Response(json=api_key_create_response_api, status=http_client.CREATED)
'Delete the key value pair. Handles requests: DELETE /apikeys/1'
def delete(self, api_key_id_or_key, requester_user):
api_key_db = ApiKey.get_by_key_or_id(api_key_id_or_key)

permission_type = PermissionType.API_KEY_DELETE
rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user, resource_db=api_key_db, permission_type=permission_type)

ApiKey.delete(api_key_db)

extra = {'api_key_db': api_key_db}
LOG.audit('ApiKey deleted. ApiKey.id=%s' % api_key_db.id, extra=extra)

return Response(status=http_client.NO_CONTENT)
'Create a new policy. Handles requests: POST /policies/'
def post(self, instance, requester_user):
permission_type = PermissionType.POLICY_CREATE
rbac_utils.assert_user_has_resource_api_permission(user_db=requester_user, resource_api=instance, permission_type=permission_type)

op = 'POST /policies/'

db_model = self.model.to_model(instance)
LOG.debug('%s verified object: %s', op, db_model)
db_model = self.access.add_or_update(db_model)
LOG.debug('%s created object: %s', op, db_model)
LOG.audit('Policy created. Policy.id=%s' % db_model.id, extra={'policy_db': db_model})

exec_result = self.model.from_model(db_model)
return Response(json=exec_result, status=http_client.CREATED)
'Delete a policy. Handles requests: POST /policies/1?_method=delete DELETE /policies/1 DELETE /policies/mypack.mypolicy'
def delete(self, ref_or_id, requester_user):
op = ('DELETE /policies/%s/' % ref_or_id) db_model = self._get_by_ref_or_id(ref_or_id=ref_or_id) LOG.debug('%s found object: %s', op, db_model) permission_type = PermissionType.POLICY_DELETE rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user, resource_db=db_model, permission_type=permission_type) try: validate_not_part_of_system_pack(db_model) except ValueValidationException as e: LOG.exception('%s unable to delete object from system pack.', op) abort(http_client.BAD_REQUEST, str(e)) try: self.access.delete(db_model) except Exception as e: LOG.exception('%s unable to delete object: %s', op, db_model) abort(http_client.INTERNAL_SERVER_ERROR, str(e)) return LOG.debug('%s deleted object: %s', op, db_model) LOG.audit(('Policy deleted. Policy.id=%s' % db_model.id), extra={'policy_db': db_model}) return Response(status=http_client.NO_CONTENT)
'Check if all listed indexes are healthy: they should be reachable, return valid JSON objects, and yield more than one result.'
def get(self):
_, status = packs_service.fetch_pack_index(allow_empty=True)

health = {
    'indexes': {
        'count': len(status),
        'valid': 0,
        'invalid': 0,
        'errors': {},
        'status': status,
    },
    'packs': {
        'count': 0,
    },
}

for index in status:
    if index['error']:
        error_count = health['indexes']['errors'].get(index['error'], 0) + 1
        health['indexes']['invalid'] += 1
        health['indexes']['errors'][index['error']] = error_count
    else:
        health['indexes']['valid'] += 1
    health['packs']['count'] += index['packs']

return health
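The error tally above is essentially a counting pass; a standalone sketch of the same aggregation with collections.Counter over made-up index data is shown here for illustration.

from collections import Counter

# Hypothetical index status entries for the example.
status = [
    {'error': None, 'packs': 12},
    {'error': 'connection timeout', 'packs': 0},
    {'error': 'connection timeout', 'packs': 0},
]

errors = Counter(entry['error'] for entry in status if entry['error'])
valid = sum(1 for entry in status if not entry['error'])
packs = sum(entry['packs'] for entry in status)
print(dict(errors), valid, packs)  # {'connection timeout': 2} 1 12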
'Note: In this case "ref" is pack name and not StackStorm\'s ResourceReference.'
def _get_by_ref(self, ref, exclude_fields=None):
resource_db = self.access.query(ref=ref, exclude_fields=exclude_fields).first()
return resource_db
'List merged action & runner parameters by action id. Handle: GET /actions/views/parameters/1'
@staticmethod
def _get_one(action_id, requester_user):
action_db = LookupUtils._get_action_by_id(action_id)
LOG.info('Found action: %s, runner: %s', action_db, action_db.runner_type['name'])

permission_type = PermissionType.ACTION_VIEW
rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user, resource_db=action_db, permission_type=permission_type)

runner_db = LookupUtils._get_runner_by_name(action_db.runner_type['name'])
all_params = action_param_utils.get_params_view(action_db=action_db, runner_db=runner_db, merged_only=True)

return {'parameters': all_params}
'List action by id. Handle: GET /actions/views/overview/1'
def get_one(self, ref_or_id, requester_user):
resp = super(OverviewController, self)._get_one(ref_or_id, requester_user=requester_user, permission_type=PermissionType.ACTION_VIEW)
action_api = ActionAPI(**resp.json)
result = self._transform_action_api(action_api=action_api, requester_user=requester_user)
resp.json = result
return resp
'List all actions. Handles requests: GET /actions/views/overview'
def get_all(self, sort=None, offset=0, limit=None, requester_user=None, **raw_filters):
resp = super(OverviewController, self)._get_all(sort=sort, offset=offset, limit=limit, raw_filters=raw_filters)

result = []
for item in resp.json:
    action_api = ActionAPI(**item)
    result.append(self._transform_action_api(action_api=action_api, requester_user=requester_user))
resp.json = result

return resp
'Outputs the file associated with action entry_point Handles requests: GET /actions/views/entry_point/1'
def get_one(self, ref_or_id, requester_user):
LOG.info('GET /actions/views/entry_point with ref_or_id=%s', ref_or_id)
action_db = self._get_by_ref_or_id(ref_or_id=ref_or_id)

permission_type = PermissionType.ACTION_VIEW
rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user, resource_db=action_db, permission_type=permission_type)

pack = getattr(action_db, 'pack', None)
entry_point = getattr(action_db, 'entry_point', None)

abs_path = utils.get_entry_point_abs_path(pack, entry_point)

if not abs_path:
    raise StackStormDBObjectNotFoundError('Action ref_or_id=%s has no entry_point to output' % ref_or_id)

with open(abs_path) as file:
    content = file.read()

return content
'List all the available permission types. Handles requests: GET /rbac/permission_types'
def get_all(self, requester_user):
rbac_utils.assert_user_is_admin(user_db=requester_user)

result = get_resource_permission_types_with_descriptions()
return result
'List all the available permission types for a particular resource type. Handles requests: GET /rbac/permission_types/<resource type>'
def get_one(self, resource_type, requester_user):
rbac_utils.assert_user_is_admin(user_db=requester_user)

all_permission_types = get_resource_permission_types_with_descriptions()
permission_types = all_permission_types.get(resource_type, None)

if permission_types is None:
    raise exc.HTTPNotFound('Invalid resource type: %s' % resource_type)

return permission_types
'Create a new triggertype. Handles requests: POST /triggertypes/'
def post(self, triggertype):
try: triggertype_db = TriggerTypeAPI.to_model(triggertype) triggertype_db = TriggerType.add_or_update(triggertype_db) except (ValidationError, ValueError) as e: LOG.exception('Validation failed for triggertype data=%s.', triggertype) abort(http_client.BAD_REQUEST, str(e)) return else: extra = {'triggertype_db': triggertype_db} LOG.audit(('TriggerType created. TriggerType.id=%s' % triggertype_db.id), extra=extra) if (not triggertype_db.parameters_schema): TriggerTypeController._create_shadow_trigger(triggertype_db) triggertype_api = TriggerTypeAPI.from_model(triggertype_db) return Response(json=triggertype_api, status=http_client.CREATED)
'Delete a triggertype. Handles requests: DELETE /triggertypes/1 DELETE /triggertypes/pack.name'
def delete(self, triggertype_ref_or_id):
LOG.info('DELETE /triggertypes/ with ref_or_id=%s', triggertype_ref_or_id) triggertype_db = self._get_by_ref_or_id(ref_or_id=triggertype_ref_or_id) triggertype_id = triggertype_db.id try: validate_not_part_of_system_pack(triggertype_db) except ValueValidationException as e: abort(http_client.BAD_REQUEST, str(e)) try: TriggerType.delete(triggertype_db) except Exception as e: LOG.exception('Database delete encountered exception during delete of id="%s". ', triggertype_id) abort(http_client.INTERNAL_SERVER_ERROR, str(e)) return else: extra = {'triggertype': triggertype_db} LOG.audit(('TriggerType deleted. TriggerType.id=%s' % triggertype_db.id), extra=extra) if (not triggertype_db.parameters_schema): TriggerTypeController._delete_shadow_trigger(triggertype_db) return Response(status=http_client.NO_CONTENT)
'List trigger by id. Handle: GET /triggers/1'
def get_one(self, trigger_id):
trigger_db = TriggerController.__get_by_id(trigger_id)
trigger_api = TriggerAPI.from_model(trigger_db)
return trigger_api
'List all triggers. Handles requests: GET /triggers/'
def get_all(self):
trigger_dbs = Trigger.get_all()
trigger_apis = [TriggerAPI.from_model(trigger_db) for trigger_db in trigger_dbs]
return trigger_apis
'Create a new trigger. Handles requests: POST /triggers/'
def post(self, trigger):
try:
    trigger_db = TriggerService.create_trigger_db(trigger)
except (ValidationError, ValueError) as e:
    LOG.exception('Validation failed for trigger data=%s.', trigger)
    abort(http_client.BAD_REQUEST, str(e))
    return

extra = {'trigger': trigger_db}
LOG.audit('Trigger created. Trigger.id=%s' % trigger_db.id, extra=extra)

trigger_api = TriggerAPI.from_model(trigger_db)
return Response(json=trigger_api, status=http_client.CREATED)
'Delete a trigger. Handles requests: DELETE /triggers/1'
def delete(self, trigger_id):
LOG.info('DELETE /triggers/ with id=%s', trigger_id)
trigger_db = TriggerController.__get_by_id(trigger_id)

try:
    Trigger.delete(trigger_db)
except Exception as e:
    LOG.exception('Database delete encountered exception during delete of id="%s".', trigger_id)
    abort(http_client.INTERNAL_SERVER_ERROR, str(e))
    return

extra = {'trigger_db': trigger_db}
LOG.audit('Trigger deleted. Trigger.id=%s' % trigger_db.id, extra=extra)

return Response(status=http_client.NO_CONTENT)
'Re-send the provided trigger instance optionally specifying override parameters. Handles requests: POST /triggerinstance/<id>/re_emit POST /triggerinstance/<id>/re_send'
def post(self, trigger_instance_id):
existing_trigger_instance = self._get_one_by_id(id=trigger_instance_id, permission_type=None, requester_user=None)

new_payload = copy.deepcopy(existing_trigger_instance.payload)
new_payload['__context'] = {'original_id': trigger_instance_id}

try:
    self.trigger_dispatcher.dispatch(existing_trigger_instance.trigger, new_payload)
    return {'message': 'Trigger instance %s successfully re-sent.' % trigger_instance_id, 'payload': new_payload}
except Exception as e:
    abort(http_client.INTERNAL_SERVER_ERROR, str(e))
'List triggerinstance by instance_id. Handle: GET /triggerinstances/1'
def get_one(self, instance_id):
return self._get_one_by_id(instance_id, permission_type=None, requester_user=None)
'List all triggerinstances. Handles requests: GET /triggerinstances/'
def get_all(self, sort=None, offset=0, limit=None, **raw_filters):
trigger_instances = self._get_trigger_instances(sort=sort, offset=offset, limit=limit, raw_filters=raw_filters)
return trigger_instances
'Run a chatops command. Handles requests: POST /actionalias/match'
def match(self, action_alias_match_api):
command = action_alias_match_api.command

try:
    aliases_resp = super(ActionAliasController, self)._get_all()
    aliases = [ActionAliasAPI(**alias) for alias in aliases_resp.json]
    matches = match_command_to_alias(command, aliases)
    if len(matches) > 1:
        raise ActionAliasAmbiguityException("Command '%s' matched more than 1 pattern" % command, matches=matches, command=command)
    elif len(matches) == 0:
        raise ActionAliasAmbiguityException("Command '%s' matched no patterns" % command, matches=[], command=command)
    return [self._match_tuple_to_dict(match) for match in matches]
except ActionAliasAmbiguityException as e:
    LOG.exception('Command "%s" matched (%s) patterns.', e.command, len(e.matches))
    return abort(http_client.BAD_REQUEST, str(e))
'Get available help strings for action aliases. Handles requests: GET /actionalias/help'
def help(self, filter, pack, limit, offset, **kwargs):
try:
    aliases_resp = super(ActionAliasController, self)._get_all(**kwargs)
    aliases = [ActionAliasAPI(**alias) for alias in aliases_resp.json]
    return generate_helpstring_result(aliases, filter, pack, int(limit), int(offset))
except TypeError as e:
    LOG.exception('Helpstring request contains an invalid data type: %s.', str(e))
    return abort(http_client.BAD_REQUEST, str(e))
'Create a new ActionAlias. Handles requests: POST /actionalias/'
def post(self, action_alias, requester_user):
permission_type = PermissionType.ACTION_ALIAS_CREATE rbac_utils.assert_user_has_resource_api_permission(user_db=requester_user, resource_api=action_alias, permission_type=permission_type) try: action_alias_db = ActionAliasAPI.to_model(action_alias) LOG.debug('/actionalias/ POST verified ActionAliasAPI and formulated ActionAliasDB=%s', action_alias_db) action_alias_db = ActionAlias.add_or_update(action_alias_db) except (ValidationError, ValueError, ValueValidationException) as e: LOG.exception('Validation failed for action alias data=%s.', action_alias) abort(http_client.BAD_REQUEST, str(e)) return extra = {'action_alias_db': action_alias_db} LOG.audit(('Action alias created. ActionAlias.id=%s' % action_alias_db.id), extra=extra) action_alias_api = ActionAliasAPI.from_model(action_alias_db) return Response(json=action_alias_api, status=http_client.CREATED)
'Update an action alias. Handles requests: PUT /actionalias/1'
def put(self, action_alias, ref_or_id, requester_user):
action_alias_db = self._get_by_ref_or_id(ref_or_id=ref_or_id)
LOG.debug('PUT /actionalias/ lookup with id=%s found object: %s', ref_or_id, action_alias_db)

permission_type = PermissionType.ACTION_ALIAS_MODIFY
rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user, resource_db=action_alias_db, permission_type=permission_type)

if not hasattr(action_alias, 'id'):
    action_alias.id = None

try:
    # Use equality (not identity) when comparing against the empty string.
    if action_alias.id is not None and action_alias.id != '' and action_alias.id != ref_or_id:
        LOG.warning('Discarding mismatched id=%s found in payload and using uri_id=%s.', action_alias.id, ref_or_id)
    old_action_alias_db = action_alias_db
    action_alias_db = ActionAliasAPI.to_model(action_alias)
    action_alias_db.id = ref_or_id
    action_alias_db = ActionAlias.add_or_update(action_alias_db)
except (ValidationError, ValueError) as e:
    LOG.exception('Validation failed for action alias data=%s', action_alias)
    abort(http_client.BAD_REQUEST, str(e))
    return

extra = {'old_action_alias_db': old_action_alias_db, 'new_action_alias_db': action_alias_db}
LOG.audit('Action alias updated. ActionAlias.id=%s.' % action_alias_db.id, extra=extra)

action_alias_api = ActionAliasAPI.from_model(action_alias_db)
return action_alias_api
'Delete an action alias. Handles requests: DELETE /actionalias/1'
def delete(self, ref_or_id, requester_user):
action_alias_db = self._get_by_ref_or_id(ref_or_id=ref_or_id) LOG.debug('DELETE /actionalias/ lookup with id=%s found object: %s', ref_or_id, action_alias_db) permission_type = PermissionType.ACTION_ALIAS_DELETE rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user, resource_db=action_alias_db, permission_type=permission_type) try: ActionAlias.delete(action_alias_db) except Exception as e: LOG.exception('Database delete encountered exception during delete of id="%s".', ref_or_id) abort(http_client.INTERNAL_SERVER_ERROR, str(e)) return extra = {'action_alias_db': action_alias_db} LOG.audit(('Action alias deleted. ActionAlias.id=%s.' % action_alias_db.id), extra=extra) return Response(status=http_client.NO_CONTENT)
'List key by name. Handle: GET /keys/key1'
def get_one(self, name, requester_user, scope=FULL_SYSTEM_SCOPE, user=None, decrypt=False):
if (not scope): scope = FULL_SYSTEM_SCOPE if user: scope = FULL_USER_SCOPE if (not requester_user): requester_user = UserDB(cfg.CONF.system_user.user) scope = get_datastore_full_scope(scope) self._validate_scope(scope=scope) is_admin = rbac_utils.user_is_admin(user_db=requester_user) self._validate_decrypt_query_parameter(decrypt=decrypt, scope=scope, is_admin=is_admin, requester_user=requester_user) user = (user or requester_user.name) assert_user_is_admin_if_user_query_param_is_provided(user_db=requester_user, user=user) key_ref = get_key_reference(scope=scope, name=name, user=user) from_model_kwargs = {'mask_secrets': (not decrypt)} kvp_api = self._get_one_by_scope_and_name(name=key_ref, scope=scope, from_model_kwargs=from_model_kwargs) return kvp_api
'List all keys. Handles requests: GET /keys/'
def get_all(self, requester_user, prefix=None, scope=FULL_SYSTEM_SCOPE, user=None, decrypt=False, sort=None, offset=0, limit=None, **raw_filters):
if (not scope): scope = FULL_SYSTEM_SCOPE if user: scope = FULL_USER_SCOPE if (not requester_user): requester_user = UserDB(cfg.CONF.system_user.user) scope = get_datastore_full_scope(scope) is_all_scope = (scope == ALL_SCOPE) is_admin = rbac_utils.user_is_admin(user_db=requester_user) if (is_all_scope and (not is_admin)): msg = '"all" scope requires administrator access' raise AccessDeniedError(message=msg, user_db=requester_user) self._validate_decrypt_query_parameter(decrypt=decrypt, scope=scope, is_admin=is_admin, requester_user=requester_user) user = (user or requester_user.name) assert_user_is_admin_if_user_query_param_is_provided(user_db=requester_user, user=user) from_model_kwargs = {'mask_secrets': (not decrypt)} if (scope and (scope not in ALL_SCOPE)): self._validate_scope(scope=scope) raw_filters['scope'] = scope if ((scope == USER_SCOPE) or (scope == FULL_USER_SCOPE)): if prefix: prefix = get_key_reference(name=prefix, scope=scope, user=user) else: prefix = get_key_reference(name='', scope=scope, user=user) raw_filters['prefix'] = prefix kvp_apis = super(KeyValuePairController, self)._get_all(from_model_kwargs=from_model_kwargs, sort=sort, offset=offset, limit=limit, raw_filters=raw_filters) return kvp_apis
'Create a new entry or update an existing one.'
def put(self, kvp, name, requester_user, scope=FULL_SYSTEM_SCOPE):
if (not scope): scope = FULL_SYSTEM_SCOPE if (not requester_user): requester_user = UserDB(cfg.CONF.system_user.user) scope = getattr(kvp, 'scope', scope) scope = get_datastore_full_scope(scope) self._validate_scope(scope=scope) user = (getattr(kvp, 'user', requester_user.name) or requester_user.name) assert_user_is_admin_if_user_query_param_is_provided(user_db=requester_user, user=user) key_ref = get_key_reference(scope=scope, name=name, user=user) lock_name = self._get_lock_name_for_key(name=key_ref, scope=scope) LOG.debug('PUT scope: %s, name: %s', scope, name) with self._coordinator.get_lock(lock_name): try: existing_kvp_api = self._get_one_by_scope_and_name(scope=scope, name=key_ref) except StackStormDBObjectNotFoundError: existing_kvp_api = None kvp.name = key_ref kvp.scope = scope try: kvp_db = KeyValuePairAPI.to_model(kvp) if existing_kvp_api: kvp_db.id = existing_kvp_api.id kvp_db = KeyValuePair.add_or_update(kvp_db) except (ValidationError, ValueError) as e: LOG.exception('Validation failed for key value data=%s', kvp) abort(http_client.BAD_REQUEST, str(e)) return except CryptoKeyNotSetupException as e: LOG.exception(str(e)) abort(http_client.BAD_REQUEST, str(e)) return except InvalidScopeException as e: LOG.exception(str(e)) abort(http_client.BAD_REQUEST, str(e)) return extra = {'kvp_db': kvp_db} LOG.audit(('KeyValuePair updated. KeyValuePair.id=%s' % kvp_db.id), extra=extra) kvp_api = KeyValuePairAPI.from_model(kvp_db) return kvp_api
'Delete the key value pair. Handles requests: DELETE /keys/1'
def delete(self, name, requester_user, scope=FULL_SYSTEM_SCOPE, user=None):
if (not scope): scope = FULL_SYSTEM_SCOPE if (not requester_user): requester_user = UserDB(cfg.CONF.system_user.user) scope = get_datastore_full_scope(scope) self._validate_scope(scope=scope) user = (user or requester_user.name) assert_user_is_admin_if_user_query_param_is_provided(user_db=requester_user, user=user) key_ref = get_key_reference(scope=scope, name=name, user=user) lock_name = self._get_lock_name_for_key(name=key_ref, scope=scope) with self._coordinator.get_lock(lock_name): from_model_kwargs = {'mask_secrets': True} kvp_api = self._get_one_by_scope_and_name(name=key_ref, scope=scope, from_model_kwargs=from_model_kwargs) kvp_db = KeyValuePairAPI.to_model(kvp_api) LOG.debug('DELETE /keys/ lookup with scope=%s name=%s found object: %s', scope, name, kvp_db) try: KeyValuePair.delete(kvp_db) except Exception as e: LOG.exception('Database delete encountered exception during delete of name="%s". ', name) abort(http_client.INTERNAL_SERVER_ERROR, str(e)) return extra = {'kvp_db': kvp_db} LOG.audit(('KeyValuePair deleted. KeyValuePair.id=%s' % kvp_db.id), extra=extra) return Response(status=http_client.NO_CONTENT)
'Retrieve a coordination lock name for the provided datastore item name. :param name: Datastore item name (PK). :type name: ``str``'
def _get_lock_name_for_key(self, name, scope=FULL_SYSTEM_SCOPE):
lock_name = ('kvp-crud-%s.%s' % (scope, name)) return lock_name
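For illustration, assuming the full scope strings are 'st2kv.system' and 'st2kv.user' and that user-scoped key references are prefixed with the username (both assumptions, not confirmed here), the resulting lock names look like this:

    controller._get_lock_name_for_key(name='api_token')
    # -> 'kvp-crud-st2kv.system.api_token'
    controller._get_lock_name_for_key(name='stanley:api_token', scope='st2kv.user')
    # -> 'kvp-crud-st2kv.user.stanley:api_token'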
'Validate that the provided user is either an admin or requesting to decrypt a value for themselves.'
def _validate_decrypt_query_parameter(self, decrypt, scope, is_admin, requester_user):
is_user_scope = ((scope == USER_SCOPE) or (scope == FULL_USER_SCOPE)) if (decrypt and ((not is_user_scope) and (not is_admin))): msg = 'Decrypt option requires administrator access' raise AccessDeniedError(message=msg, user_db=requester_user)
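The condition is easier to read when restated as a standalone predicate; the scope strings below are assumptions and the helper itself is illustrative only:

    def decrypt_allowed(decrypt, scope, is_admin):
        is_user_scope = scope in ('user', 'st2kv.user')
        return (not decrypt) or is_user_scope or is_admin

    assert decrypt_allowed(True, 'st2kv.user', False)        # users may decrypt their own values
    assert not decrypt_allowed(True, 'st2kv.system', False)  # system scope requires admin
    assert decrypt_allowed(True, 'st2kv.system', True)
    assert decrypt_allowed(False, 'st2kv.system', False)     # no decryption requested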
'Create a new action. Handles requests: POST /actions/'
def post(self, action, requester_user):
permission_type = PermissionType.ACTION_CREATE rbac_utils.assert_user_has_resource_api_permission(user_db=requester_user, resource_api=action, permission_type=permission_type) try: validate_not_part_of_system_pack(action) action_validator.validate_action(action) except (ValidationError, ValueError, ValueValidationException, InvalidActionParameterException) as e: LOG.exception('Unable to create action data=%s', action) abort(http_client.BAD_REQUEST, str(e)) return data_files = getattr(action, 'data_files', []) written_data_files = [] if data_files: written_data_files = self._handle_data_files(pack_ref=action.pack, data_files=data_files) action_model = ActionAPI.to_model(action) LOG.debug('/actions/ POST verified ActionAPI object=%s', action) action_db = Action.add_or_update(action_model) LOG.debug('/actions/ POST saved ActionDB object=%s', action_db) if written_data_files: self._dispatch_trigger_for_written_data_files(action_db=action_db, written_data_files=written_data_files) extra = {'action_db': action_db} LOG.audit(('Action created. Action.id=%s' % action_db.id), extra=extra) action_api = ActionAPI.from_model(action_db) return Response(json=action_api, status=http_client.CREATED)
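A hypothetical request body for this endpoint, showing how data_files rides along with the action definition. Field values are illustrative; only the data_files handling is described by the code above.

    action = {
        'name': 'greet',
        'pack': 'examples',
        'runner_type': 'python-script',
        'entry_point': 'actions/greet.py',
        'parameters': {'name': {'type': 'string', 'required': True}},
        'data_files': [
            {
                'file_path': 'actions/greet.py',
                'content': 'def run(name):\n    return "Hello, %s" % name\n',
            },
        ],
    }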
'Delete an action. Handles requests: POST /actions/1?_method=delete DELETE /actions/1 DELETE /actions/mypack.myaction'
def delete(self, ref_or_id, requester_user):
action_db = self._get_by_ref_or_id(ref_or_id=ref_or_id) action_id = action_db.id permission_type = PermissionType.ACTION_DELETE rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user, resource_db=action_db, permission_type=permission_type) try: validate_not_part_of_system_pack(action_db) except ValueValidationException as e: abort(http_client.BAD_REQUEST, str(e)) LOG.debug('DELETE /actions/ lookup with ref_or_id=%s found object: %s', ref_or_id, action_db) try: Action.delete(action_db) except Exception as e: LOG.error('Database delete encountered exception during delete of id="%s". Exception was %s', action_id, e) abort(http_client.INTERNAL_SERVER_ERROR, str(e)) return extra = {'action_db': action_db} LOG.audit(('Action deleted. Action.id=%s' % action_db.id), extra=extra) return Response(status=http_client.NO_CONTENT)
'Method for handling action data files. This method performs two tasks: 1. Writes files to disk 2. Updates affected PackDB model'
def _handle_data_files(self, pack_ref, data_files):
written_file_paths = self._write_data_files_to_disk(pack_ref=pack_ref, data_files=data_files) self._update_pack_model(pack_ref=pack_ref, data_files=data_files, written_file_paths=written_file_paths) return written_file_paths
'Write files to disk.'
def _write_data_files_to_disk(self, pack_ref, data_files):
written_file_paths = [] for data_file in data_files: file_path = data_file['file_path'] content = data_file['content'] file_path = get_pack_resource_file_abs_path(pack_ref=pack_ref, resource_type='action', file_path=file_path) LOG.debug(('Writing data file "%s" to "%s"' % (str(data_file), file_path))) self._write_data_file(pack_ref=pack_ref, file_path=file_path, content=content) written_file_paths.append(file_path) return written_file_paths
'Update PackDB models (update files list).'
def _update_pack_model(self, pack_ref, data_files, written_file_paths):
file_paths = [] for file_path in written_file_paths: file_path = get_relative_path_to_pack(pack_ref=pack_ref, file_path=file_path) file_paths.append(file_path) pack_db = Pack.get_by_ref(pack_ref) pack_db.files = set(pack_db.files) pack_db.files.update(set(file_paths)) pack_db.files = list(pack_db.files) pack_db = Pack.add_or_update(pack_db) return pack_db
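The files list is deduplicated by round-tripping through a set, so re-registering an existing data file does not add a duplicate entry; note that ordering is not preserved. For example:

    existing = ['actions/greet.py', 'README.md']
    written = ['actions/greet.py', 'actions/lib/util.py']

    files = set(existing)
    files.update(written)
    files = list(files)
    assert sorted(files) == ['README.md', 'actions/greet.py', 'actions/lib/util.py']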
'Write data file on disk.'
def _write_data_file(self, pack_ref, file_path, content):
pack_base_path = get_pack_base_path(pack_name=pack_ref) if (not os.path.isdir(pack_base_path)): raise ValueError(('Directory for pack "%s" doesn\'t exist' % pack_ref)) directory = os.path.dirname(file_path) if (not os.path.isdir(directory)): os.makedirs(directory) with open(file_path, 'w') as fp: fp.write(content)
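A standalone sketch of the same "create parent directories, then write" step. The isdir()/makedirs() pair above can race when two writers create the same directory concurrently; on Python 3, exist_ok=True sidesteps that. This variant is a suggestion, not the code's current behaviour.

    import os

    def write_data_file(file_path, content):
        directory = os.path.dirname(file_path)
        os.makedirs(directory, exist_ok=True)  # tolerant of concurrent creation
        with open(file_path, 'w') as fp:
            fp.write(content)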
'This method processes the file content and removes the Unicode BOM character if one is present. Note: If we don\'t do that, the files view explodes with "UnicodeDecodeError: ... invalid start byte" because json.dump doesn\'t know how to handle the BOM character.'
def _process_file_content(self, content):
if content.startswith(codecs.BOM_UTF8): content = content[BOM_LEN:] return content
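BOM_LEN is presumably len(codecs.BOM_UTF8), i.e. 3 bytes (an assumption, since the constant is defined elsewhere). A quick worked example of the strip:

    import codecs

    raw = codecs.BOM_UTF8 + b'{"name": "greet"}'
    clean = raw[len(codecs.BOM_UTF8):] if raw.startswith(codecs.BOM_UTF8) else raw
    assert clean == b'{"name": "greet"}'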
'Outputs the content of all the files inside the pack. Handles requests: GET /packs/views/files/<pack_ref_or_id>'
def get_one(self, ref_or_id, requester_user):
pack_db = self._get_by_ref_or_id(ref_or_id=ref_or_id) rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user, resource_db=pack_db, permission_type=PermissionType.PACK_VIEW) if (not pack_db): msg = ('Pack with ref_or_id "%s" does not exist' % ref_or_id) raise StackStormDBObjectNotFoundError(msg) pack_ref = pack_db.ref pack_files = pack_db.files result = [] for file_path in pack_files: normalized_file_path = get_pack_file_abs_path(pack_ref=pack_ref, file_path=file_path) if ((not normalized_file_path) or (not os.path.isfile(normalized_file_path))): continue file_size = self._get_file_size(file_path=normalized_file_path) if ((file_size is not None) and (file_size > MAX_FILE_SIZE)): LOG.debug(('Skipping file "%s" which size exceeds max file size (%s bytes)' % (normalized_file_path, MAX_FILE_SIZE))) continue content = self._get_file_content(file_path=normalized_file_path) include_file = self._include_file(file_path=file_path, content=content) if (not include_file): LOG.debug(('Skipping binary file "%s"' % normalized_file_path)) continue item = {'file_path': file_path, 'content': content} result.append(item) return result
'Method which returns True if the provided file content should be included in the response. Right now we exclude any file with a UTF-8 BOM character or a null byte in it - those are most likely binary files such as icons, etc.'
def _include_file(self, file_path, content):
if (codecs.BOM_UTF8 in content[:1024]): return False if ('\x00' in content[:1024]): return False return True
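The same sniffing heuristic as a standalone helper: anything whose first 1024 bytes contain a UTF-8 BOM or a NUL byte is treated as binary (a common, if imperfect, heuristic):

    import codecs

    def looks_textual(content):
        head = content[:1024]
        return codecs.BOM_UTF8 not in head and b'\x00' not in head

    assert looks_textual(b'name: greet\nversion: 1.0\n')
    assert not looks_textual(b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR')  # PNG header contains NULs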
'Outputs the content of a specific file in a pack. Handles requests: GET /packs/views/file/<pack_ref_or_id>/<file path>'
def get_one(self, ref_or_id, file_path, requester_user, if_none_match=None, if_modified_since=None):
pack_db = self._get_by_ref_or_id(ref_or_id=ref_or_id) if (not pack_db): msg = ('Pack with ref_or_id "%s" does not exist' % ref_or_id) raise StackStormDBObjectNotFoundError(msg) if (not file_path): raise ValueError('Missing file path') pack_ref = pack_db.ref permission_type = PermissionType.PACK_VIEW if (file_path not in WHITELISTED_FILE_PATHS): rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user, resource_db=pack_db, permission_type=permission_type) normalized_file_path = get_pack_file_abs_path(pack_ref=pack_ref, file_path=file_path) if ((not normalized_file_path) or (not os.path.isfile(normalized_file_path))): raise StackStormDBObjectNotFoundError(('File "%s" not found' % file_path)) (file_size, file_mtime) = self._get_file_stats(file_path=normalized_file_path) response = Response() if (not self._is_file_changed(file_mtime, if_none_match=if_none_match, if_modified_since=if_modified_since)): response.status = http_client.NOT_MODIFIED else: if ((file_size is not None) and (file_size > MAX_FILE_SIZE)): msg = ('File %s exceeds maximum allowed file size (%s bytes)' % (file_path, MAX_FILE_SIZE)) raise ValueError(msg) content_type = (mimetypes.guess_type(normalized_file_path)[0] or 'application/octet-stream') response.headers['Content-Type'] = content_type response.body = self._get_file_content(file_path=normalized_file_path) response.headers['Last-Modified'] = format_date_time(file_mtime) response.headers['ETag'] = repr(file_mtime) return response
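A hypothetical conditional-GET exchange against this endpoint; the /v1 prefix and the auth header are assumptions, but the ETag / Last-Modified round trip follows directly from the handler above:

    import requests

    url = 'https://st2.example.com/v1/packs/views/file/examples/actions/greet.py'
    headers = {'X-Auth-Token': 'TOKEN'}  # placeholder token

    first = requests.get(url, headers=headers)
    second = requests.get(url, headers=dict(headers, **{
        'If-None-Match': first.headers['ETag'],
        'If-Modified-Since': first.headers['Last-Modified'],
    }))
    assert second.status_code == 304  # unchanged file -> NOT_MODIFIED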
':param exclude_fields: A list of object fields to exclude. :type exclude_fields: ``list``'
def _get_all(self, exclude_fields=None, sort=None, offset=0, limit=None, query_options=None, from_model_kwargs=None, raw_filters=None):
raw_filters = (copy.deepcopy(raw_filters) or {}) exclude_fields = (exclude_fields or []) query_options = (query_options if query_options else self.query_options) sort = (sort.split(',') if sort else []) db_sort_values = [] for sort_key in sort: if sort_key.startswith('-'): direction = '-' sort_key = sort_key[1:] elif sort_key.startswith('+'): direction = '+' sort_key = sort_key[1:] else: direction = '' if (sort_key not in self.supported_filters): continue sort_value = (direction + self.supported_filters[sort_key]) db_sort_values.append(sort_value) default_sort_values = copy.copy(query_options.get('sort')) raw_filters['sort'] = (db_sort_values if db_sort_values else default_sort_values) offset = int(offset) if (offset >= (2 ** 31)): raise ValueError(('Offset "%s" specified is more than 32-bit int' % offset)) if (limit and (int(limit) > self.max_limit)): msg = ('Limit "%s" specified, maximum value is "%s"' % (limit, self.max_limit)) raise ValueError(msg) eop = ((offset + int(limit)) if limit else None) filters = {} for (k, v) in six.iteritems(self.supported_filters): filter_value = raw_filters.get(k, None) if (not filter_value): continue value_transform_function = self.filter_transform_functions.get(k, None) value_transform_function = (value_transform_function or (lambda value: value)) filter_value = value_transform_function(value=filter_value) if ((k == 'id') and isinstance(filter_value, list)): filters[(k + '__in')] = filter_value else: filters['__'.join(v.split('.'))] = filter_value instances = self.access.query(exclude_fields=exclude_fields, **filters) if (limit == 1): instances = instances.limit(limit) from_model_kwargs = (from_model_kwargs or {}) from_model_kwargs.update(self.from_model_kwargs) result = [] for instance in instances[offset:eop]: item = self.model.from_model(instance, **from_model_kwargs) result.append(item) resp = Response(json=result) resp.headers['X-Total-Count'] = str(instances.count()) if limit: resp.headers['X-Limit'] = str(limit) return resp
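A worked example of the ?sort= parsing above; the supported_filters mapping here is hypothetical:

    supported_filters = {'name': 'name', 'timestamp': 'start_timestamp'}
    sort = '-timestamp,+name,bogus'

    db_sort_values = []
    for key in sort.split(','):
        direction = key[0] if key[0] in '+-' else ''
        key = key.lstrip('+-')
        if key in supported_filters:
            db_sort_values.append(direction + supported_filters[key])

    assert db_sort_values == ['-start_timestamp', '+name']  # unknown keys are dropped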
':param exclude_fields: A list of object fields to exclude. :type exclude_fields: ``list``'
def _get_one_by_id(self, id, requester_user, permission_type, exclude_fields=None, from_model_kwargs=None):
instance = self._get_by_id(resource_id=id, exclude_fields=exclude_fields) if permission_type: rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user, resource_db=instance, permission_type=permission_type) if (not instance): msg = ('Unable to identify resource with id "%s".' % id) abort(http_client.NOT_FOUND, msg) from_model_kwargs = (from_model_kwargs or {}) from_model_kwargs.update(self.from_model_kwargs) result = self.model.from_model(instance, **from_model_kwargs) return result
':param exclude_fields: A list of object fields to exclude. :type exclude_fields: ``list``'
def _get_one_by_name_or_id(self, name_or_id, requester_user, permission_type, exclude_fields=None, from_model_kwargs=None):
instance = self._get_by_name_or_id(name_or_id=name_or_id, exclude_fields=exclude_fields) if permission_type: rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user, resource_db=instance, permission_type=permission_type) if (not instance): msg = ('Unable to identify resource with name_or_id "%s".' % name_or_id) abort(http_client.NOT_FOUND, msg) from_model_kwargs = (from_model_kwargs or {}) from_model_kwargs.update(self.from_model_kwargs) result = self.model.from_model(instance, **from_model_kwargs) return result
'Retrieve a resource object by an id or a name.'
def _get_by_name_or_id(self, name_or_id, exclude_fields=None):
resource_db = self._get_by_id(resource_id=name_or_id, exclude_fields=exclude_fields) if (not resource_db): resource_db = self._get_by_name(resource_name=name_or_id, exclude_fields=exclude_fields) if (not resource_db): msg = ('Resource with a name or id "%s" not found' % name_or_id) raise StackStormDBObjectNotFoundError(msg) return resource_db
'Retrieve an item given a scope and a name. Only KeyValuePair currently has a concept of \'scope\'. :param scope: Scope the key belongs to. :type scope: ``str`` :param name: Name of the key. :type name: ``str``'
def _get_one_by_scope_and_name(self, scope, name, from_model_kwargs=None):
instance = self.access.get_by_scope_and_name(scope=scope, name=name) if (not instance): msg = ('KeyValuePair with name: %s and scope: %s not found in db.' % (name, scope)) raise StackStormDBObjectNotFoundError(msg) from_model_kwargs = (from_model_kwargs or {}) result = self.model.from_model(instance, **from_model_kwargs) LOG.debug('GET with scope=%s and name=%s, client_result=%s', scope, name, result) return result
'Validate that the provided exclude fields refer to supported attributes.'
def _validate_exclude_fields(self, exclude_fields):
if (not exclude_fields): return exclude_fields for field in exclude_fields: if (field not in self.valid_exclude_attributes): msg = ('Invalid or unsupported attribute specified: %s' % field) raise ValueError(msg) return exclude_fields
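Usage sketch; the valid_exclude_attributes value here is hypothetical:

    controller.valid_exclude_attributes = ['result', 'trigger_instance']

    controller._validate_exclude_fields(['result'])         # -> ['result']
    controller._validate_exclude_fields(None)               # -> None (nothing to validate)
    controller._validate_exclude_fields(['no_such_field'])  # raises ValueError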