desc
stringlengths 3
26.7k
| decl
stringlengths 11
7.89k
| bodies
stringlengths 8
553k
|
---|---|---|
def _get_script_arguments(self, named_args=None, positional_args=None):
    """
    Build a string of named and positional arguments which are passed to the
    script.

    :param named_args: Dictionary with named arguments.
    :type named_args: ``dict``.

    :param positional_args: List with positional arguments.
    :type positional_args: ``list``.

    :rtype: ``str``
    """
    command_parts = []

    # add named args
    if named_args is not None:
        for arg, value in six.iteritems(named_args):
            # Skip arguments with no value. Note: "unicode" doesn't exist on
            # Python 3 so we use six.string_types which covers str + unicode on
            # Python 2 and str on Python 3.
            if value is None or (isinstance(value, six.string_types) and len(value) < 1):
                LOG.debug('Ignoring arg %s as its value is %s.', arg, value)
                continue

            if isinstance(value, bool):
                # True booleans are passed through as a bare flag, False ones
                # are dropped entirely
                if value is True:
                    command_parts.append(arg)
            else:
                values = (quote_unix(arg), quote_unix(six.text_type(value)))
                command_parts.append(six.text_type('%s=%s' % values))

    # add positional args
    if positional_args:
        quoted_pos_args = [quote_unix(pos_arg) for pos_arg in positional_args]
        pos_args_string = ' '.join(quoted_pos_args)
        command_parts.append(pos_args_string)

    return ' '.join(command_parts)
|
@staticmethod
def to_string_reference(user, name):
    """
    Given a key ``name`` and ``user``, return a new name (string ref) which
    addresses that key value pair in the context of the provided user.

    :param user: User to whom key belongs.
    :type user: ``str``

    :param name: Original name of the key.
    :type name: ``str``

    :rtype: ``str``
    """
    if not (user and name):
        raise ValueError('Both "user" and "name" must be valid to generate ref.')

    return UserKeyReference(user=user, name=name).ref
|
@staticmethod
def from_string_reference(ref):
    """
    Given a user key ``reference``, return the user and actual name of the key.

    :param ref: Reference to user key.
    :type ref: ``str``

    :rtype: ``tuple`` of ``str`` and ``str``
    """
    return (UserKeyReference.get_user(ref), UserKeyReference.get_name(ref))
|
@staticmethod
def get_user(ref):
    """
    Given a user key ``reference``, return the user to whom the key belongs.

    :param ref: Reference to user key.
    :type ref: ``str``

    :rtype: ``str``
    """
    try:
        # Split on the first separator only -- key names may contain it too
        parts = ref.split(USER_SEPARATOR, 1)
        return parts[0]
    except (IndexError, AttributeError):
        # AttributeError covers non-string refs (e.g. None)
        raise InvalidUserKeyReferenceError(ref=ref)
|
@staticmethod
def get_name(ref):
    """
    Given a user key ``reference``, return the name of the key.

    :param ref: Reference to user key.
    :type ref: ``str``

    :rtype: ``str``
    """
    try:
        # IndexError fires when the separator is absent from the ref
        parts = ref.split(USER_SEPARATOR, 1)
        return parts[1]
    except (IndexError, AttributeError):
        raise InvalidUserKeyReferenceError(ref=ref)
|
def _convert_from_datetime(self, val):
    """
    Convert a `datetime` object to number of microseconds since epoch representation
    (which will be stored in MongoDB). This is the reverse function of
    `_convert_from_db`.
    """
    return self._datetime_to_microseconds_since_epoch(value=val)
|
def _microseconds_since_epoch_to_datetime(self, data):
    """
    Convert a number representation to a `datetime` object (the object you
    will manipulate). This is the reverse function of
    `_convert_from_datetime`.

    :param data: Number of microseconds since the epoch.
    :type data: ``int``
    """
    # Split into whole seconds and the microsecond remainder in one step
    seconds, microseconds = divmod(data, SECOND_TO_MICROSECONDS)
    dt = datetime.datetime.utcfromtimestamp(seconds)
    dt = dt.replace(microsecond=microseconds)
    return date_utils.add_utc_tz(dt)
|
def _datetime_to_microseconds_since_epoch(self, value):
    """
    Convert datetime in UTC to number of microseconds from epoch.

    Note: datetime which is passed to the function needs to be in UTC timezone
    (e.g. as returned by ``datetime.datetime.utcnow``).

    :rtype: ``int``
    """
    # Reject naive datetimes and any aware datetime with a non-zero offset
    is_utc = value.tzinfo and value.tzinfo.utcoffset(value) == datetime.timedelta(0)
    if not is_utc:
        raise ValueError('Value passed to this function needs to be in UTC timezone')

    # timegm drops sub-second precision, so add the microseconds back in
    whole_seconds = calendar.timegm(value.timetuple())
    return int(whole_seconds * SECOND_TO_MICROSECONDS) + value.time().microsecond
|
def get_runners(self, base_dirs):
    """
    Retrieve a list of runners in the provided directories.

    :return: Dictionary where the key is runner name and the value is full path to the
             runner directory.
    :rtype: ``dict``
    """
    assert isinstance(base_dirs, list)

    runners = {}
    for directory in base_dirs:
        if not os.path.isdir(directory):
            raise ValueError('Directory "%s" doesn\'t exist' % directory)
        runners.update(self._get_runners_from_dir(base_dir=directory))

    return runners
|
def get_packs(self, base_dirs):
    """
    Retrieve a list of packs in the provided directories.

    :return: Dictionary where the key is pack name and the value is full path to the
             pack directory.
    :rtype: ``dict``
    """
    assert isinstance(base_dirs, list)

    packs = {}
    for directory in base_dirs:
        if not os.path.isdir(directory):
            raise ValueError('Directory "%s" doesn\'t exist' % directory)
        packs.update(self._get_packs_from_dir(base_dir=directory))

    return packs
|
def get_content(self, base_dirs, content_type):
    """
    Retrieve content from the provided directories.

    Provided directories are searched from left to right. If a pack with the same name
    exists in multiple directories, first pack which is found wins.

    :param base_dirs: Directories to look into.
    :type base_dirs: ``list``

    :param content_type: Content type to look for (sensors, actions, rules).
    :type content_type: ``str``

    :rtype: ``dict``
    """
    assert isinstance(base_dirs, list)

    if content_type not in self.ALLOWED_CONTENT_TYPES:
        raise ValueError('Unsupported content_type: %s' % content_type)

    content = {}
    # Tracks which base dir each pack came from, for the duplicate warning below
    pack_to_dir_map = {}

    for base_dir in base_dirs:
        if not os.path.isdir(base_dir):
            raise ValueError('Directory "%s" doesn\'t exist' % base_dir)

        dir_content = self._get_content_from_dir(base_dir=base_dir, content_type=content_type)
        for pack_name, pack_content in six.iteritems(dir_content):
            if pack_name in content:
                # First directory wins; later duplicates are only logged
                pack_dir = pack_to_dir_map[pack_name]
                LOG.warning('Pack "%s" already found in "%s", ignoring content from "%s"' %
                            (pack_name, pack_dir, base_dir))
                continue

            content[pack_name] = pack_content
            pack_to_dir_map[pack_name] = base_dir

    return content
|
def get_content_from_pack(self, pack_dir, content_type):
    """
    Retrieve content from the provided pack directory.

    :param pack_dir: Path to the pack directory.
    :type pack_dir: ``str``

    :param content_type: Content type to look for (sensors, actions, rules).
    :type content_type: ``str``

    :rtype: ``str``
    """
    if content_type not in self.ALLOWED_CONTENT_TYPES:
        raise ValueError('Unsupported content_type: %s' % content_type)

    if not os.path.isdir(pack_dir):
        raise ValueError('Directory "%s" doesn\'t exist' % pack_dir)

    return self._get_content_from_pack_dir(pack_dir=pack_dir, content_type=content_type)
|
def get_runners(self, base_dirs):
    """
    Retrieve a list of runners in the provided directories.

    :return: Dictionary where the key is runner name and the value is full path to the
             runner directory.
    :rtype: ``dict``
    """
    assert isinstance(base_dirs, list)

    found = {}
    for path in base_dirs:
        if not os.path.isdir(path):
            raise ValueError('Directory "%s" doesn\'t exist' % path)

        runners_in_dir = self._get_runners_from_dir(base_dir=path)
        found.update(runners_in_dir)

    return found
|
def load(self, file_path, expected_type=None):
    """
    Loads content from file_path if file_path's extension
    is one of allowed ones (See ALLOWED_EXTS).

    Throws ValueError on malformed meta.

    NOTE(review): the original docs claimed UnsupportedMetaException for disallowed
    filetypes, but the code raises a plain ``Exception`` -- confirm intent.

    :param file_path: Absolute path to the file to load content from.
    :type file_path: ``str``

    :param expected_type: Expected type for the loaded and parsed content (optional).
    :type expected_type: ``object``

    :rtype: ``dict``
    """
    _, extension = os.path.splitext(file_path)

    if extension not in ALLOWED_EXTS:
        raise Exception('Unsupported meta type %s, file %s. Allowed: %s' %
                        (extension, file_path, ALLOWED_EXTS))

    parsed = self._load(PARSER_FUNCS[extension], file_path)

    if expected_type and not isinstance(parsed, expected_type):
        actual_type = type(parsed).__name__
        raise ValueError('Expected "%s", got "%s"' % (expected_type.__name__, actual_type))

    return parsed
|
def load(self):
    """
    :return: Dict with the following keys: roles, role_assignments, group_to_role_maps
    :rtype: ``dict``
    """
    # Dict literal values are evaluated top-to-bottom, preserving the original
    # load order.
    return {
        'roles': self.load_role_definitions(),
        'role_assignments': self.load_user_role_assignments(),
        'group_to_role_maps': self.load_group_to_role_maps(),
    }
|
def load_role_definitions(self):
    """
    Load all the role definitions.

    :rtype: ``dict``
    """
    LOG.info('Loading role definitions from "%s"' % self._role_definitions_path)

    definitions = {}
    for path in self._get_role_definitions_file_paths():
        LOG.debug('Loading role definition from: %s' % path)
        api = self.load_role_definition_from_file(file_path=path)

        # Duplicates are rejected even when one of the files is disabled
        if api.name in definitions:
            raise ValueError('Duplicate definition file found for role "%s"' % api.name)

        if not getattr(api, 'enabled', True):
            LOG.debug('Skipping disabled role "%s"' % api.name)
            continue

        definitions[api.name] = api

    return definitions
|
def load_user_role_assignments(self):
    """
    Load all the user role assignments.

    :rtype: ``dict``
    """
    LOG.info('Loading user role assignments from "%s"' % self._role_assignments_path)

    assignments = {}
    for path in self._get_role_assiginments_file_paths():
        LOG.debug('Loading user role assignments from: %s' % path)
        api = self.load_user_role_assignments_from_file(file_path=path)

        # Duplicates are rejected even when one of the files is disabled
        if api.username in assignments:
            raise ValueError('Duplicate definition file found for user "%s"' % api.username)

        if not getattr(api, 'enabled', True):
            LOG.debug('Skipping disabled role assignment for user "%s"' % api.username)
            continue

        assignments[api.username] = api

    return assignments
|
def load_group_to_role_maps(self):
    """
    Load all the remote group to local role mappings.

    :rtype: ``dict``
    """
    LOG.info('Loading group to role map definitions from "%s"' % self._role_maps_path)

    mappings = {}
    for path in self._get_group_to_role_maps_file_paths():
        LOG.debug('Loading group to role mapping from: %s' % path)
        api = self.load_group_to_role_map_assignment_from_file(file_path=path)
        # NOTE(review): unlike the role / assignment loaders, duplicate groups are
        # silently last-one-wins here -- confirm that's intended.
        mappings[api.group] = api

    return mappings
|
def load_role_definition_from_file(self, file_path):
    """
    Load role definition from file.

    :param file_path: Path to the role definition file.
    :type file_path: ``str``

    :return: Role definition.
    :rtype: :class:`RoleDefinitionFileFormatAPI`
    """
    content = self._meta_loader.load(file_path)

    if not content:
        raise ValueError('Role definition file "%s" is empty and invalid' % file_path)

    api = RoleDefinitionFileFormatAPI(**content)
    return api.validate()
|
def load_user_role_assignments_from_file(self, file_path):
    """
    Load user role assignments from file.

    :param file_path: Path to the user role assignment file.
    :type file_path: ``str``

    :return: User role assignments.
    :rtype: :class:`UserRoleAssignmentFileFormatAPI`
    """
    content = self._meta_loader.load(file_path)

    if not content:
        raise ValueError('Role assignment file "%s" is empty and invalid' % file_path)

    api = UserRoleAssignmentFileFormatAPI(**content)
    return api.validate()
|
def _get_role_definitions_file_paths(self):
    """
    Retrieve a list of paths for all the role definitions.

    Notes: Roles are sorted in an alphabetical order based on the role name.

    :rtype: ``list``
    """
    # The ``cmp`` keyword argument to sorted() was removed in Python 3; wrap the
    # comparator with functools.cmp_to_key so this works on both 2 and 3.
    import functools

    glob_str = self._role_definitions_path + '*.yaml'
    file_paths = glob.glob(glob_str)
    file_paths = sorted(file_paths, key=functools.cmp_to_key(compare_path_file_name))
    return file_paths
|
def _get_role_assiginments_file_paths(self):
    """
    Retrieve a list of paths for all the user role assignments.

    Notes: Assignments are sorted in an alphabetical order based on the username.

    :rtype: ``list``
    """
    # The ``cmp`` keyword argument to sorted() was removed in Python 3; wrap the
    # comparator with functools.cmp_to_key so this works on both 2 and 3.
    import functools

    glob_str = self._role_assignments_path + '*.yaml'
    file_paths = glob.glob(glob_str)
    file_paths = sorted(file_paths, key=functools.cmp_to_key(compare_path_file_name))
    return file_paths
|
def _get_group_to_role_maps_file_paths(self):
    """
    Retrieve a list of paths for remote group to local role mapping assignment files.

    :rtype: ``list``
    """
    # The ``cmp`` keyword argument to sorted() was removed in Python 3; wrap the
    # comparator with functools.cmp_to_key so this works on both 2 and 3.
    import functools

    glob_str = self._role_maps_path + '*.yaml'
    file_paths = glob.glob(glob_str)
    file_paths = sorted(file_paths, key=functools.cmp_to_key(compare_path_file_name))
    return file_paths
|
@classmethod
def get_valid_permissions_for_resource_type(cls, resource_type):
    """
    Return valid permissions for the provided resource type.

    :rtype: ``list``
    """
    # KeyError propagates for unknown resource types
    return RESOURCE_TYPE_TO_PERMISSION_TYPES_MAP[resource_type]
|
@classmethod
def get_resource_type(cls, permission_type):
    """
    Retrieve resource type from the provided permission type.

    :rtype: ``str``
    """
    # Special-case permission types which don't follow the
    # "<resource type>_<permission name>" convention.
    if permission_type == PermissionType.PACK_VIEWS_INDEX_HEALTH:
        return ResourceType.PACK
    if permission_type == PermissionType.EXECUTION_VIEWS_FILTERS_LIST:
        return ResourceType.EXECUTION

    components = permission_type.split('_')
    assert len(components) >= 2
    # Everything except the trailing permission name is the resource type
    return '_'.join(components[:-1])
|
@classmethod
def get_permission_name(cls, permission_type):
    """
    Retrieve permission name from the provided permission type.

    :rtype: ``str``
    """
    components = permission_type.split('_')
    assert len(components) >= 2

    if permission_type == PermissionType.PACK_VIEWS_INDEX_HEALTH:
        # Special case: everything after the first underscore is the name
        return permission_type.split('_', 1)[1]

    return components[-1]
|
@classmethod
def get_permission_description(cls, permission_type):
    """
    Retrieve a description for the provided permission_type.

    :rtype: ``str``
    """
    # NOTE: the "PERMISION" spelling matches the project-wide constant name
    return PERMISION_TYPE_TO_DESCRIPTION_MAP[permission_type]
|
@classmethod
def get_permission_type(cls, resource_type, permission_name):
    """
    Retrieve permission type enum value for the provided resource type and
    permission name.

    :rtype: ``str``
    """
    # Special case for sensor type
    if resource_type == ResourceType.SENSOR:
        resource_type = 'sensor'

    enum_name = '%s_%s' % (resource_type.upper(), permission_name.upper())
    permission_type = getattr(cls, enum_name, None)

    if not permission_type:
        raise ValueError('Unsupported permission type for type "%s" and name "%s"' %
                         (resource_type, permission_name))

    return permission_type
|
def sync(self, role_definition_apis, role_assignment_apis, group_to_role_map_apis):
    """
    Synchronize all the role definitions, user role assignments and remote group to
    local roles maps.

    :rtype: ``dict``
    """
    # Dict literal values are evaluated top-to-bottom, preserving the original
    # sync order (roles first, then assignments, then group maps).
    return {
        'roles': self.sync_roles(role_definition_apis),
        'role_assignments': self.sync_users_role_assignments(role_assignment_apis),
        'group_to_role_maps': self.sync_group_to_role_maps(group_to_role_map_apis),
    }
|
def sync_roles(self, role_definition_apis):
    """
    Synchronize all the role definitions in the database.

    Note: "updated" roles are synced as delete + re-create -- the existing RoleDB
    object and its permission grants are removed and fresh ones are created from
    the API object.

    :param role_definition_apis: RoleDefinition API objects for the definitions loaded from
                                 the files.
    :type role_definition_apis: ``list`` of :class:`RoleDefinitionFileFormatAPI`

    :rtype: ``tuple``
    """
    LOG.info('Synchronizing roles...')

    # Roles currently in the DB; system roles are excluded and therefore never touched
    role_dbs = rbac_services.get_all_roles(exclude_system=True)

    role_db_names = [role_db.name for role_db in role_dbs]
    role_db_names = set(role_db_names)
    role_api_names = [role_definition_api.name for role_definition_api in role_definition_apis]
    role_api_names = set(role_api_names)

    # Names only on disk (new), in both places (updated) and only in the DB (removed)
    new_role_names = role_api_names.difference(role_db_names)
    updated_role_names = role_db_names.intersection(role_api_names)
    removed_role_names = (role_db_names - role_api_names)

    LOG.debug(('New roles: %r' % new_role_names))
    LOG.debug(('Updated roles: %r' % updated_role_names))
    LOG.debug(('Removed roles: %r' % removed_role_names))

    # Updated roles are deleted here and re-created below
    role_names_to_delete = updated_role_names.union(removed_role_names)
    role_dbs_to_delete = [role_db for role_db in role_dbs if (role_db.name in role_names_to_delete)]
    role_names_to_create = new_role_names.union(updated_role_names)
    role_apis_to_create = [role_definition_api for role_definition_api in role_definition_apis if (role_definition_api.name in role_names_to_create)]

    # Delete the stale RoleDB objects (``system=False`` guards system roles at the
    # query level too)
    role_ids_to_delete = []
    for role_db in role_dbs_to_delete:
        role_ids_to_delete.append(role_db.id)

    LOG.debug(('Deleting %s stale roles' % len(role_ids_to_delete)))
    Role.query(id__in=role_ids_to_delete, system=False).delete()
    LOG.debug(('Deleted %s stale roles' % len(role_ids_to_delete)))

    # Delete the permission grants which belonged to the deleted roles
    permission_grant_ids_to_delete = []
    for role_db in role_dbs_to_delete:
        permission_grant_ids_to_delete.extend(role_db.permission_grants)

    LOG.debug(('Deleting %s stale permission grants' % len(permission_grant_ids_to_delete)))
    PermissionGrant.query(id__in=permission_grant_ids_to_delete).delete()
    LOG.debug(('Deleted %s stale permission grants' % len(permission_grant_ids_to_delete)))

    # Create new (and re-create updated) roles together with their permission grants
    LOG.debug(('Creating %s new roles' % len(role_apis_to_create)))
    created_role_dbs = []
    for role_api in role_apis_to_create:
        role_db = rbac_services.create_role(name=role_api.name, description=role_api.description)

        permission_grants = getattr(role_api, 'permission_grants', [])
        for permission_grant in permission_grants:
            resource_uid = permission_grant.get('resource_uid', None)
            if resource_uid:
                # Grant is scoped to a particular resource
                (resource_type, _) = parse_uid(resource_uid)
            else:
                # Global grant (not tied to a specific resource)
                resource_type = None
            permission_types = permission_grant['permission_types']
            assignment_db = rbac_services.create_permission_grant(role_db=role_db, resource_uid=resource_uid, resource_type=resource_type, permission_types=permission_types)
            role_db.permission_grants.append(str(assignment_db.id))
        created_role_dbs.append(role_db)

    LOG.debug(('Created %s new roles' % len(created_role_dbs)))
    LOG.info(('Roles synchronized (%s created, %s updated, %s removed)' % (len(new_role_names), len(updated_role_names), len(removed_role_names))))

    return [created_role_dbs, role_dbs_to_delete]
|
def sync_users_role_assignments(self, role_assignment_apis):
    """
    Synchronize role assignments for all the users in the database.

    :param role_assignment_apis: Role assignments API objects for the assignments loaded
                                 from the files.
    :type role_assignment_apis: ``list`` of :class:`UserRoleAssignmentFileFormatAPI`

    :return: Dictionary with created and removed role assignments for each user.
    :rtype: ``dict``
    """
    assert isinstance(role_assignment_apis, (list, tuple))

    LOG.info('Synchronizing users role assignments...')

    # Only local assignments are handled here; remote ones have their own syncer
    role_assignment_dbs = rbac_services.get_all_role_assignments(include_remote=False)

    user_dbs = User.get_all()

    username_to_user_db_map = dict([(user_db.name, user_db) for user_db in user_dbs])
    username_to_role_assignment_api_map = dict([(role_assignment_api.username, role_assignment_api) for role_assignment_api in role_assignment_apis])

    username_to_role_assignment_dbs_map = defaultdict(list)
    for role_assignment_db in role_assignment_dbs:
        username = role_assignment_db.user
        username_to_role_assignment_dbs_map[username].append(role_assignment_db)

    # Bug fix: on Python 3 ``dict.keys()`` returns a view object which doesn't
    # support "+", so the previous ``keys() + keys() + keys()`` concatenation raised
    # a TypeError. Union of the key sets is equivalent (the result was de-duplicated
    # via set() anyway).
    all_usernames = list(set(username_to_user_db_map) |
                         set(username_to_role_assignment_api_map) |
                         set(username_to_role_assignment_dbs_map))

    results = {}
    for username in all_usernames:
        role_assignment_api = username_to_role_assignment_api_map.get(username, None)
        user_db = username_to_user_db_map.get(username, None)

        if not user_db:
            # Assignments are still created for users which don't exist in the DB yet
            user_db = UserDB(name=username)
            LOG.debug('User "%s" doesn\'t exist in the DB, creating assignment anyway' % username)

        role_assignment_dbs = username_to_role_assignment_dbs_map.get(username, [])

        # Sanity check: this method must only ever see local assignments
        for role_assignment_db in role_assignment_dbs:
            assert role_assignment_db.is_remote is False

        result = self._sync_user_role_assignments(user_db=user_db, role_assignment_dbs=role_assignment_dbs, role_assignment_api=role_assignment_api)
        results[username] = result

    LOG.info('User role assignments synchronized')

    return results
|
def _sync_user_role_assignments(self, user_db, role_assignment_dbs, role_assignment_api):
    """
    Synchronize role assignments for a particular user.

    :param user_db: User to synchronize the assignments for.
    :type user_db: :class:`UserDB`

    :param role_assignment_dbs: Existing user role assignments.
    :type role_assignment_dbs: ``list`` of :class:`UserRoleAssignmentDB`

    :param role_assignment_api: Role assignment API for a particular user (may be
                                ``None`` when no assignment file exists for the user).
    :param role_assignment_api: :class:`UserRoleAssignmentFileFormatAPI`

    :rtype: ``tuple``
    """
    db_role_names = [role_assignment_db.role for role_assignment_db in role_assignment_dbs]
    db_role_names = set(db_role_names)
    api_role_names = (role_assignment_api.roles if role_assignment_api else [])
    api_role_names = set(api_role_names)

    # Roles only on disk (new), in both places (updated -- synced as delete +
    # re-create) and only in the DB (removed)
    new_role_names = api_role_names.difference(db_role_names)
    updated_role_names = db_role_names.intersection(api_role_names)
    removed_role_names = (db_role_names - api_role_names)

    LOG.debug(('New assignments for user "%s": %r' % (user_db.name, new_role_names)))
    LOG.debug(('Updated assignments for user "%s": %r' % (user_db.name, updated_role_names)))
    LOG.debug(('Removed assignments for user "%s": %r' % (user_db.name, removed_role_names)))

    role_names_to_delete = updated_role_names.union(removed_role_names)
    role_assignment_dbs_to_delete = [role_assignment_db for role_assignment_db in role_assignment_dbs if (role_assignment_db.role in role_names_to_delete)]

    # Only local (non-remote) assignments are deleted; ``is_remote__exists=False``
    # also matches legacy documents which predate the is_remote field.
    queryset_filter = ((Q(user=user_db.name) & Q(role__in=role_names_to_delete)) & (Q(is_remote=False) | Q(is_remote__exists=False)))
    UserRoleAssignmentDB.objects(queryset_filter).delete()
    LOG.debug(('Removed %s assignments for user "%s"' % (len(role_assignment_dbs_to_delete), user_db.name)))

    role_names_to_create = new_role_names.union(updated_role_names)
    role_dbs_to_assign = Role.query(name__in=role_names_to_create)

    created_role_assignment_dbs = []
    for role_db in role_dbs_to_assign:
        # When this loop runs role_assignment_api is non-None: otherwise
        # role_names_to_create would be empty, so the attribute access is safe.
        if (role_db.name in role_assignment_api.roles):
            description = getattr(role_assignment_api, 'description', None)
        else:
            description = None
        assignment_db = rbac_services.assign_role_to_user(role_db=role_db, user_db=user_db, description=description)
        created_role_assignment_dbs.append(assignment_db)

    LOG.debug(('Created %s new assignments for user "%s"' % (len(role_dbs_to_assign), user_db.name)))

    return (created_role_assignment_dbs, role_assignment_dbs_to_delete)
|
def sync(self, user_db, groups):
    """
    Synchronize remote role assignments for the provided user based on the remote
    groups the user is a member of.

    :param user_db: User to sync the assignments for.
    :type user_db: :class:`UserDB`

    :param groups: A list of remote groups user is a member of.
    :type groups: ``list`` of ``str``

    :return: A list of mappings which have been created.
    :rtype: ``list`` of :class:`UserRoleAssignmentDB`
    """
    groups = list(set(groups))

    extra = {'user_db': user_db, 'groups': groups}
    LOG.info('Synchronizing remote role assignments for user "%s"' % str(user_db),
             extra=extra)

    # Retrieve group to role mappings for the provided groups
    all_mapping_dbs = GroupToRoleMapping.query(group__in=groups)
    enabled_mapping_dbs = [mapping_db for mapping_db in all_mapping_dbs if mapping_db.enabled]
    disabled_mapping_dbs = [mapping_db for mapping_db in all_mapping_dbs if not mapping_db.enabled]

    if not all_mapping_dbs:
        LOG.debug('No group to role mappings found for user "%s"' % str(user_db), extra=extra)

    # Existing remote assignments and the set of roles the mappings currently grant
    remote_assignment_dbs = UserRoleAssignment.query(user=user_db.name, is_remote=True)

    existing_role_names = [assignment_db.role for assignment_db in remote_assignment_dbs]
    existing_role_names = set(existing_role_names)
    current_role_names = set([])

    for mapping_db in all_mapping_dbs:
        for role in mapping_db.roles:
            current_role_names.add(role)

    new_role_names = current_role_names.difference(existing_role_names)
    updated_role_names = existing_role_names.intersection(current_role_names)
    # Bug fix: this previously subtracted ``new_role_names`` which is by construction
    # disjoint from ``existing_role_names``, so *every* existing remote assignment was
    # classified as removed. Subtracting ``current_role_names`` correctly yields only
    # the assignments whose mapping no longer grants the role.
    removed_role_names = (existing_role_names - current_role_names)

    # Roles granted via disabled mappings are always removed
    for mapping_db in disabled_mapping_dbs:
        for role in mapping_db.roles:
            removed_role_names.add(role)

    LOG.debug('New role assignments: %r' % new_role_names)
    LOG.debug('Updated role assignments: %r' % updated_role_names)
    LOG.debug('Removed role assignments: %r' % removed_role_names)

    # Delete updated (re-created below) and removed remote assignments
    role_names_to_delete = updated_role_names.union(removed_role_names)
    role_assignment_dbs_to_delete = [role_assignment_db for role_assignment_db in remote_assignment_dbs if role_assignment_db.role in role_names_to_delete]
    UserRoleAssignment.query(user=user_db.name, role__in=role_names_to_delete, is_remote=True).delete()

    # Create remote assignments for the roles granted by enabled mappings
    created_assignments_dbs = []
    for mapping_db in enabled_mapping_dbs:
        extra['mapping_db'] = mapping_db

        for role_name in mapping_db.roles:
            role_db = rbac_services.get_role_by_name(name=role_name)
            if not role_db:
                # Skip mappings which refer to a role which doesn't exist in the DB
                LOG.info('Role with name "%s" for mapping "%s" not found, skipping assignment.' %
                         (role_name, str(mapping_db)), extra=extra)
                continue

            description = ('Automatic role assignment based on the remote user membership in group "%s"' % mapping_db.group)
            assignment_db = rbac_services.assign_role_to_user(role_db=role_db, user_db=user_db, description=description, is_remote=True)
            assert assignment_db.is_remote is True
            created_assignments_dbs.append(assignment_db)

    LOG.debug('Created %s new remote role assignments for user "%s"' %
              (len(created_assignments_dbs), str(user_db)), extra=extra)

    return (created_assignments_dbs, role_assignment_dbs_to_delete)
|
def user_has_permission(self, user_db, permission_type):
    """
    Method for checking user permissions which are not tied to a particular resource.
    """
    # Abstract method -- concrete resolver subclasses must override this.
    raise NotImplementedError()
|
def user_has_resource_api_permission(self, user_db, resource_api, permission_type):
    """
    Method for checking user permissions on a resource which is to be created (e.g.
    create operation).
    """
    # Abstract method -- concrete resolver subclasses must override this.
    raise NotImplementedError()
|
def user_has_resource_db_permission(self, user_db, resource_db, permission_type):
    """
    Method for checking user permissions on an existing resource (e.g. get one, edit,
    delete operations).
    """
    # Abstract method -- concrete resolver subclasses must override this.
    raise NotImplementedError()
|
def _user_has_list_permission(self, user_db, permission_type):
    """
    Common method for checking if a user has specific "list" resource permission (e.g.
    rules_list, action_list, etc.).
    """
    permission_name = PermissionType.get_permission_name(permission_type)
    assert permission_name == 'list'
    # "list" permissions are global (not tied to a single resource)
    return self._user_has_global_permission(user_db=user_db, permission_type=permission_type)
|
def _user_has_global_permission(self, user_db, permission_type):
    """
    Custom method for checking if user has a particular global permission which doesn't
    apply to a specific resource but is a system-wide aka global permission.
    """
    log_context = {
        'user_db': user_db,
        'permission_type': permission_type,
        'resolver': self.__class__.__name__
    }
    self._log('Checking user permissions', extra=log_context)

    # 1. Check grants inherited from a system role
    if self._user_has_system_role_permission(user_db=user_db, permission_type=permission_type):
        self._log('Found a matching grant via system role', extra=log_context)
        return True

    # 2. Check direct grants
    permission_grants = get_all_permission_grants_for_user(user_db=user_db,
                                                           permission_types=[permission_type])
    if permission_grants:
        self._log('Found a direct grant', extra=log_context)
        return True

    self._log('No matching grants found', extra=log_context)
    return False
|
def _user_has_system_role_permission(self, user_db, permission_type):
    """
    Check the user system roles and return True if user has the required permission.

    :rtype: ``bool``
    """
    permission_name = PermissionType.get_permission_name(permission_type)

    user_role_dbs = get_roles_for_user(user_db=user_db)
    role_names = set(role_db.name for role_db in user_role_dbs)

    # Admin roles are granted every permission
    if SystemRole.SYSTEM_ADMIN in role_names or SystemRole.ADMIN in role_names:
        return True

    # Observers are only granted read permissions
    if SystemRole.OBSERVER in role_names and permission_name in READ_PERMISSION_NAMES:
        return True

    return False
|
':rtype: ``bool``'
| def _matches_permission_grant(self, resource_db, permission_grant, permission_type, all_permission_type):
| if (permission_type in permission_grant.permission_types):
return True
elif (all_permission_type in permission_grant.permission_types):
return True
return False
|
def _get_all_permission_type_for_resource(self, resource_db):
    """
    Retrieve "ALL" permission type for the provided resource.
    """
    resource_type = resource_db.get_resource_type()
    return PermissionType.get_permission_type(resource_type=resource_type,
                                              permission_name='all')
|
def _log(self, message, extra, level=stdlib_logging.DEBUG, **kwargs):
    """
    Custom logger method which prefixes the message with the class and caller
    method name.
    """
    # f_back is the frame of whichever method invoked _log
    caller_name = sys._getframe().f_back.f_code.co_name
    prefix = '%s.%s: ' % (self.__class__.__name__, caller_name)
    LOG.log(level, prefix + message, extra=extra, **kwargs)
|
def user_has_trigger_permission(self, user_db, trigger):
    """
    Check if the user has access to the provided trigger.

    This method is to be used during rule create and update where we check if the
    user has the necessary trigger permissions.

    Note: Right now we only support webhook triggers.

    :param trigger: "trigger" attribute of the RuleAPI object.
    :type trigger: ``dict``
    """
    log_context = {
        'user_db': user_db,
        'trigger': trigger,
        'resolver': self.__class__.__name__
    }

    trigger_type = trigger['type']
    trigger_parameters = trigger.get('parameters', {})

    # Non-webhook triggers are not permission-checked at all
    if trigger_type != WEBHOOK_TRIGGER_TYPE:
        self._log('Not a webhook trigger type, ignoring trigger permission checking',
                  extra=log_context)
        return True

    resolver = get_resolver_for_resource_type(ResourceType.WEBHOOK)
    webhook_db = WebhookDB(name=trigger_parameters['url'])
    permission_type = PermissionType.WEBHOOK_CREATE
    result = resolver.user_has_resource_db_permission(user_db=user_db,
                                                      resource_db=webhook_db,
                                                      permission_type=permission_type)

    if result is True:
        self._log('Found a matching trigger grant', extra=log_context)
        return True

    self._log('No matching trigger grants found', extra=log_context)
    return False
|
def user_has_action_permission(self, user_db, action_ref):
    """
    Check if the user has "execute" permission on the provided action.
    """
    # TODO(review): not implemented -- currently a no-op which implicitly returns
    # None (falsy); confirm whether callers rely on this.
    pass
|
'The method is invoked on every request and shows the lifecycle of the request received from
the middleware.
Although some middleware may use parts of the API spec, it is safe to assume that if you\'re
looking for the particular spec property handler, it\'s most likely a part of this method.
At the time of writing, the only property being utilized by middleware was `x-log-result`.'
| def __call__(self, req):
| LOG.debug('Recieved call with WebOb: %s', req)
(endpoint, path_vars) = self.match(req)
LOG.debug('Parsed endpoint: %s', endpoint)
LOG.debug('Parsed path_vars: %s', path_vars)
context = copy.copy(getattr(self, 'mock_context', {}))
if ('security' in endpoint):
security = endpoint.get('security')
else:
security = self.spec.get('security', [])
if (self.auth and security):
try:
auth_resp = None
security_definitions = self.spec.get('securityDefinitions', {})
for statement in security:
(declaration, options) = statement.copy().popitem()
definition = security_definitions[declaration]
if (definition['type'] == 'apiKey'):
if (definition['in'] == 'header'):
token = req.headers.get(definition['name'])
elif (definition['in'] == 'query'):
token = req.GET.get(definition['name'])
else:
token = None
if token:
if auth_resp:
raise auth_exc.MultipleAuthSourcesError('Only one of Token or API key expected.')
auth_func = op_resolver(definition['x-operationId'])
auth_resp = auth_func(token)
context['user'] = User.get_by_name(auth_resp.user)
if ('user' not in context):
raise auth_exc.NoAuthSourceProvidedError('One of Token or API key required.')
except (auth_exc.NoAuthSourceProvidedError, auth_exc.MultipleAuthSourcesError) as e:
LOG.error(str(e))
return abort_unauthorized(str(e))
except auth_exc.TokenNotProvidedError as e:
LOG.exception('Token is not provided.')
return abort_unauthorized(str(e))
except auth_exc.TokenNotFoundError as e:
LOG.exception('Token is not found.')
return abort_unauthorized(str(e))
except auth_exc.TokenExpiredError as e:
LOG.exception('Token has expired.')
return abort_unauthorized(str(e))
except auth_exc.ApiKeyNotProvidedError as e:
LOG.exception('API key is not provided.')
return abort_unauthorized(str(e))
except auth_exc.ApiKeyNotFoundError as e:
LOG.exception('API key is not found.')
return abort_unauthorized(str(e))
except auth_exc.ApiKeyDisabledError as e:
LOG.exception('API key is disabled.')
return abort_unauthorized(str(e))
if cfg.CONF.rbac.enable:
user_db = context['user']
permission_type = endpoint.get('x-permissions', None)
if permission_type:
resolver = resolvers.get_resolver_for_permission_type(permission_type)
has_permission = resolver.user_has_permission(user_db, permission_type)
if (not has_permission):
raise rbac_exc.ResourceTypeAccessDeniedError(user_db, permission_type)
kw = {}
for param in (endpoint.get('parameters', []) + endpoint.get('x-parameters', [])):
name = param['name']
argument_name = (param.get('x-as', None) or name)
source = param['in']
default = param.get('default', None)
if (source == 'query'):
kw[argument_name] = req.GET.get(name, default)
elif (source == 'path'):
kw[argument_name] = path_vars[name]
elif (source == 'header'):
kw[argument_name] = req.headers.get(name, default)
elif (source == 'formData'):
kw[argument_name] = req.POST.get(name, default)
elif (source == 'environ'):
kw[argument_name] = req.environ.get(name.upper(), default)
elif (source == 'context'):
kw[argument_name] = context.get(name, default)
elif (source == 'request'):
kw[argument_name] = getattr(req, name)
elif (source == 'body'):
if req.body:
content_type = req.headers.get('Content-Type', 'application/json')
content_type = parse_content_type_header(content_type=content_type)[0]
schema = param['schema']
try:
if (content_type == 'application/json'):
data = req.json
elif (content_type == 'text/plain'):
data = req.body
elif (content_type in ['application/x-www-form-urlencoded', 'multipart/form-data']):
data = urlparse.parse_qs(req.body)
else:
raise ValueError(('Unsupported Content-Type: "%s"' % content_type))
except Exception as e:
detail = ('Failed to parse request body: %s' % str(e))
raise exc.HTTPBadRequest(detail=detail)
try:
CustomValidator(schema, resolver=self.spec_resolver).validate(data)
except (jsonschema.ValidationError, ValueError) as e:
raise exc.HTTPBadRequest(detail=e.message, comment=traceback.format_exc())
if (content_type == 'text/plain'):
kw[argument_name] = data
else:
class Body(object, ):
def __init__(self, **entries):
self.__dict__.update(entries)
ref = schema.get('$ref', None)
if ref:
with self.spec_resolver.resolving(ref) as resolved:
schema = resolved
if ('x-api-model' in schema):
Model = op_resolver(schema['x-api-model'])
instance = Model(**data)
try:
instance = instance.validate()
except (jsonschema.ValidationError, ValueError) as e:
raise exc.HTTPBadRequest(detail=e.message, comment=traceback.format_exc())
else:
LOG.debug(('Missing x-api-model definition for %s, using generic Body model.' % endpoint['operationId']))
model = Body
instance = model(**data)
kw[argument_name] = instance
else:
kw[argument_name] = None
required = param.get('required', False)
if (required and (kw[argument_name] is None)):
detail = ('Required parameter "%s" is missing' % name)
raise exc.HTTPBadRequest(detail=detail)
param_type = param.get('type', None)
if (kw[argument_name] is not None):
if (param_type == 'boolean'):
positive = ('true', '1', 'yes', 'y')
negative = ('false', '0', 'no', 'n')
if (str(kw[argument_name]).lower() not in (positive + negative)):
detail = ('Parameter "%s" is not of type boolean' % argument_name)
raise exc.HTTPBadRequest(detail=detail)
kw[argument_name] = (str(kw[argument_name]).lower() in positive)
elif (param_type == 'integer'):
regex = '^-?[0-9]+$'
if (not re.search(regex, str(kw[argument_name]))):
detail = ('Parameter "%s" is not of type integer' % argument_name)
raise exc.HTTPBadRequest(detail=detail)
kw[argument_name] = int(kw[argument_name])
elif (param_type == 'number'):
regex = '^[+-]?(\\d+(\\.\\d*)?|\\.\\d+)([eE][+-]?\\d+)?$'
if (not re.search(regex, str(kw[argument_name]))):
detail = ('Parameter "%s" is not of type float' % argument_name)
raise exc.HTTPBadRequest(detail=detail)
kw[argument_name] = float(kw[argument_name])
try:
func = op_resolver(endpoint['operationId'])
except Exception as e:
LOG.exception(('Failed to load controller for operation "%s": %s' % (endpoint['operationId'], str(e))))
raise e
try:
resp = func(**kw)
except Exception as e:
LOG.exception(('Failed to call controller function "%s" for operation "%s": %s' % (func.__name__, endpoint['operationId'], str(e))))
raise e
if (resp is None):
resp = Response()
if (not hasattr(resp, '__call__')):
resp = Response(json=resp)
responses = endpoint.get('responses', {})
response_spec = responses.get(str(resp.status_code), None)
default_response_spec = responses.get('default', None)
if ((not response_spec) and default_response_spec):
LOG.debug(('No custom response spec found for endpoint "%s", using a default one' % endpoint['operationId']))
response_spec_name = 'default'
else:
response_spec_name = str(resp.status_code)
response_spec = (response_spec or default_response_spec)
if (response_spec and ('schema' in response_spec)):
LOG.debug(('Using response spec "%s" for endpoint %s and status code %s' % (response_spec_name, endpoint['operationId'], resp.status_code)))
try:
validator = CustomValidator(response_spec['schema'], resolver=self.spec_resolver)
validator.validate(resp.json)
except (jsonschema.ValidationError, ValueError):
LOG.exception('Response validation failed.')
resp.headers.add('Warning', '199 OpenAPI "Response validation failed"')
else:
LOG.debug(('No response spec found for endpoint "%s"' % endpoint['operationId']))
return resp
|
'Converts WSGI request to webob.Request and initiates the response returned by controller.'
def as_wsgi(self, environ, start_response):
    """Adapt the WSGI calling convention: wrap environ in a webob Request,
    dispatch it through this router, and stream the resulting response."""
    request = Request(environ)
    response = self(request)
    return response(environ, start_response)
|
'Return a dispatcher class which is used for dispatching triggers.'
@classmethod
def _get_dispatcher(cls):
    """Lazily create and cache the TriggerDispatcher used for dispatching triggers."""
    from st2common.transport.reactor import TriggerDispatcher
    if cls.dispatcher:
        return cls.dispatcher
    cls.dispatcher = TriggerDispatcher(LOG)
    return cls.dispatcher
|
'Use this method when -
* upsert=False is desired
* special operators like push, push_all are to be used.'
@classmethod
def update(cls, model_object, publish=True, dispatch_trigger=True, **kwargs):
    """Update the model object in the database.

    Use this method when upsert=False is desired or when special operators
    like push / push_all are to be used.

    :param model_object: Persisted model instance to update.
    :param publish: True to publish an update event on the message bus.
    :param dispatch_trigger: True to dispatch a resource "update" trigger.
    :return: The freshly re-fetched model object.
    """
    cls._get_impl().update(model_object, **kwargs)
    # Re-fetch so callers see the post-update state (including server-side changes).
    model_object = cls.get_by_id(model_object.id)
    if publish:
        try:
            cls.publish_update(model_object)
        # Bug fix: bare "except:" also swallowed SystemExit / KeyboardInterrupt.
        # Publishing stays best-effort, but only for regular exceptions.
        except Exception:
            LOG.exception('Publish failed.')
    if dispatch_trigger:
        try:
            cls.dispatch_update_trigger(model_object)
        except Exception:
            LOG.exception('Trigger dispatch failed.')
    return model_object
|
'Dispatch a resource-specific trigger which indicates a new resource has been created.'
@classmethod
def dispatch_create_trigger(cls, model_object):
    """Dispatch the resource-specific trigger which indicates a new resource was created."""
    operation = 'create'
    return cls._dispatch_operation_trigger(operation=operation, model_object=model_object)
|
'Dispatch a resource-specific trigger which indicates an existing resource has been updated.'
@classmethod
def dispatch_update_trigger(cls, model_object):
    """Dispatch the resource-specific trigger which indicates an existing resource was updated."""
    operation = 'update'
    return cls._dispatch_operation_trigger(operation=operation, model_object=model_object)
|
'Dispatch a resource-specific trigger which indicates an existing resource has been
deleted.'
@classmethod
def dispatch_delete_trigger(cls, model_object):
    """Dispatch the resource-specific trigger which indicates an existing resource was deleted."""
    operation = 'delete'
    return cls._dispatch_operation_trigger(operation=operation, model_object=model_object)
|
'Publish the object status to the message queue.
Publish the instance of the model as payload with the status
as routing key to the message queue via the StatePublisher.
:param model_object: An instance of the model.
:type model_object: ``object``'
@classmethod
def publish_status(cls, model_object):
    """Publish the model instance to the message queue via the StatePublisher,
    using the object's current status as the routing key.

    :param model_object: An instance of the model.
    :type model_object: ``object``
    """
    publisher = cls._get_publisher()
    if not publisher:
        return
    status = getattr(model_object, 'status', None)
    publisher.publish_state(model_object, status)
|
'Note: We override add_or_update because we also want to publish high level "value_change"
event for this resource.'
@classmethod
def add_or_update(cls, model_object, publish=True, dispatch_trigger=True):
    """Persist the object; additionally emit a high level "value_change"
    trigger when an existing key's value actually changed."""
    existing = cls.get_by_id(value=model_object.id) if model_object.id else None
    model_object = super(KeyValuePair, cls).add_or_update(
        model_object=model_object, publish=publish, dispatch_trigger=dispatch_trigger)
    if existing is not None and existing.value != model_object.value:
        cls.dispatch_value_change_trigger(old_model_object=existing,
                                          new_model_object=model_object)
    return model_object
|
'Retrieve KeyValuePair objects for the provided key names.'
@classmethod
def get_by_names(cls, names):
    """Retrieve KeyValuePair objects whose names are in the provided list."""
    filters = {'name__in': names}
    return cls.query(**filters)
|
'Get a key value store given a scope and name.
:param scope: Scope which the key belongs to.
:type scope: ``str``
:param name: Name of the key.
:type name: ``str``
:rtype: :class:`KeyValuePairDB` or ``None``'
@classmethod
def get_by_scope_and_name(cls, scope, name):
    """Return the KeyValuePairDB matching the given scope and name, or None.

    :param scope: Scope which the key belongs to.
    :type scope: ``str``
    :param name: Name of the key.
    :type name: ``str``
    :rtype: :class:`KeyValuePairDB` or ``None``
    """
    result = cls.impl.query(scope=scope, name=name)
    if not result:
        return None
    return result.first()
|
'Discover all the packs in the provided directory and register policies from all of the
discovered packs.
:return: Number of policies registered.
:rtype: ``int``'
def register_from_packs(self, base_dirs):
    """Discover all packs under base_dirs and register their policies.

    :return: Number of policies registered.
    :rtype: ``int``
    """
    self.register_packs(base_dirs=base_dirs)
    content = self._pack_loader.get_content(base_dirs=base_dirs, content_type='policies')
    registered_count = 0
    for pack, policies_dir in six.iteritems(content):
        if not policies_dir:
            LOG.debug('Pack %s does not contain policies.', pack)
            continue
        try:
            LOG.debug('Registering policies from pack %s:, dir: %s', pack, policies_dir)
            policies = self._get_policies_from_pack(policies_dir)
            registered_count += self._register_policies_from_pack(pack=pack, policies=policies)
        except Exception as e:
            if self._fail_on_failure:
                raise e
            LOG.exception('Failed registering all policies from pack: %s', policies_dir)
    return registered_count
|
'Register all the policies from the provided pack.
:return: Number of policies registered.
:rtype: ``int``'
def register_from_pack(self, pack_dir):
    """Register all the policies from the provided pack.

    :return: Number of policies registered.
    :rtype: ``int``
    """
    # Normalize away a single trailing slash so os.path.split yields the pack name.
    pack_dir = pack_dir[:-1] if pack_dir.endswith('/') else pack_dir
    _, pack = os.path.split(pack_dir)
    policies_dir = self._pack_loader.get_content_from_pack(pack_dir=pack_dir,
                                                           content_type='policies')
    self.register_pack(pack_name=pack, pack_dir=pack_dir)
    registered_count = 0
    if not policies_dir:
        return registered_count
    LOG.debug('Registering policies from pack %s:, dir: %s', pack, policies_dir)
    try:
        policies = self._get_policies_from_pack(policies_dir=policies_dir)
        registered_count = self._register_policies_from_pack(pack=pack, policies=policies)
    except Exception as e:
        if self._fail_on_failure:
            raise e
        LOG.exception('Failed registering all policies from pack: %s', policies_dir)
    # Bug fix: the original had a second, unreachable "return registered_count"
    # statement after this one; it has been removed.
    return registered_count
|
':param use_pack_cache: True to cache which packs have been registered in memory and making
sure packs are only registered once.
:type use_pack_cache: ``bool``
:param fail_on_failure: Throw an exception if resource registration fails.
:type fail_on_failure: ``bool``'
def __init__(self, use_pack_cache=True, fail_on_failure=False):
    """Base content registrar constructor.

    :param use_pack_cache: True to cache which packs have been registered in memory
                           and make sure packs are only registered once.
    :type use_pack_cache: ``bool``
    :param fail_on_failure: Throw an exception if resource registration fails.
    :type fail_on_failure: ``bool``
    """
    self._use_pack_cache = use_pack_cache
    self._fail_on_failure = fail_on_failure
    # Loaders for metadata files, pack content directories and runner modules.
    self._meta_loader = MetaLoader()
    self._pack_loader = ContentPackLoader()
    self._runner_loader = RunnersLoader()
|
'Return a list of registered packs.
:rtype: ``list``'
def get_registered_packs(self):
    """Return a list of pack names registered so far in this process.

    :rtype: ``list``
    """
    registered = REGISTERED_PACKS_CACHE.keys()
    return registered
|
'Register packs in all the provided directories.'
def register_packs(self, base_dirs):
    """Register every pack found in the provided directories; return how many."""
    packs = self._pack_loader.get_packs(base_dirs=base_dirs)
    count = 0
    for pack_name, pack_path in six.iteritems(packs):
        self.register_pack(pack_name=pack_name, pack_dir=pack_path)
        count += 1
    return count
|
'Register pack in the provided directory.'
def register_pack(self, pack_name, pack_dir):
    """Register a single pack, honouring the in-memory registration cache.

    Returns the PackDB on success, None on (tolerated) failure.
    """
    if self._use_pack_cache and pack_name in REGISTERED_PACKS_CACHE:
        # Already registered in this process; nothing to do.
        return
    LOG.debug('Registering pack: %s' % pack_name)
    REGISTERED_PACKS_CACHE[pack_name] = True
    try:
        pack_db, _ = self._register_pack(pack_name=pack_name, pack_dir=pack_dir)
    except Exception as e:
        if self._fail_on_failure:
            msg = 'Failed to register pack "%s": %s' % (pack_name, str(e))
            raise ValueError(msg)
        LOG.exception('Failed to register pack "%s"' % pack_name)
        return None
    return pack_db
|
'Register a pack and corresponding pack config schema (create a DB object in the system).
Note: Pack registration now happens when registering the content and not when installing
a pack using packs.install. Eventually this will be moved to the pack management API.'
def _register_pack(self, pack_name, pack_dir):
    """Register a pack and the corresponding pack config schema (create DB objects).

    Note: Pack registration now happens when registering the content and not when
    installing a pack using packs.install. Eventually this will be moved to the
    pack management API.

    :return: Tuple of (pack_db, config_schema_db).
    """
    pack_db = self._register_pack_db(pack_name=pack_name, pack_dir=pack_dir)
    # Warn (but still proceed) when the pack ships a legacy config.yaml file.
    config_path = os.path.join(pack_dir, 'config.yaml')
    if os.path.isfile(config_path):
        LOG.warning(('Pack "%s" contains a deprecated config.yaml file (%s). Support for "config.yaml" files has been deprecated in StackStorm v1.6.0 in favor of config.schema.yaml config schema files and config files in /opt/stackstorm/configs/ directory.Support for config.yaml files will be removed in next major release (v2.4.0) so you are strongly encouraged to migrate. For more information please refer to %s ' % (pack_db.name, config_path, 'https://docs.stackstorm.com/reference/pack_configs.html')))
    config_schema_db = self._register_pack_config_schema_db(pack_name=pack_name, pack_dir=pack_dir)
    return (pack_db, config_schema_db)
|
'Discover all the packs in the provided directory and register sensors from all of the
discovered packs.
:return: Number of sensors registered.
:rtype: ``int``'
def register_from_packs(self, base_dirs):
    """Discover all packs under base_dirs and register their sensors.

    :return: Number of sensors registered.
    :rtype: ``int``
    """
    self.register_packs(base_dirs=base_dirs)
    content = self._pack_loader.get_content(base_dirs=base_dirs, content_type='sensors')
    registered_count = 0
    for pack, sensors_dir in six.iteritems(content):
        if not sensors_dir:
            LOG.debug('Pack %s does not contain sensors.', pack)
            continue
        try:
            LOG.debug('Registering sensors from pack %s:, dir: %s', pack, sensors_dir)
            sensors = self._get_sensors_from_pack(sensors_dir)
            registered_count += self._register_sensors_from_pack(pack=pack, sensors=sensors)
        except Exception as e:
            if self._fail_on_failure:
                raise e
            LOG.exception('Failed registering all sensors from pack "%s": %s', sensors_dir, str(e))
    return registered_count
|
'Register all the sensors from the provided pack.
:return: Number of sensors registered.
:rtype: ``int``'
def register_from_pack(self, pack_dir):
    """Register all the sensors from the provided pack.

    :return: Number of sensors registered.
    :rtype: ``int``
    """
    pack_dir = pack_dir[:-1] if pack_dir.endswith('/') else pack_dir
    _, pack = os.path.split(pack_dir)
    sensors_dir = self._pack_loader.get_content_from_pack(pack_dir=pack_dir,
                                                          content_type='sensors')
    self.register_pack(pack_name=pack, pack_dir=pack_dir)
    if not sensors_dir:
        return 0
    registered_count = 0
    LOG.debug('Registering sensors from pack %s:, dir: %s', pack, sensors_dir)
    try:
        sensors = self._get_sensors_from_pack(sensors_dir=sensors_dir)
        registered_count = self._register_sensors_from_pack(pack=pack, sensors=sensors)
    except Exception as e:
        if self._fail_on_failure:
            raise e
        LOG.exception('Failed registering all sensors from pack "%s": %s', sensors_dir, str(e))
    return registered_count
|
'Discover all the packs in the provided directory and register actions from all of the
discovered packs.
:return: Number of actions registered.
:rtype: ``int``'
def register_from_packs(self, base_dirs):
    """Discover all packs under base_dirs and register their actions.

    :return: Number of actions registered.
    :rtype: ``int``
    """
    self.register_packs(base_dirs=base_dirs)
    content = self._pack_loader.get_content(base_dirs=base_dirs, content_type='actions')
    registered_count = 0
    for pack, actions_dir in six.iteritems(content):
        if not actions_dir:
            LOG.debug('Pack %s does not contain actions.', pack)
            continue
        try:
            LOG.debug('Registering actions from pack %s:, dir: %s', pack, actions_dir)
            actions = self._get_actions_from_pack(actions_dir)
            registered_count += self._register_actions_from_pack(pack=pack, actions=actions)
        except Exception as e:
            if self._fail_on_failure:
                raise e
            LOG.exception('Failed registering all actions from pack: %s', actions_dir)
    return registered_count
|
'Register all the actions from the provided pack.
:return: Number of actions registered.
:rtype: ``int``'
def register_from_pack(self, pack_dir):
    """Register all the actions from the provided pack.

    :return: Number of actions registered.
    :rtype: ``int``
    """
    pack_dir = pack_dir[:-1] if pack_dir.endswith('/') else pack_dir
    _, pack = os.path.split(pack_dir)
    actions_dir = self._pack_loader.get_content_from_pack(pack_dir=pack_dir,
                                                          content_type='actions')
    self.register_pack(pack_name=pack, pack_dir=pack_dir)
    if not actions_dir:
        return 0
    registered_count = 0
    LOG.debug('Registering actions from pack %s:, dir: %s', pack, actions_dir)
    try:
        actions = self._get_actions_from_pack(actions_dir=actions_dir)
        registered_count = self._register_actions_from_pack(pack=pack, actions=actions)
    except Exception as e:
        if self._fail_on_failure:
            raise e
        LOG.exception('Failed registering all actions from pack: %s', actions_dir)
    return registered_count
|
'Discover all the packs in the provided directory and register triggers from all of the
discovered packs.
:return: Number of triggers registered.
:rtype: ``int``'
def register_from_packs(self, base_dirs):
    """Discover all packs under base_dirs and register their triggers.

    :return: Number of triggers registered.
    :rtype: ``int``
    """
    self.register_packs(base_dirs=base_dirs)
    content = self._pack_loader.get_content(base_dirs=base_dirs, content_type='triggers')
    registered_count = 0
    for pack, triggers_dir in six.iteritems(content):
        if not triggers_dir:
            LOG.debug('Pack %s does not contain triggers.', pack)
            continue
        try:
            LOG.debug('Registering triggers from pack %s:, dir: %s', pack, triggers_dir)
            triggers = self._get_triggers_from_pack(triggers_dir)
            registered_count += self._register_triggers_from_pack(pack=pack, triggers=triggers)
        except Exception as e:
            if self._fail_on_failure:
                raise e
            LOG.exception('Failed registering all triggers from pack "%s": %s', triggers_dir, str(e))
    return registered_count
|
'Register all the triggers from the provided pack.
:return: Number of triggers registered.
:rtype: ``int``'
def register_from_pack(self, pack_dir):
    """Register all the triggers from the provided pack.

    :return: Number of triggers registered.
    :rtype: ``int``
    """
    pack_dir = pack_dir[:-1] if pack_dir.endswith('/') else pack_dir
    _, pack = os.path.split(pack_dir)
    triggers_dir = self._pack_loader.get_content_from_pack(pack_dir=pack_dir,
                                                           content_type='triggers')
    self.register_pack(pack_name=pack, pack_dir=pack_dir)
    if not triggers_dir:
        return 0
    registered_count = 0
    LOG.debug('Registering triggers from pack %s:, dir: %s', pack, triggers_dir)
    try:
        triggers = self._get_triggers_from_pack(triggers_dir=triggers_dir)
        registered_count = self._register_triggers_from_pack(pack=pack, triggers=triggers)
    except Exception as e:
        if self._fail_on_failure:
            raise e
        LOG.exception('Failed registering all triggers from pack "%s": %s', triggers_dir, str(e))
    return registered_count
|
'Discover all the packs in the provided directory and register aliases from all of the
discovered packs.
:return: Number of aliases registered.
:rtype: ``int``'
def register_from_packs(self, base_dirs):
    """Discover all packs under base_dirs and register their aliases.

    :return: Number of aliases registered.
    :rtype: ``int``
    """
    self.register_packs(base_dirs=base_dirs)
    content = self._pack_loader.get_content(base_dirs=base_dirs, content_type='aliases')
    registered_count = 0
    for pack, aliases_dir in six.iteritems(content):
        if not aliases_dir:
            LOG.debug('Pack %s does not contain aliases.', pack)
            continue
        try:
            LOG.debug('Registering aliases from pack %s:, dir: %s', pack, aliases_dir)
            aliases = self._get_aliases_from_pack(aliases_dir)
            registered_count += self._register_aliases_from_pack(pack=pack, aliases=aliases)
        except Exception as e:
            if self._fail_on_failure:
                raise e
            LOG.exception('Failed registering all aliases from pack: %s', aliases_dir)
    return registered_count
|
'Register all the aliases from the provided pack.
:return: Number of aliases registered.
:rtype: ``int``'
def register_from_pack(self, pack_dir):
    """Register all the aliases from the provided pack.

    :return: Number of aliases registered.
    :rtype: ``int``
    """
    # Normalize away a single trailing slash so os.path.split yields the pack name.
    pack_dir = pack_dir[:-1] if pack_dir.endswith('/') else pack_dir
    _, pack = os.path.split(pack_dir)
    aliases_dir = self._pack_loader.get_content_from_pack(pack_dir=pack_dir,
                                                          content_type='aliases')
    self.register_pack(pack_name=pack, pack_dir=pack_dir)
    registered_count = 0
    if not aliases_dir:
        return registered_count
    LOG.debug('Registering aliases from pack %s:, dir: %s', pack, aliases_dir)
    try:
        aliases = self._get_aliases_from_pack(aliases_dir=aliases_dir)
        registered_count = self._register_aliases_from_pack(pack=pack, aliases=aliases)
    except Exception as e:
        if self._fail_on_failure:
            raise e
        LOG.exception('Failed registering all aliases from pack: %s', aliases_dir)
    # Bug fix: the original had a second, unreachable "return registered_count"
    # statement after this one; it has been removed.
    return registered_count
|
'Retrieve ActionAliasDB object.'
def _get_action_alias_db(self, pack, action_alias):
    """Load an alias metadata file and convert it into an ActionAliasDB model.

    Raises when the metadata declares a different pack than the one it lives in.
    """
    content = self._meta_loader.load(action_alias)
    pack_field = content.get('pack', None)
    if not pack_field:
        # Metadata omitted the pack name - default it to the owning pack.
        content['pack'] = pack
        pack_field = pack
    if pack_field != pack:
        raise Exception('Model is in pack "%s" but field "pack" is different: %s' % (pack, pack_field))
    api_model = ActionAliasAPI(**content)
    api_model.validate()
    return ActionAliasAPI.to_model(api_model)
|
'Register configs for all the available packs.'
def register_from_packs(self, base_dirs):
    """Register config files for every pack discovered under base_dirs.

    :return: Number of configs registered.
    :rtype: ``int``
    """
    self.register_packs(base_dirs=base_dirs)
    packs = self._pack_loader.get_packs(base_dirs=base_dirs)
    registered_count = 0
    for pack_name in packs.keys():
        config_path = self._get_config_path_for_pack(pack_name=pack_name)
        if not os.path.isfile(config_path):
            # Configs are optional - skip packs without one.
            LOG.debug('No config found for pack "%s" (file "%s" is not present).',
                      pack_name, config_path)
            continue
        try:
            self._register_config_for_pack(pack=pack_name, config_path=config_path)
        except Exception as e:
            if self._fail_on_failure:
                msg = ('Failed to register config "%s" for pack "%s": %s' %
                       (config_path, pack_name, str(e)))
                raise ValueError(msg)
            LOG.exception('Failed to register config for pack "%s": %s', pack_name, str(e))
        else:
            registered_count += 1
    return registered_count
|
'Register config for a provided pack.'
def register_from_pack(self, pack_dir):
    """Register the config for a single pack.

    :return: 1 when a config was registered, 0 when the pack has no config file.
    :rtype: ``int``
    """
    pack_dir = pack_dir[:-1] if pack_dir.endswith('/') else pack_dir
    _, pack_name = os.path.split(pack_dir)
    self.register_pack(pack_name=pack_name, pack_dir=pack_dir)
    config_path = self._get_config_path_for_pack(pack_name=pack_name)
    if not os.path.isfile(config_path):
        return 0
    self._register_config_for_pack(pack=pack_name, config_path=config_path)
    return 1
|
':return: Number of rules registered.
:rtype: ``int``'
def register_from_packs(self, base_dirs):
    """Discover all packs under base_dirs and register their rules.

    :return: Number of rules registered.
    :rtype: ``int``
    """
    self.register_packs(base_dirs=base_dirs)
    content = self._pack_loader.get_content(base_dirs=base_dirs, content_type='rules')
    registered_count = 0
    for pack, rules_dir in six.iteritems(content):
        if not rules_dir:
            LOG.debug('Pack %s does not contain rules.', pack)
            continue
        try:
            LOG.debug('Registering rules from pack: %s', pack)
            rules = self._get_rules_from_pack(rules_dir)
            registered_count += self._register_rules_from_pack(pack, rules)
        except Exception as e:
            if self._fail_on_failure:
                raise e
            LOG.exception('Failed registering all rules from pack: %s', rules_dir)
    return registered_count
|
'Register all the rules from the provided pack.
:return: Number of rules registered.
:rtype: ``int``'
def register_from_pack(self, pack_dir):
    """Register all the rules from the provided pack.

    :return: Number of rules registered.
    :rtype: ``int``
    """
    pack_dir = pack_dir[:-1] if pack_dir.endswith('/') else pack_dir
    _, pack = os.path.split(pack_dir)
    rules_dir = self._pack_loader.get_content_from_pack(pack_dir=pack_dir,
                                                        content_type='rules')
    self.register_pack(pack_name=pack, pack_dir=pack_dir)
    if not rules_dir:
        return 0
    registered_count = 0
    LOG.debug('Registering rules from pack %s:, dir: %s', pack, rules_dir)
    try:
        rules = self._get_rules_from_pack(rules_dir=rules_dir)
        registered_count = self._register_rules_from_pack(pack=pack, rules=rules)
    except Exception as e:
        if self._fail_on_failure:
            raise e
        LOG.exception('Failed registering all rules from pack: %s', rules_dir)
    return registered_count
|
':param id: Runner id.
:type id: ``str``'
def __init__(self, runner_id):
    """Base action runner constructor.

    :param runner_id: Runner id.
    :type runner_id: ``str``
    """
    self.runner_id = runner_id
    # All of the attributes below are populated by the action container
    # before the runner is invoked; they start out unset.
    self.runner_type_db = None
    self.container_service = None
    self.runner_parameters = None
    self.action = None
    self.action_name = None
    self.liveaction = None
    self.liveaction_id = None
    self.execution = None
    self.execution_id = None
    self.entry_point = None
    self.libs_dir_path = None
    self.context = None
    self.callback = None
    self.auth_token = None
    self.rerun_ex_ref = None
|
'Retrieve pack name for the action which is being currently executed.
:rtype: ``str``'
def get_pack_name(self):
    """Return the pack name of the action currently being executed, or the
    default pack name when no action is set.

    :rtype: ``str``
    """
    if not self.action:
        return DEFAULT_PACK_NAME
    return self.action.pack
|
'Retrieve a name of the user which triggered this action execution.
:rtype: ``str``'
def get_user(self):
    """Return the name of the user which triggered this action execution,
    falling back to the configured system user.

    :rtype: ``str``
    """
    context = getattr(self, 'context', {}) or {}
    default_user = cfg.CONF.system_user.user
    return context.get('user', default_user)
|
'Retrieve common ST2_ACTION_ environment variables which will be available to the action.
Note: Environment variables are prefixed with ST2_ACTION_* so they don\'t clash with CLI
environment variables.
:rtype: ``dict``'
def _get_common_action_env_variables(self):
    """Build the common ST2_ACTION_* environment variables exposed to the action.

    Note: variables are prefixed with ST2_ACTION_* so they don't clash with
    CLI environment variables.

    :rtype: ``dict``
    """
    result = {
        'ST2_ACTION_PACK_NAME': self.get_pack_name(),
        'ST2_ACTION_EXECUTION_ID': str(self.execution_id),
        'ST2_ACTION_API_URL': get_full_public_api_url(),
    }
    if self.auth_token:
        result['ST2_ACTION_AUTH_TOKEN'] = self.auth_token.token
    return result
|
'Transform named arguments to the final form.
:param named_args: Named arguments.
:type named_args: ``dict``
:rtype: ``dict``'
def _transform_named_args(self, named_args):
    """Prefix each named argument key with the runner's kwarg operator.

    :param named_args: Named arguments.
    :type named_args: ``dict``
    :rtype: ``dict`` or ``None`` when no named args were supplied
    """
    if not named_args:
        return None
    prefixed = {}
    for key, value in six.iteritems(named_args):
        prefixed[self._kwarg_op + key] = value
    return prefixed
|
':param action_parameters: Action parameters.
:type action_parameters: ``dict``
:return: (positional_args, named_args)
:rtype: (``str``, ``dict``)'
def _get_script_args(self, action_parameters):
    """Split action parameters into positional and named script arguments.

    :param action_parameters: Action parameters.
    :type action_parameters: ``dict``
    :return: (positional_args, named_args)
    :rtype: (``str``, ``dict``)
    """
    run_as_cmd = self.runner_parameters.get(RUNNER_COMMAND, None)
    if run_as_cmd:
        # Script is invoked via an explicit command string; every action
        # parameter is then treated as a named argument.
        pos_args = self.runner_parameters.get(RUNNER_COMMAND, '')
        named_args = action_parameters
    else:
        pos_args, named_args = action_utils.get_args(action_parameters, self.action)
    return (pos_args, named_args)
|
':param config: Action config.
:type config: ``dict``
:param action_service: ActionService object.
:type action_service: :class:`ActionService`'
def __init__(self, config=None, action_service=None):
    """Base Python action constructor.

    :param config: Action config.
    :type config: ``dict``
    :param action_service: ActionService object.
    :type action_service: :class:`ActionService`
    """
    self.config = {} if not config else config
    self.action_service = action_service
    self.logger = get_logger_for_python_runner_action(action_name=self.__class__.__name__)
|
':param command: Samba command string.
:type command: ``str``
:param share: Samba share name.
:type share: ``str``'
| def _get_smbclient_command_args(self, host, username, password, command, share='C$', domain=None):
| args = ['smbclient']
values = {'domain': domain, 'username': username, 'password': password}
if domain:
auth_string = ('%(domain)s\\%(username)s%%%(password)s' % values)
else:
auth_string = ('%(username)s%%%(password)s' % values)
args += ['-U', auth_string]
args += [('//%(host)s/%(share)s' % {'host': host, 'share': share})]
args += ['-c', command]
return args
|
':param pack: Name of the pack this action belongs to.
:type pack: ``str``
:param file_path: Path to the action module.
:type file_path: ``str``
:param parameters: action parameters.
:type parameters: ``dict`` or ``None``
:param user: Name of the user who triggered this action execution.
:type user: ``str``
:param parent_args: Command line arguments passed to the parent process.
:type parent_args: ``list``'
def __init__(self, pack, file_path, parameters=None, user=None, parent_args=None):
    """Python action wrapper process constructor.

    :param pack: Name of the pack this action belongs to.
    :type pack: ``str``
    :param file_path: Path to the action module.
    :type file_path: ``str``
    :param parameters: Action parameters.
    :type parameters: ``dict`` or ``None``
    :param user: Name of the user who triggered this action execution.
    :type user: ``str``
    :param parent_args: Command line arguments passed to the parent process.
    :type parent_args: ``list``
    """
    self._pack = pack
    self._file_path = file_path
    self._parameters = (parameters or {})
    self._user = user
    self._parent_args = (parent_args or [])
    self._class_name = None
    self._logger = logging.getLogger('PythonActionWrapper')
    try:
        # Re-use the parent process' CLI arguments so the wrapper picks up
        # the same configuration; failure here is tolerated.
        config.parse_args(args=self._parent_args)
    except Exception as e:
        LOG.debug(('Failed to parse config using parent args (parent_args=%s): %s' % (str(self._parent_args), str(e))))
    # Indexes are intentionally not ensured to keep wrapper startup fast.
    db_setup(ensure_indexes=False)
    if (not self._user):
        # Fall back to the configured system user when none was provided.
        self._user = cfg.CONF.system_user.user
|
':rtype: ``dict``'
def _get_env_vars(self):
    """Merge user-provided environment variables with the common
    ST2_ACTION_* variables (the latter take precedence).

    :rtype: ``dict``
    """
    env_vars = dict(self._env) if self._env else {}
    env_vars.update(self._get_common_action_env_variables())
    return env_vars
|
':param stdout: Stdout which was consumed until the timeout occurred.
:type stdout: ``str``
:param stderr: Stderr which was consumed until the timeout occurred.
:type stderr: ``str``'
def __init__(self, cmd, timeout, stdout=None, stderr=None):
    """Timeout error for a remote SSH command.

    :param stdout: Stdout which was consumed until the timeout occurred.
    :type stdout: ``str``
    :param stderr: Stderr which was consumed until the timeout occurred.
    :type stderr: ``str``
    """
    message = "Command didn't finish in %s seconds" % timeout
    super(SSHCommandTimeoutError, self).__init__(message)
    self.cmd = cmd
    self.timeout = timeout
    self.stdout = stdout
    self.stderr = stderr
|
'Authentication is always attempted in the following order:
- The key passed in (if key is provided)
- Any key we can find through an SSH agent (only if no password and
no key is provided)
- Any "id_rsa" or "id_dsa" key discoverable in ~/.ssh/ (only if no
password and no key is provided)
- Plain username/password auth, if a password was given (if password is
provided)'
def __init__(self, hostname, port=DEFAULT_SSH_PORT, username=None, password=None, bastion_host=None, key_files=None, key_material=None, timeout=None, passphrase=None):
    """Paramiko-backed SSH client constructor.

    Authentication is always attempted in the following order:
    - The key passed in (if key is provided)
    - Any key we can find through an SSH agent (only if no password and no
      key is provided)
    - Any "id_rsa" or "id_dsa" key discoverable in ~/.ssh/ (only if no
      password and no key is provided)
    - Plain username/password auth, if a password was given
    """
    self.hostname = hostname
    self.port = port
    self.username = username
    self.password = password
    self.key_files = key_files
    # Fall back to the class-level default when no timeout was supplied.
    self.timeout = (timeout or ParamikoSSHClient.CONNECT_TIMEOUT)
    self.key_material = key_material
    self.bastion_host = bastion_host
    self.passphrase = passphrase
    # SSH config file path is configurable; default to the user's ~/.ssh/config.
    self.ssh_config_file = os.path.expanduser((cfg.CONF.ssh_runner.ssh_config_file_path or '~/.ssh/config'))
    self.logger = logging.getLogger(__name__)
    # Connections (and the bastion tunnel, when used) are established lazily
    # by connect(); everything starts out unset.
    self.client = None
    self.sftp_client = None
    self.bastion_client = None
    self.bastion_socket = None
|
'Connect to the remote node over SSH.
:return: True if the connection has been successfully established,
False otherwise.
:rtype: ``bool``'
def connect(self):
    """Connect to the remote node over SSH, optionally tunnelling through
    the configured bastion host.

    :return: True if the connection has been successfully established.
    :rtype: ``bool``
    """
    if self.bastion_host:
        self.logger.debug('Bastion host specified, connecting')
        self.bastion_client = self._connect(host=self.bastion_host)
        transport = self.bastion_client.get_transport()
        # Open a direct-tcpip channel from the bastion to the real target.
        target_addr = (self.hostname, self.port)
        source_addr = ('', 0)
        self.bastion_socket = transport.open_channel('direct-tcpip', target_addr, source_addr)
    self.client = self._connect(host=self.hostname, socket=self.bastion_socket)
    return True
|
'Upload a file to the remote node.
:type local_path: ``str``
:param local_path: File path on the local node.
:type remote_path: ``str``
:param remote_path: File path on the remote node.
:type mode: ``int``
:param mode: Permissions mode for the file. E.g. 0744.
:type mirror_local_mode: ``bool``
:param mirror_local_mode: Should remote file mirror local mode.
:return: Attributes of the remote file.
:rtype: :class:`posix.stat_result` or ``None``'
def put(self, local_path, remote_path, mode=None, mirror_local_mode=False):
    """Upload a file to the remote node.

    :type local_path: ``str``
    :param local_path: File path on the local node.
    :type remote_path: ``str``
    :param remote_path: File path on the remote node.
    :type mode: ``int``
    :param mode: Permissions mode for the file. E.g. 0744.
    :type mirror_local_mode: ``bool``
    :param mirror_local_mode: Should remote file mirror local mode.
    :return: Attributes of the remote file.
    :rtype: :class:`posix.stat_result` or ``None``
    """
    if not local_path or not remote_path:
        # Bug fix: the format string previously received a single argument for
        # its two placeholders ("% local_path") and remote_path was passed as a
        # stray second Exception argument - this raised TypeError instead of
        # the intended error message.
        raise Exception('Need both local_path and remote_path. local: %s, remote: %s' %
                        (local_path, remote_path))
    local_path = quote_unix(local_path)
    remote_path = quote_unix(remote_path)
    extra = {'_local_path': local_path, '_remote_path': remote_path, '_mode': mode,
             '_mirror_local_mode': mirror_local_mode}
    self.logger.debug('Uploading file', extra=extra)
    if not os.path.exists(local_path):
        raise Exception('Path %s does not exist locally.' % local_path)
    rattrs = self.sftp.put(local_path, remote_path)
    if mode or mirror_local_mode:
        local_mode = mode
        if not mode or mirror_local_mode:
            local_mode = os.stat(local_path).st_mode
        # Octal strings are accepted for convenience.
        if isinstance(local_mode, basestring):
            local_mode = int(local_mode, 8)
        local_mode = local_mode & 4095  # keep only the permission bits (0o7777)
        remote_mode = rattrs.st_mode
        if remote_mode is not None:
            remote_mode = remote_mode & 4095
        if local_mode != remote_mode:
            self.sftp.chmod(remote_path, local_mode)
    return rattrs
|
'Upload a dir to the remote node.
:type local_path: ``str``
:param local_path: Dir path on the local node.
:type remote_path: ``str``
:param remote_path: Base dir path on the remote node.
:type mode: ``int``
:param mode: Permissions mode for the file. E.g. 0744.
:type mirror_local_mode: ``bool``
:param mirror_local_mode: Should remote file mirror local mode.
:return: List of files created on remote node.
:rtype: ``list`` of ``str``'
def put_dir(self, local_path, remote_path, mode=None, mirror_local_mode=False):
    """Upload a directory tree to the remote node.

    :type local_path: ``str``
    :param local_path: Dir path on the local node.
    :type remote_path: ``str``
    :param remote_path: Base dir path on the remote node.
    :type mode: ``int``
    :param mode: Permissions mode for the files. E.g. 0744.
    :type mirror_local_mode: ``bool``
    :param mirror_local_mode: Should remote files mirror local mode.
    :return: List of files created on remote node.
    :rtype: ``list`` of ``str``
    """
    extra = {'_local_path': local_path, '_remote_path': remote_path, '_mode': mode, '_mirror_local_mode': mirror_local_mode}
    self.logger.debug('Uploading dir', extra=extra)
    # Determine the prefix to strip from walked paths so that the last
    # component of local_path becomes the top-level dir under remote_path.
    # (A trailing separator on local_path shifts the strip point up one level.)
    if os.path.basename(local_path):
        strip = os.path.dirname(local_path)
    else:
        strip = os.path.dirname(os.path.dirname(local_path))
    remote_paths = []
    for (context, dirs, files) in os.walk(local_path):
        # Convert the local walk path into the corresponding remote path,
        # normalizing OS-specific separators to forward slashes.
        rcontext = context.replace(strip, '', 1)
        rcontext = rcontext.replace(os.sep, '/')
        rcontext = rcontext.lstrip('/')
        rcontext = posixpath.join(remote_path, rcontext)
        if (not self.exists(rcontext)):
            self.sftp.mkdir(rcontext)
        # Pre-create subdirectories so file uploads below can't fail on a
        # missing parent.
        for d in dirs:
            n = posixpath.join(rcontext, d)
            if (not self.exists(n)):
                self.sftp.mkdir(n)
        for f in files:
            # NOTE: local_path is rebound here (shadows the parameter); the
            # parameter value is no longer needed past os.walk().
            local_path = os.path.join(context, f)
            n = posixpath.join(rcontext, f)
            p = self.put(local_path=local_path, remote_path=n, mirror_local_mode=mirror_local_mode, mode=mode)
            remote_paths.append(p)
    return remote_paths
|
'Validate whether a remote file or directory exists.
:param remote_path: Path to remote file.
:type remote_path: ``str``
:rtype: ``bool``'
def exists(self, remote_path):
    """Return True when a remote file or directory exists at remote_path.

    :param remote_path: Path to remote file.
    :type remote_path: ``str``
    :rtype: ``bool``
    """
    try:
        self.sftp.lstat(remote_path).st_mode
    except IOError:
        return False
    else:
        return True
|
'Create a directory on remote box.
:param dir_path: Path to remote directory to be created.
:type dir_path: ``str``
:return: Returns nothing if successful else raises IOError exception.
:rtype: ``None``'
def mkdir(self, dir_path):
    """Create a directory on the remote box.

    :param dir_path: Path to remote directory to be created.
    :type dir_path: ``str``
    :return: Returns nothing if successful else raises IOError exception.
    :rtype: ``None``
    """
    dir_path = quote_unix(dir_path)
    self.logger.debug('mkdir', extra={'_dir_path': dir_path})
    return self.sftp.mkdir(dir_path)
|
'Delete a file on remote box.
:param path: Path to remote file to be deleted.
:type path: ``str``
:return: True if the file has been successfully deleted, False
otherwise.
:rtype: ``bool``'
def delete_file(self, path):
    """Delete a file on the remote box.

    :param path: Path to remote file to be deleted.
    :type path: ``str``
    :return: True if the file has been successfully deleted.
    :rtype: ``bool``
    """
    path = quote_unix(path)
    self.logger.debug('Deleting file', extra={'_path': path})
    self.sftp.unlink(path)
    return True
|
'Delete a dir on remote box.
:param path: Path to remote dir to be deleted.
:type path: ``str``
:param force: Optional Forcefully remove dir.
:type force: ``bool``
:param timeout: Optional Time to wait for dir to be deleted. Only relevant for force.
:type timeout: ``int``
:return: True if the file has been successfully deleted, False
otherwise.
:rtype: ``bool``'
def delete_dir(self, path, force=False, timeout=None):
    """Delete a dir on the remote box.

    :param path: Path to remote dir to be deleted.
    :type path: ``str``
    :param force: Optional Forcefully remove dir (via "rm -rf").
    :type force: ``bool``
    :param timeout: Optional Time to wait for dir to be deleted. Only relevant for force.
    :type timeout: ``int``
    :return: True if the dir has been successfully deleted, False otherwise.
    :rtype: ``bool``
    """
    path = quote_unix(path)
    extra = {'_path': path}
    if not force:
        self.logger.debug('Deleting dir', extra=extra)
        return self.sftp.rmdir(path)
    # Forced removal is done via a shell command so non-empty dirs work too.
    command = 'rm -rf %s' % path
    extra['_command'] = command
    extra['_force'] = force
    self.logger.debug('Deleting dir', extra=extra)
    return self.run(command, timeout=timeout)
|
'Note: This function is based on paramiko\'s exec_command()
method.
:param timeout: How long to wait (in seconds) for the command to
finish (optional).
:type timeout: ``float``'
def run(self, cmd, timeout=None, quote=False):
    """Execute a command on the remote host and wait for it to finish.

    Note: This function is based on paramiko's exec_command() method.

    :param cmd: Command to execute.
    :type cmd: ``str``
    :param timeout: How long to wait (in seconds) for the command to finish (optional).
    :type timeout: ``float``
    :param quote: True to shell-quote the command before execution.
    :type quote: ``bool``
    :return: [stdout, stderr, exit_status]
    :rtype: ``list``
    :raises SSHCommandTimeoutError: when the command exceeds ``timeout``.
    """
    if quote:
        cmd = quote_unix(cmd)
    extra = {'_cmd': cmd}
    self.logger.info('Executing command', extra=extra)
    bufsize = (-1)
    transport = self.client.get_transport()
    chan = transport.open_session()
    start_time = time.time()
    # sudo usually requires a pseudo-terminal to be allocated.
    if cmd.startswith('sudo'):
        chan.get_pty()
    chan.exec_command(cmd)
    stdout = StringIO()
    stderr = StringIO()
    # The command takes no stdin - close it immediately.
    stdin = chan.makefile('wb', bufsize)
    stdin.close()
    exit_status_ready = chan.exit_status_ready()
    if exit_status_ready:
        # Command finished instantly - drain whatever output is buffered.
        stdout.write(self._consume_stdout(chan).getvalue())
        stderr.write(self._consume_stderr(chan).getvalue())
    while (not exit_status_ready):
        current_time = time.time()
        elapsed_time = (current_time - start_time)
        if (timeout and (elapsed_time > timeout)):
            # Timed out - close the channel and report output consumed so far.
            chan.close()
            stdout = strip_shell_chars(stdout.getvalue())
            stderr = strip_shell_chars(stderr.getvalue())
            raise SSHCommandTimeoutError(cmd=cmd, timeout=timeout, stdout=stdout, stderr=stderr)
        # Incrementally drain output so channel buffers never fill up and block
        # the remote process.
        stdout.write(self._consume_stdout(chan).getvalue())
        stderr.write(self._consume_stderr(chan).getvalue())
        exit_status_ready = chan.exit_status_ready()
        if exit_status_ready:
            break
        eventlet.sleep(self.SLEEP_DELAY)
    status = chan.recv_exit_status()
    stdout = strip_shell_chars(stdout.getvalue())
    stderr = strip_shell_chars(stderr.getvalue())
    extra = {'_status': status, '_stdout': stdout, '_stderr': stderr}
    self.logger.debug('Command finished', extra=extra)
    return [stdout, stderr, status]
|
'Method which lazily establishes SFTP connection if one is not established yet when this
variable is accessed.'
@property
def sftp(self):
    """Lazily open (and cache) the SFTP session on first access."""
    if self.sftp_client:
        return self.sftp_client
    self.sftp_client = self.client.open_sftp()
    return self.sftp_client
|
def _consume_stdout(self, chan):
    """
    Try to consume stdout data from chan if it's receive ready.

    :return: StringIO holding the decoded data read so far (possibly empty).
    """
    collected = bytearray()
    if chan.recv_ready():
        chunk = chan.recv(self.CHUNK_SIZE)
        collected += chunk
        # Keep draining while the previous read returned data and more is pending.
        while chunk and chan.recv_ready():
            chunk = chan.recv(self.CHUNK_SIZE)
            collected += chunk
    stream = StringIO()
    stream.write(self._get_decoded_data(collected))
    return stream
def _consume_stderr(self, chan):
    """
    Try to consume stderr data from chan if it's receive ready.

    :return: StringIO holding the decoded data read so far (possibly empty).
    """
    collected = bytearray()
    if chan.recv_stderr_ready():
        chunk = chan.recv_stderr(self.CHUNK_SIZE)
        collected += chunk
        # Keep draining while the previous read returned data and more is pending.
        while chunk and chan.recv_stderr_ready():
            chunk = chan.recv_stderr(self.CHUNK_SIZE)
            collected += chunk
    stream = StringIO()
    stream.write(self._get_decoded_data(collected))
    return stream
def _get_pkey_object(self, key_material, passphrase):
    """
    Try to detect private key type and return paramiko.PKey object.

    Each supported key class is tried in turn; the first one that can parse
    the material wins. If none succeed, raise an SSHException whose message
    hints at the most likely cause.
    """
    for key_cls in (paramiko.RSAKey, paramiko.DSSKey, paramiko.ECDSAKey):
        try:
            return key_cls.from_private_key(StringIO(key_material), password=passphrase)
        except paramiko.ssh_exception.SSHException:
            # Wrong key type (or bad material) -- try the next class.
            continue
    has_header = (REMOTE_RUNNER_PRIVATE_KEY_HEADER in key_material.lower())
    # Slashes without a PEM header suggest the user passed a file path instead of
    # the key contents.
    looks_like_path = ((key_material.count('/') >= 1) or (key_material.count('\\') >= 1))
    if ((not has_header) and looks_like_path):
        msg = '"private_key" parameter needs to contain private key data / content and not a path'
    elif passphrase:
        msg = 'Invalid passphrase or invalid/unsupported key type'
    else:
        msg = 'Invalid or unsupported key type'
    raise paramiko.ssh_exception.SSHException(msg)
def _connect(self, host, socket=None):
    """
    Order of precedence for SSH connection parameters:
    1. If user supplies parameters via action parameters, we use them to connect.
    2. For parameters not supplied via action parameters, if there is an entry
       for host in SSH config file, we use those. Note that this is a merge operation.
    3. If user does not supply certain action parameters (username and key file location)
       and there is no entry for host in SSH config file, we use values supplied in
       st2 config file for those parameters.

    :type host: ``str``
    :param host: Host to connect to

    :type socket: :class:`paramiko.Channel` or an opened :class:`socket.socket`
    :param socket: If specified, won't open a socket for communication to the specified host
                   and will use this instead

    :return: A connected SSHClient
    :rtype: :class:`paramiko.SSHClient`

    :raises ValueError: On conflicting / missing credential parameters.
    :raises paramiko.ssh_exception.PasswordRequiredException: If a protected key
        file is supplied without a passphrase.
    :raises SSHException: If the underlying paramiko connect fails.
    """
    conninfo = {'hostname': host, 'allow_agent': False, 'look_for_keys': False,
                'timeout': self.timeout}
    ssh_config_file_info = {}
    if cfg.CONF.ssh_runner.use_ssh_config:
        ssh_config_file_info = self._get_ssh_config_for_host(host)
    # Merge precedence: action parameter -> SSH config entry -> st2 config default.
    self.username = (self.username or ssh_config_file_info.get('user', None) or
                     cfg.CONF.system_user.user)
    # BUG FIX: original read ``ssh_config_file_info.get(('port' or None))`` --
    # ``('port' or None)`` always evaluates to ``'port'``; the intent was clearly
    # ``.get('port', None)``.
    self.port = (self.port or ssh_config_file_info.get('port', None) or DEFAULT_SSH_PORT)
    if (self.key_files and self.key_material):
        msg = 'key_files and key_material arguments are mutually exclusive. Supply only one.'
        raise ValueError(msg)
    # Only fall back to configured key files when no other credential was supplied.
    if ((not self.key_material) and (not self.password)):
        self.key_files = (self.key_files or ssh_config_file_info.get('identityfile', None) or
                          cfg.CONF.system_user.ssh_key_file)
    if (self.passphrase and (not (self.key_files or self.key_material))):
        raise ValueError('passphrase should accompany private key material')
    credentials_provided = (self.password or self.key_files or self.key_material)
    if (not credentials_provided):
        msg = ('Either password or key file location or key material should be supplied ' +
               ('for action. You can also add an entry for host %s in SSH config file %s.' %
                (host, self.ssh_config_file)))
        raise ValueError(msg)
    conninfo['username'] = self.username
    conninfo['port'] = self.port
    if self.password:
        conninfo['password'] = self.password
    if self.key_files:
        conninfo['key_filename'] = self.key_files
        passphrase_reqd = self._is_key_file_needs_passphrase(self.key_files)
        if (passphrase_reqd and (not self.passphrase)):
            msg = ('Private key file %s is passphrase protected. Supply a passphrase.' %
                   self.key_files)
            raise paramiko.ssh_exception.PasswordRequiredException(msg)
        if self.passphrase:
            # Paramiko uses the "password" argument for the key passphrase as well.
            conninfo['password'] = self.passphrase
    if self.key_material:
        conninfo['pkey'] = self._get_pkey_object(key_material=self.key_material,
                                                 passphrase=self.passphrase)
    if ((not self.password) and (not (self.key_files or self.key_material))):
        # No explicit credentials -- let paramiko try the SSH agent and default key paths.
        conninfo['allow_agent'] = True
        conninfo['look_for_keys'] = True
    extra = {'_hostname': host, '_port': self.port, '_username': self.username,
             '_timeout': self.timeout}
    self.logger.debug('Connecting to server', extra=extra)
    socket = (socket or ssh_config_file_info.get('sock', None))
    if socket:
        conninfo['sock'] = socket
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    extra = {'_conninfo': conninfo}
    self.logger.debug('Connection info', extra=extra)
    try:
        client.connect(**conninfo)
    except SSHException as e:
        paramiko_msg = e.message
        if conninfo.get('password', None):
            # Don't leak the password / passphrase in the re-raised error message.
            conninfo['password'] = '<redacted>'
        msg = ((('Error connecting to host %s ' % host) +
                ('with connection parameters %s.' % conninfo)) +
               ('Paramiko error: %s.' % paramiko_msg))
        raise SSHException(msg)
    return client
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.