Dataset columns:
  code: string (lengths 66 to 870k)
  docstring: string (lengths 19 to 26.7k)
  func_name: string (lengths 1 to 138)
  language: string (1 distinct value)
  repo: string (lengths 7 to 68)
  path: string (lengths 5 to 324)
  url: string (lengths 46 to 389)
  license: string (7 distinct values)
def generate_toolbox_deployment(
    service: str, cluster: str, user: str
) -> EksDeploymentConfig:
    """Creates virtual EKS deployment for toolbox containers starting from adhoc configuration

    :param str service: toolbox name
    :param str cluster: target deployment cluster
    :param str user: user requesting the toolbox
    :return: deployment configuration
    """
    if not user.isalnum():
        raise RemoteRunError(
            f"Provided username contains non-alphanumeric characters: {user}"
        )
    # NOTE: API authorization is enforced by service, and we want different rules
    # for each toolbox, so clients send a combined service-instance string, and then
    # we split it here to load the correct instance settings.
    adhoc_instance = service[len(TOOLBOX_MOCK_SERVICE) + 1 :]
    adhoc_deployment = load_adhoc_job_config(
        TOOLBOX_MOCK_SERVICE,
        adhoc_instance,
        cluster,
        load_deployments=False,
    )
    # NOTE: we're explicitly dynamically mounting a single user's public keys
    # as we want these pods to only be usable by said user.
    adhoc_deployment.config_dict.setdefault("extra_volumes", []).append(
        {
            "containerPath": f"/etc/authorized_keys.d/{user}.pub",
            "hostPath": f"/etc/authorized_keys.d/{user}.pub",
            "mode": "RO",
        },
    )
    adhoc_deployment.config_dict.setdefault("env", {})["SANDBOX_USER"] = user
    adhoc_deployment.config_dict["routable_ip"] = True
    return EksDeploymentConfig(
        service=service,
        cluster=cluster,
        instance="main",
        config_dict=adhoc_deployment.config_dict,
        branch_dict=adhoc_deployment.branch_dict,
    )
Creates virtual EKS deployment for toolbox containers starting from adhoc configuration :param str service: toolbox name :param str cluster: target deployment cluster :param str user: user requesting the toolbox :return: deployment configuration
generate_toolbox_deployment
python
Yelp/paasta
paasta_tools/kubernetes/remote_run.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/remote_run.py
Apache-2.0
def find_job_pod(
    kube_client: KubeClient,
    namespace: str,
    job_name: str,
    job_label: str = REMOTE_RUN_JOB_LABEL,
    retries: int = 3,
) -> Optional[V1Pod]:
    """Locate pod for remote-run job

    :param KubeClient kube_client: Kubernetes client
    :param str namespace: the pod namespace
    :param str job_name: remote-run job name
    :param int retries: maximum number of attempts
    :return: pod object if found
    """
    selectors = (
        f"{paasta_prefixed(JOB_TYPE_LABEL_NAME)}={job_label}",
        f"job-name={job_name}",
    )
    for _ in range(retries):
        pod_list = kube_client.core.list_namespaced_pod(
            namespace,
            label_selector=",".join(selectors),
        )
        if pod_list.items:
            return pod_list.items[0]
        sleep(0.5)
    return None
Locate pod for remote-run job :param KubeClient kube_client: Kubernetes client :param str namespace: the pod namespace :param str job_name: remote-run job name :param int retries: maximum number of attempts :return: pod object if found
find_job_pod
python
Yelp/paasta
paasta_tools/kubernetes/remote_run.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/remote_run.py
Apache-2.0
def create_temp_exec_token(
    kube_client: KubeClient,
    namespace: str,
    service_account: str,
) -> str:
    """Create a short lived token for service account

    :param KubeClient kube_client: Kubernetes client
    :param str namespace: service account namespace
    :param str service_account: service account name
    :return: token value
    """
    token_spec = V1TokenRequestSpec(
        expiration_seconds=600,  # minimum allowed by k8s
        audiences=[],
    )
    request = AuthenticationV1TokenRequest(spec=token_spec)
    response = kube_client.core.create_namespaced_service_account_token(
        service_account, namespace, request
    )
    return response.status.token
Create a short lived token for service account :param KubeClient kube_client: Kubernetes client :param str namespace: service account namespace :param str service_account: service account name :return: token value
create_temp_exec_token
python
Yelp/paasta
paasta_tools/kubernetes/remote_run.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/remote_run.py
Apache-2.0
def get_remote_run_service_accounts(
    kube_client: KubeClient, namespace: str, user: str = ""
) -> Sequence[V1ServiceAccount]:
    """List all temporary service account related to remote-run

    :param KubeClient kube_client: Kubernetes client
    :param str namespace: pod namespace
    :param str user: optionally filter by owning user
    :return: list of service accounts
    """
    return get_all_service_accounts(
        kube_client,
        namespace=namespace,
        label_selector=(f"{POD_OWNER_LABEL}={user}" if user else POD_OWNER_LABEL),
    )
List all temporary service account related to remote-run :param KubeClient kube_client: Kubernetes client :param str namespace: pod namespace :param str user: optionally filter by owning user :return: list of service accounts
get_remote_run_service_accounts
python
Yelp/paasta
paasta_tools/kubernetes/remote_run.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/remote_run.py
Apache-2.0
def create_remote_run_service_account(
    kube_client: KubeClient,
    namespace: str,
    pod_name: str,
    user: str,
) -> str:
    """Create service account to exec into remote-run pod

    :param KubeClient kube_client: Kubernetes client
    :param str namespace: pod namespace
    :param str pod_name: pod name
    :param str user: user requiring credentials
    """
    pod_name_hash = hashlib.sha1(pod_name.encode("utf-8")).hexdigest()[:12]
    service_account_name = limit_size_with_hash(f"remote-run-{user}-{pod_name_hash}")
    service_accounts = get_remote_run_service_accounts(kube_client, namespace, user)
    if any(item.metadata.name == service_account_name for item in service_accounts):
        return service_account_name
    service_account = V1ServiceAccount(
        metadata=V1ObjectMeta(
            name=service_account_name,
            namespace=namespace,
            labels={POD_OWNER_LABEL: user},
        )
    )
    kube_client.core.create_namespaced_service_account(
        namespace=namespace, body=service_account
    )
    return service_account_name
Create service account to exec into remote-run pod :param KubeClient kube_client: Kubernetes client :param str namespace: pod namespace :param str pod_name: pod name :param str user: user requiring credentials
create_remote_run_service_account
python
Yelp/paasta
paasta_tools/kubernetes/remote_run.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/remote_run.py
Apache-2.0
def create_pod_scoped_role(
    kube_client: KubeClient,
    namespace: str,
    pod_name: str,
    user: str,
) -> str:
    """Create role with execution access to specific pod

    :param KubeClient kube_client: Kubernetes client
    :param str namespace: pod namespace
    :param str pod_name: pod name
    :param str user: user requiring the role
    :return: name of the role
    """
    pod_name_hash = hashlib.sha1(pod_name.encode("utf-8")).hexdigest()[:12]
    role_name = f"remote-run-role-{pod_name_hash}"
    policy = V1PolicyRule(
        verbs=["create", "get"],
        resources=["pods", "pods/exec"],
        resource_names=[pod_name],
        api_groups=[""],
    )
    role = V1Role(
        rules=[policy],
        metadata=V1ObjectMeta(
            name=role_name,
            labels={POD_OWNER_LABEL: user},
        ),
    )
    try:
        kube_client.rbac.create_namespaced_role(namespace=namespace, body=role)
    except ApiException as e:
        if e.status != 409:
            raise
    return role_name
Create role with execution access to specific pod :param KubeClient kube_client: Kubernetes client :param str namespace: pod namespace :param str pod_name: pod name :param str user: user requiring the role :return: name of the role
create_pod_scoped_role
python
Yelp/paasta
paasta_tools/kubernetes/remote_run.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/remote_run.py
Apache-2.0
def bind_role_to_service_account(
    kube_client: KubeClient,
    namespace: str,
    service_account: str,
    role: str,
    user: str,
) -> None:
    """Bind service account to role

    :param KubeClient kube_client: Kubernetes client
    :param str namespace: service account namespace
    :param str service_account: service account name
    :param str role: role name
    :param str user: user requiring the role
    """
    role_binding = V1RoleBinding(
        metadata=V1ObjectMeta(
            name=limit_size_with_hash(f"remote-run-binding-{role}"),
            namespace=namespace,
            labels={POD_OWNER_LABEL: user},
        ),
        role_ref=V1RoleRef(
            api_group="rbac.authorization.k8s.io",
            kind="Role",
            name=role,
        ),
        subjects=[
            V1Subject(
                kind="ServiceAccount",
                name=service_account,
            ),
        ],
    )
    try:
        kube_client.rbac.create_namespaced_role_binding(
            namespace=namespace,
            body=role_binding,
        )
    except ApiException as e:
        if e.status != 409:
            raise
Bind service account to role :param KubeClient kube_client: Kubernetes client :param str namespace: service account namespace :param str service_account: service account name :param str role: role name :param str user: user requiring the role
bind_role_to_service_account
python
Yelp/paasta
paasta_tools/kubernetes/remote_run.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/remote_run.py
Apache-2.0
def get_remote_run_roles(kube_client: KubeClient, namespace: str) -> List[V1Role]:
    """List all temporary roles related to remote-run

    :param KubeClient kube_client: Kubernetes client
    :param str namespace: role namespace
    :return: list of roles
    """
    return kube_client.rbac.list_namespaced_role(
        namespace,
        label_selector=POD_OWNER_LABEL,
    ).items
List all temporary roles related to remote-run :param KubeClient kube_client: Kubernetes client :param str namespace: role namespace :return: list of roles
get_remote_run_roles
python
Yelp/paasta
paasta_tools/kubernetes/remote_run.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/remote_run.py
Apache-2.0
def get_remote_run_role_bindings(
    kube_client: KubeClient, namespace: str
) -> List[V1RoleBinding]:
    """List all temporary role bindings related to remote-run

    :param KubeClient kube_client: Kubernetes client
    :param str namespace: role namespace
    :return: list of role bindings
    """
    return kube_client.rbac.list_namespaced_role_binding(
        namespace,
        label_selector=POD_OWNER_LABEL,
    ).items
List all temporary role bindings related to remote-run :param KubeClient kube_client: Kubernetes client :param str namespace: role namespace :return: list of role bindings
get_remote_run_role_bindings
python
Yelp/paasta
paasta_tools/kubernetes/remote_run.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/remote_run.py
Apache-2.0
def get_remote_run_jobs(kube_client: KubeClient, namespace: str) -> List[V1Job]:
    """List all remote-run jobs

    :param KubeClient kube_client: Kubernetes client
    :param str namespace: job namespace
    """
    return kube_client.batches.list_namespaced_job(
        namespace,
        label_selector=f"{paasta_prefixed(JOB_TYPE_LABEL_NAME)}={REMOTE_RUN_JOB_LABEL}",
    ).items
List all remote-run jobs :param KubeClient kube_client: Kubernetes client :param str namespace: job namespace
get_remote_run_jobs
python
Yelp/paasta
paasta_tools/kubernetes/remote_run.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/remote_run.py
Apache-2.0
def __init__(
    self,
    item: Union[V1Deployment, V1StatefulSet],
    logging=logging.getLogger(__name__),
) -> None:
    """
    This Application wrapper is an interface for creating/deleting k8s deployments and statefulsets
    soa_config is KubernetesDeploymentConfig. It is not loaded in init because it is not always required.

    :param item: Kubernetes Object(V1Deployment/V1StatefulSet) that has already been filled up.
    :param logging: where logs go
    """
    if not item.metadata.namespace:
        item.metadata.namespace = "paasta"
    attrs = {
        attr: item.metadata.labels.get(paasta_prefixed(attr))
        for attr in [
            "service",
            "instance",
            "git_sha",
            "image_version",
            "config_sha",
        ]
    }
    replicas = (
        item.spec.replicas
        if item.metadata.labels.get(paasta_prefixed("autoscaled"), "false") == "false"
        else None
    )
    self.kube_deployment = KubeDeployment(
        replicas=replicas, namespace=item.metadata.namespace, **attrs
    )
    self.item = item
    self.soa_config = None  # type: KubernetesDeploymentConfig
    self.logging = logging
This Application wrapper is an interface for creating/deleting k8s deployments and statefulsets soa_config is KubernetesDeploymentConfig. It is not loaded in init because it is not always required. :param item: Kubernetes Object(V1Deployment/V1StatefulSet) that has already been filled up. :param logging: where logs go
__init__
python
Yelp/paasta
paasta_tools/kubernetes/application/controller_wrappers.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/application/controller_wrappers.py
Apache-2.0
def ensure_service_account(self, kube_client: KubeClient) -> None:
    """
    Ensure that the service account for this application exists

    :param kube_client:
    """
    if self.soa_config.get_iam_role():
        ensure_service_account(
            iam_role=self.soa_config.get_iam_role(),
            namespace=self.soa_config.get_namespace(),
            kube_client=kube_client,
        )
Ensure that the service account for this application exists :param kube_client:
ensure_service_account
python
Yelp/paasta
paasta_tools/kubernetes/application/controller_wrappers.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/application/controller_wrappers.py
Apache-2.0
def deep_delete(
    self, kube_client: KubeClient, propagation_policy="Foreground"
) -> None:
    """
    Remove all controllers, pods, and pod disruption budgets related to this application

    :param kube_client:
    """
    delete_options = V1DeleteOptions(propagation_policy=propagation_policy)
    try:
        kube_client.deployments.delete_namespaced_deployment(
            self.item.metadata.name,
            self.item.metadata.namespace,
            body=delete_options,
        )
    except ApiException as e:
        if e.status == 404:
            # Deployment does not exist, nothing to delete but
            # we can consider this a success.
            self.logging.debug(
                "not deleting nonexistent deploy/{} from namespace/{}".format(
                    self.item.metadata.name, self.item.metadata.namespace
                )
            )
        else:
            raise
    else:
        self.logging.info(
            "deleted deploy/{} from namespace/{}".format(
                self.item.metadata.name, self.item.metadata.namespace
            )
        )
    self.delete_pod_disruption_budget(kube_client)
    self.delete_horizontal_pod_autoscaler(kube_client)
Remove all controllers, pods, and pod disruption budgets related to this application :param kube_client:
deep_delete
python
Yelp/paasta
paasta_tools/kubernetes/application/controller_wrappers.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/application/controller_wrappers.py
Apache-2.0
def sync_horizontal_pod_autoscaler(self, kube_client: KubeClient) -> None:
    """
    In order for autoscaling to work, there need to be at least two configuration values,
    min_instances and max_instances, and instances cannot be set.
    """
    desired_hpa_spec = self.soa_config.get_autoscaling_metric_spec(
        name=self.item.metadata.name,
        cluster=self.soa_config.cluster,
        kube_client=kube_client,
        namespace=self.item.metadata.namespace,
        min_instances_override=(
            self.hpa_override["min_instances"] if self.hpa_override else None
        ),
    )
    hpa_exists = self.exists_hpa(kube_client)
    should_have_hpa = desired_hpa_spec and not autoscaling_is_paused()

    if not should_have_hpa:
        self.logging.info(
            f"No HPA required for {self.item.metadata.name}/name in {self.item.metadata.namespace}"
        )
        if hpa_exists:
            self.logging.info(
                f"Deleting HPA for {self.item.metadata.name}/name in {self.item.metadata.namespace}"
            )
            self.delete_horizontal_pod_autoscaler(kube_client)
        return

    self.logging.info(
        f"Syncing HPA setting for {self.item.metadata.name}/name in {self.item.metadata.namespace}"
    )
    self.logging.debug(desired_hpa_spec)
    if not hpa_exists:
        self.logging.info(
            f"Creating new HPA for {self.item.metadata.name}/name in {self.item.metadata.namespace}"
        )
        kube_client.autoscaling.create_namespaced_horizontal_pod_autoscaler(
            namespace=self.item.metadata.namespace,
            body=desired_hpa_spec,
            pretty=True,
        )
    else:
        self.logging.info(
            f"Updating new HPA for {self.item.metadata.name}/name in {self.item.metadata.namespace}/namespace"
        )
        kube_client.autoscaling.replace_namespaced_horizontal_pod_autoscaler(
            name=self.item.metadata.name,
            namespace=self.item.metadata.namespace,
            body=desired_hpa_spec,
            pretty=True,
        )
In order for autoscaling to work, there need to be at least two configuration values, min_instances and max_instances, and instances cannot be set.
sync_horizontal_pod_autoscaler
python
Yelp/paasta
paasta_tools/kubernetes/application/controller_wrappers.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/application/controller_wrappers.py
Apache-2.0
def deep_delete(self, kube_client: KubeClient) -> None:
    """
    Remove all controllers, pods, and pod disruption budgets related to this application

    :param kube_client:
    """
    delete_options = V1DeleteOptions(propagation_policy="Foreground")
    try:
        kube_client.deployments.delete_namespaced_stateful_set(
            self.item.metadata.name,
            self.item.metadata.namespace,
            body=delete_options,
        )
    except ApiException as e:
        if e.status == 404:
            # StatefulSet does not exist, nothing to delete but
            # we can consider this a success.
            self.logging.debug(
                "not deleting nonexistent statefulset/{} from namespace/{}".format(
                    self.item.metadata.name, self.item.metadata.namespace
                )
            )
        else:
            raise
    else:
        self.logging.info(
            "deleted statefulset/{} from namespace/{}".format(
                self.item.metadata.name, self.item.metadata.namespace
            )
        )
    self.delete_pod_disruption_budget(kube_client)
Remove all controllers, pods, and pod disruption budgets related to this application :param kube_client:
deep_delete
python
Yelp/paasta
paasta_tools/kubernetes/application/controller_wrappers.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/application/controller_wrappers.py
Apache-2.0
def deep_delete(self, kube_client: KubeClient) -> None:
    """Remove resources related to the job"""
    delete_options = V1DeleteOptions(propagation_policy="Foreground")
    try:
        kube_client.batches.delete_namespaced_job(
            self.item.metadata.name,
            self.item.metadata.namespace,
            body=delete_options,
        )
    except ApiException as e:
        if e.status == 404:
            # Job does not exist, nothing to delete but
            # we can consider this a success.
            self.logging.debug(
                "not deleting nonexistent job/{} from namespace/{}".format(
                    self.item.metadata.name,
                    self.item.metadata.namespace,
                )
            )
        else:
            raise
    else:
        self.logging.info(
            "deleted job/{} from namespace/{}".format(
                self.item.metadata.name,
                self.item.metadata.namespace,
            )
        )
Remove resources related to the job
deep_delete
python
Yelp/paasta
paasta_tools/kubernetes/application/controller_wrappers.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/application/controller_wrappers.py
Apache-2.0
def list_all_applications(
    kube_client: KubeClient, application_types: Sequence[Any]
) -> Dict[Tuple[str, str], List[Application]]:
    """
    List all applications in the cluster of the types from application_types.
    Only applications with complete set of labels are included (See is_valid_application()).

    :param kube_client:
    :param application_types: types of applications
    :return: A mapping from (service, instance) to application
    """
    apps: Dict[Tuple[str, str], List[Application]] = {}
    for application_type in application_types:
        if application_type == V1Deployment:
            apps = {**apps, **list_paasta_managed_deployments(kube_client)}
        elif application_type == V1StatefulSet:
            apps.update(list_paasta_managed_stateful_sets(kube_client))
    return apps
List all applications in the cluster of the types from application_types. Only applications with complete set of labels are included (See is_valid_application()). :param kube_client: :param application_types: types of applications :return: A mapping from (service, instance) to application
list_all_applications
python
Yelp/paasta
paasta_tools/kubernetes/application/tools.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/application/tools.py
Apache-2.0
def set_temporary_environment_variables(
    environ: Mapping[str, str]
) -> Generator[None, None, None]:
    """
    *Note the return value means "yields None, takes None, and when finished, returns None"*

    Modifies the os.environ variable then yields this temporary state. Resets it when finished.

    :param environ: Environment variables to set
    """
    old_environ = dict(os.environ)  # ensure we're storing a copy
    os.environ.update(environ)
    try:
        yield
    finally:
        os.environ.clear()
        os.environ.update(old_environ)
*Note the return value means "yields None, takes None, and when finished, returns None"* Modifies the os.environ variable then yields this temporary state. Resets it when finished. :param environ: Environment variables to set
set_temporary_environment_variables
python
Yelp/paasta
paasta_tools/kubernetes/bin/paasta_secrets_sync.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/bin/paasta_secrets_sync.py
Apache-2.0
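A minimal usage sketch for the set_temporary_environment_variables row above, assuming the function is wrapped with contextlib.contextmanager (suggested by its Generator[None, None, None] return type and by the `with` usage in sync_datastore_credentials further down); the environment variable name below is made up for illustration.

import os
from contextlib import contextmanager
from typing import Generator, Mapping


@contextmanager
def set_temporary_environment_variables(
    environ: Mapping[str, str]
) -> Generator[None, None, None]:
    # Same body as the dataset row above: stash a copy, apply overrides, restore on exit.
    old_environ = dict(os.environ)
    os.environ.update(environ)
    try:
        yield
    finally:
        os.environ.clear()
        os.environ.update(old_environ)


with set_temporary_environment_variables({"VAULT_ADDR_OVERRIDE": "https://vault.example.com"}):
    assert os.environ["VAULT_ADDR_OVERRIDE"] == "https://vault.example.com"
assert "VAULT_ADDR_OVERRIDE" not in os.environ  # restored (assuming it was unset before)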
def get_services_to_k8s_namespaces_to_allowlist(
    service_list: List[str], cluster: str, soa_dir: str, kube_client: KubeClient
) -> Dict[
    str,  # service
    Dict[
        str,  # namespace
        Optional[Set[str]],  # allowlist of secret names, None means allow all.
    ],
]:
    """
    Generate a mapping of service -> namespace -> allowlist of secrets, e.g.
    {
        "yelp-main": {
            "paasta": {"secret1", "secret2"},
            "paastasvc-yelp-main": {"secret1", "secret3"},
            "paasta-flinks": None,
        },
        "_shared": {
            "paasta": {"sharedsecret1"},
            "paastasvc-yelp-main": {"sharedsecret1", "sharedsecret2"},
            "paasta-flinks": None,
        }
    }

    This mapping is used by sync_all_secrets / sync_secrets: sync_secrets will only sync secrets
    into a namespace if the allowlist is None or contains that secret's name.
    """
    services_to_k8s_namespaces_to_allowlist: Dict[
        str, Dict[str, Optional[Set[str]]]
    ] = defaultdict(dict)

    for service in service_list:
        if service == "_shared":
            # _shared is handled specially for each service.
            continue
        config_loader = PaastaServiceConfigLoader(service, soa_dir)
        for instance_type_class in K8S_INSTANCE_TYPE_CLASSES:
            for service_instance_config in config_loader.instance_configs(
                cluster=cluster, instance_type_class=instance_type_class
            ):
                secrets_used, shared_secrets_used = get_secrets_used_by_instance(
                    service_instance_config
                )
                allowlist = services_to_k8s_namespaces_to_allowlist[service].setdefault(
                    service_instance_config.get_namespace(),
                    set(),
                )
                if allowlist is not None:
                    allowlist.update(secrets_used)

                if "_shared" in service_list:
                    shared_allowlist = services_to_k8s_namespaces_to_allowlist[
                        "_shared"
                    ].setdefault(
                        service_instance_config.get_namespace(),
                        set(),
                    )
                    if shared_allowlist is not None:
                        shared_allowlist.update(shared_secrets_used)

        for instance_type in INSTANCE_TYPES:
            if instance_type in PAASTA_K8S_INSTANCE_TYPES:
                continue  # handled above.

            instances = get_service_instance_list(
                service=service,
                instance_type=instance_type,
                cluster=cluster,
                soa_dir=soa_dir,
            )
            if instances:
                # Currently, all instance types besides kubernetes use one big namespace, defined in
                # INSTANCE_TYPE_TO_K8S_NAMESPACE. Sync all shared secrets and all secrets belonging to any service
                # which uses that instance type.
                services_to_k8s_namespaces_to_allowlist[service][
                    INSTANCE_TYPE_TO_K8S_NAMESPACE[instance_type]
                ] = None
                if "_shared" in service_list:
                    services_to_k8s_namespaces_to_allowlist["_shared"][
                        INSTANCE_TYPE_TO_K8S_NAMESPACE[instance_type]
                    ] = None

    return dict(services_to_k8s_namespaces_to_allowlist)
Generate a mapping of service -> namespace -> allowlist of secrets, e.g. { "yelp-main": { "paasta": {"secret1", "secret2"}, "paastasvc-yelp-main": {"secret1", "secret3"}, "paasta-flinks": None, }, "_shared": { "paasta": {"sharedsecret1"}, "paastasvc-yelp-main": {"sharedsecret1", "sharedsecret2"}, "paasta-flinks": None, } } This mapping is used by sync_all_secrets / sync_secrets: sync_secrets will only sync secrets into a namespace if the allowlist is None or contains that secret's name.
get_services_to_k8s_namespaces_to_allowlist
python
Yelp/paasta
paasta_tools/kubernetes/bin/paasta_secrets_sync.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/bin/paasta_secrets_sync.py
Apache-2.0
def sync_datastore_credentials(
    kube_client: KubeClient,
    cluster: str,
    service: str,
    secret_provider_name: str,
    vault_cluster_config: Dict[str, str],
    soa_dir: str,
    vault_token_file: str,
    overwrite_namespace: Optional[str] = None,
) -> bool:
    """
    Map all the passwords requested for this service-instance to a single Kubernetes Secret store.
    Volume mounts will then map the associated secrets to their associated mount paths.
    """
    config_loader = PaastaServiceConfigLoader(service=service, soa_dir=soa_dir)
    system_paasta_config = load_system_paasta_config()
    datastore_credentials_vault_overrides = (
        system_paasta_config.get_datastore_credentials_vault_overrides()
    )

    for instance_type_class in K8S_INSTANCE_TYPE_CLASSES:
        for instance_config in config_loader.instance_configs(
            cluster=cluster, instance_type_class=instance_type_class
        ):
            namespace = (
                overwrite_namespace
                if overwrite_namespace is not None
                else instance_config.get_namespace()
            )
            datastore_credentials = instance_config.get_datastore_credentials()
            with set_temporary_environment_variables(
                datastore_credentials_vault_overrides
            ):
                # expects VAULT_ADDR_OVERRIDE, VAULT_CA_OVERRIDE, and VAULT_TOKEN_OVERRIDE to be set
                # in order to use a custom vault shard. overridden temporarily in this context
                provider = get_secret_provider(
                    secret_provider_name=secret_provider_name,
                    soa_dir=soa_dir,
                    service_name=service,
                    cluster_names=[cluster],
                    # overridden by env variables but still needed here for spec validation
                    secret_provider_kwargs={
                        "vault_cluster_config": vault_cluster_config,
                        "vault_auth_method": "token",
                        "vault_token_file": vault_token_file,
                    },
                )

                secret_data = {}
                for datastore, credentials in datastore_credentials.items():
                    # mypy loses type hints on '.items' and throws false positives. unfortunately have to type: ignore
                    # https://github.com/python/mypy/issues/7178
                    for credential in credentials:  # type: ignore
                        vault_path = f"secrets/datastore/{datastore}/{credential}"
                        secrets = provider.get_data_from_vault_path(vault_path)
                        if not secrets:
                            # no secrets found at this path. skip syncing
                            log.debug(
                                f"Warning: no secrets found at requested path {vault_path}."
                            )
                            continue

                        # decrypt and save in secret_data
                        vault_key_path = get_vault_key_secret_name(vault_path)

                        # kubernetes expects data to be base64 encoded binary in utf-8 when put into secret maps
                        # may look like:
                        #   {'master': {'passwd': '****', 'user': 'v-approle-mysql-serv-nVcYexH95A2'}, 'reporting': {'passwd': '****', 'user': 'v-approle-mysql-serv-GgCpRIh9Ut7'}, 'slave': {'passwd': '****', 'user': 'v-approle-mysql-serv-PzjPwqNMbqu'}
                        secret_data[vault_key_path] = base64.b64encode(
                            json.dumps(secrets).encode("utf-8")
                        ).decode("utf-8")

            create_or_update_k8s_secret(
                service=service,
                signature_name=instance_config.get_datastore_credentials_signature_name(),
                secret_name=instance_config.get_datastore_credentials_secret_name(),
                get_secret_data=(lambda: secret_data),
                secret_signature=_get_dict_signature(secret_data),
                kube_client=kube_client,
                namespace=namespace,
            )
    return True
Map all the passwords requested for this service-instance to a single Kubernetes Secret store. Volume mounts will then map the associated secrets to their associated mount paths.
sync_datastore_credentials
python
Yelp/paasta
paasta_tools/kubernetes/bin/paasta_secrets_sync.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/bin/paasta_secrets_sync.py
Apache-2.0
def sync_crypto_secrets(
    kube_client: KubeClient,
    cluster: str,
    service: str,
    secret_provider_name: str,
    vault_cluster_config: Dict[str, str],
    soa_dir: str,
    vault_token_file: str,
) -> bool:
    """
    For each key-name in `crypto_key`,
    1. Fetch all versions of the key-name from Vault superregion mapped from cluster,
       e.g. `kubestage` maps to `devc` Vault server.
    2. Create K8s secret from JSON blob containing all key versions.
    3. Create signatures as K8s configmap based on JSON blob hash.

    So each replica of a service instance gets the same key, thereby reducing requests
    to Vault API as we only talk to vault during secret syncing
    """
    config_loader = PaastaServiceConfigLoader(service=service, soa_dir=soa_dir)
    for instance_type_class in K8S_INSTANCE_TYPE_CLASSES:
        for instance_config in config_loader.instance_configs(
            cluster=cluster, instance_type_class=instance_type_class
        ):
            crypto_keys = instance_config.get_crypto_keys_from_config()
            if not crypto_keys:
                continue
            secret_data = {}
            provider = get_secret_provider(
                secret_provider_name=secret_provider_name,
                soa_dir=soa_dir,
                service_name=service,
                cluster_names=[cluster],
                secret_provider_kwargs={
                    "vault_cluster_config": vault_cluster_config,
                    "vault_auth_method": "token",
                    "vault_token_file": vault_token_file,
                },
            )
            for key in crypto_keys:
                key_versions = provider.get_key_versions(key)
                if not key_versions:
                    log.error(
                        f"No key versions found for {key} on {instance_config.get_sanitised_deployment_name()}"
                    )
                    continue
                secret_data[get_vault_key_secret_name(key)] = base64.b64encode(
                    json.dumps(key_versions).encode("utf-8")
                ).decode("utf-8")

            if not secret_data:
                continue

            create_or_update_k8s_secret(
                service=service,
                signature_name=instance_config.get_crypto_secret_signature_name(),
                # the secret name here must match the secret name given in the secret volume config,
                # i.e. `kubernetes.client.V1SecretVolumeSource`'s `secret_name` must match below
                secret_name=instance_config.get_crypto_secret_name(),
                get_secret_data=(lambda: secret_data),
                secret_signature=_get_dict_signature(secret_data),
                kube_client=kube_client,
                namespace=instance_config.get_namespace(),
            )

    return True
For each key-name in `crypto_key`, 1. Fetch all versions of the key-name from Vault superregion mapped from cluster, e.g. `kubestage` maps to `devc` Vault server. 2. Create K8s secret from JSON blob containing all key versions. 3. Create signatures as K8s configmap based on JSON blob hash. So each replica of a service instance gets the same key, thereby reducing requests to Vault API as we only talk to vault during secret syncing
sync_crypto_secrets
python
Yelp/paasta
paasta_tools/kubernetes/bin/paasta_secrets_sync.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/bin/paasta_secrets_sync.py
Apache-2.0
def create_or_update_k8s_secret(
    service: str,
    secret_name: str,
    signature_name: str,
    get_secret_data: Callable[[], Dict[str, str]],
    secret_signature: str,
    kube_client: KubeClient,
    namespace: str,
) -> None:
    """
    :param get_secret_data: is a function to postpone fetching data in order to reduce service load, e.g. Vault API
    """
    # In order to prevent slamming the k8s API, add some artificial delay here
    delay = load_system_paasta_config().get_secret_sync_delay_seconds()
    if delay:
        time.sleep(delay)

    kubernetes_signature = get_secret_signature(
        kube_client=kube_client,
        signature_name=signature_name,
        namespace=namespace,
    )

    if not kubernetes_signature:
        log.info(f"{secret_name} for {service} in {namespace} not found, creating")
        try:
            create_secret(
                kube_client=kube_client,
                service_name=service,
                secret_name=secret_name,
                secret_data=get_secret_data(),
                namespace=namespace,
            )
        except ApiException as e:
            if e.status == 409:
                log.warning(
                    f"Secret {secret_name} for {service} already exists in {namespace} but no signature found. Updating secret and signature."
                )
                update_secret(
                    kube_client=kube_client,
                    secret_name=secret_name,
                    secret_data=get_secret_data(),
                    service_name=service,
                    namespace=namespace,
                )
            else:
                raise
        create_secret_signature(
            kube_client=kube_client,
            service_name=service,
            signature_name=signature_name,
            secret_signature=secret_signature,
            namespace=namespace,
        )
    elif secret_signature != kubernetes_signature:
        log.info(
            f"{secret_name} for {service} in {namespace} needs updating as signature changed"
        )
        update_secret(
            kube_client=kube_client,
            secret_name=secret_name,
            secret_data=get_secret_data(),
            service_name=service,
            namespace=namespace,
        )
        update_secret_signature(
            kube_client=kube_client,
            service_name=service,
            signature_name=signature_name,
            secret_signature=secret_signature,
            namespace=namespace,
        )
    else:
        log.info(f"{secret_name} for {service} in {namespace} up to date")
:param get_secret_data: is a function to postpone fetching data in order to reduce service load, e.g. Vault API
create_or_update_k8s_secret
python
Yelp/paasta
paasta_tools/kubernetes/bin/paasta_secrets_sync.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/kubernetes/bin/paasta_secrets_sync.py
Apache-2.0
def resolve(self, cfg):
    """Resolve the URL to the mesos master.

    The value of cfg should be one of:
    - host:port
    - zk://host1:port1,host2:port2/path
    - zk://username:password@host1:port1/path
    - file:///path/to/file (where file contains one of the above)
    """
    if cfg.startswith("zk:"):
        return self._zookeeper_resolver(cfg)
    elif cfg.startswith("file:"):
        return self._file_resolver(cfg)
    else:
        return cfg
Resolve the URL to the mesos master. The value of cfg should be one of: - host:port - zk://host1:port1,host2:port2/path - zk://username:password@host1:port1/path - file:///path/to/file (where file contains one of the above)
resolve
python
Yelp/paasta
paasta_tools/mesos/master.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/mesos/master.py
Apache-2.0
def stream(fn, elements, workers):
    """Yield the results of fn as jobs complete."""
    jobs = []
    with execute(workers) as executor:
        for elem in elements:
            jobs.append(executor.submit(fn, elem))
        for job in concurrent.futures.as_completed(jobs):
            try:
                yield job.result()
            except exceptions.SkipResult:
                pass
Yield the results of fn as jobs complete.
stream
python
Yelp/paasta
paasta_tools/mesos/parallel.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/mesos/parallel.py
Apache-2.0
def get_num_masters() -> int:
    """Gets the number of masters from mesos state"""
    zookeeper_host_path = get_zookeeper_host_path()
    return get_number_of_mesos_masters(
        zookeeper_host_path.host, zookeeper_host_path.path
    )
Gets the number of masters from mesos state
get_num_masters
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def get_mesos_cpu_status(
    metrics: MesosMetrics, mesos_state: MesosState
) -> Tuple[int, int, int]:
    """Takes in the mesos metrics and analyzes them, returning the status.

    :param metrics: mesos metrics dictionary.
    :param mesos_state: mesos state dictionary.
    :returns: Tuple of total, used, and available CPUs.
    """
    total = metrics["master/cpus_total"]
    used = metrics["master/cpus_used"]

    for slave in mesos_state["slaves"]:
        used += reserved_maintenence_resources(slave["reserved_resources"])["cpus"]

    available = total - used
    return total, used, available
Takes in the mesos metrics and analyzes them, returning the status. :param metrics: mesos metrics dictionary. :param mesos_state: mesos state dictionary. :returns: Tuple of total, used, and available CPUs.
get_mesos_cpu_status
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
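A worked sketch of the arithmetic in get_mesos_cpu_status above, using fabricated metrics and state; the inline maintenance lookup stands in for reserved_maintenence_resources, whose exact return shape is an assumption here.

metrics = {"master/cpus_total": 100, "master/cpus_used": 60}
mesos_state = {
    "slaves": [
        {"reserved_resources": {"maintenance": {"cpus": 4}}},
        {"reserved_resources": {"maintenance": {"cpus": 6}}},
    ]
}

total = metrics["master/cpus_total"]
used = metrics["master/cpus_used"]
for slave in mesos_state["slaves"]:
    # stand-in for reserved_maintenence_resources(slave["reserved_resources"])["cpus"]
    used += slave["reserved_resources"]["maintenance"]["cpus"]

available = total - used
assert (total, used, available) == (100, 70, 30)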
def get_kube_cpu_status(
    nodes: Sequence[V1Node],
) -> Tuple[float, float, float]:
    """Takes in the list of Kubernetes nodes and analyzes them, returning the status.

    :param nodes: list of Kubernetes nodes.
    :returns: Tuple of total, used, and available CPUs.
    """
    total = 0.0
    available = 0.0
    for node in nodes:
        available += suffixed_number_value(node.status.allocatable["cpu"])
        total += suffixed_number_value(node.status.capacity["cpu"])

    used = total - available
    return total, used, available
Takes in the list of Kubernetes nodes and analyzes them, returning the status. :param nodes: list of Kubernetes nodes. :returns: Tuple of total, used, and available CPUs.
get_kube_cpu_status
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def get_mesos_memory_status(
    metrics: MesosMetrics, mesos_state: MesosState
) -> Tuple[int, int, int]:
    """Takes in the mesos metrics and analyzes them, returning the status.

    :param metrics: mesos metrics dictionary.
    :param mesos_state: mesos state dictionary.
    :returns: Tuple of total, used, and available memory in Mi.
    """
    total = metrics["master/mem_total"]
    used = metrics["master/mem_used"]

    for slave in mesos_state["slaves"]:
        used += reserved_maintenence_resources(slave["reserved_resources"])["mem"]

    available = total - used
    return total, used, available
Takes in the mesos metrics and analyzes them, returning the status. :param metrics: mesos metrics dictionary. :param mesos_state: mesos state dictionary. :returns: Tuple of total, used, and available memory in Mi.
get_mesos_memory_status
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def get_kube_memory_status(
    nodes: Sequence[V1Node],
) -> Tuple[float, float, float]:
    """Takes in the list of Kubernetes nodes and analyzes them, returning the status.

    :param nodes: list of Kubernetes nodes.
    :returns: Tuple of total, used, and available memory in Mi.
    """
    total = 0.0
    available = 0.0
    for node in nodes:
        available += suffixed_number_value(node.status.allocatable["memory"])
        total += suffixed_number_value(node.status.capacity["memory"])

    total //= 1024 * 1024
    available //= 1024 * 1024
    used = total - available
    return total, used, available
Takes in the list of Kubernetes nodes and analyzes them, returning the status. :param nodes: list of Kubernetes nodes. :returns: Tuple of total, used, and available memory in Mi.
get_kube_memory_status
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def get_mesos_disk_status(
    metrics: MesosMetrics, mesos_state: MesosState
) -> Tuple[int, int, int]:
    """Takes in the mesos metrics and analyzes them, returning the status.

    :param metrics: mesos metrics dictionary.
    :param mesos_state: mesos state dictionary.
    :returns: Tuple of total, used, and available disk space in Mi.
    """
    total = metrics["master/disk_total"]
    used = metrics["master/disk_used"]

    for slave in mesos_state["slaves"]:
        used += reserved_maintenence_resources(slave["reserved_resources"])["disk"]

    available = total - used
    return total, used, available
Takes in the mesos metrics and analyzes them, returning the status. :param metrics: mesos metrics dictionary. :param mesos_state: mesos state dictionary. :returns: Tuple of total, used, and available disk space in Mi.
get_mesos_disk_status
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def get_kube_disk_status(
    nodes: Sequence[V1Node],
) -> Tuple[float, float, float]:
    """Takes in the list of Kubernetes nodes and analyzes them, returning the status.

    :param nodes: list of Kubernetes nodes.
    :returns: Tuple of total, used, and available disk space in Mi.
    """
    total = 0.0
    available = 0.0
    for node in nodes:
        available += suffixed_number_value(node.status.allocatable["ephemeral-storage"])
        total += suffixed_number_value(node.status.capacity["ephemeral-storage"])

    total //= 1024 * 1024
    available //= 1024 * 1024
    used = total - available
    return total, used, available
Takes in the list of Kubernetes nodes and analyzes them, returning the status. :param nodes: list of Kubernetes nodes. :returns: Tuple of total, used, and available disk space in Mi.
get_kube_disk_status
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def get_mesos_gpu_status(
    metrics: MesosMetrics, mesos_state: MesosState
) -> Tuple[int, int, int]:
    """Takes in the mesos metrics and analyzes them, returning gpus status.

    :param metrics: mesos metrics dictionary.
    :param mesos_state: mesos state dictionary.
    :returns: Tuple of total, used, and available GPUs.
    """
    total = metrics["master/gpus_total"]
    used = metrics["master/gpus_used"]

    for slave in mesos_state["slaves"]:
        used += reserved_maintenence_resources(slave["reserved_resources"])["gpus"]

    available = total - used
    return total, used, available
Takes in the mesos metrics and analyzes them, returning gpus status. :param metrics: mesos metrics dictionary. :param mesos_state: mesos state dictionary. :returns: Tuple of total, used, and available GPUs.
get_mesos_gpu_status
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def get_kube_gpu_status(
    nodes: Sequence[V1Node],
) -> Tuple[float, float, float]:
    """Takes in the list of Kubernetes nodes and analyzes them, returning the status.

    :param nodes: list of Kubernetes nodes.
    :returns: Tuple of total, used, and available GPUs.
    """
    total = 0.0
    available = 0.0
    for node in nodes:
        available += suffixed_number_value(
            node.status.allocatable.get("nvidia.com/gpu", "0")
        )
        total += suffixed_number_value(node.status.capacity.get("nvidia.com/gpu", "0"))

    used = total - available
    return total, used, available
Takes in the list of Kubernetes nodes and analyzes them, returning the status. :param nodes: list of Kubernetes nodes. :returns: Tuple of total, used, and available GPUs.
get_kube_gpu_status
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def healthcheck_result_for_resource_utilization(
    resource_utilization: ResourceUtilization, threshold: int
) -> HealthCheckResult:
    """Given a resource data dict, assert that cpu data is ok.

    :param resource_utilization: the resource_utilization tuple to check
    :returns: a HealthCheckResult
    """
    try:
        utilization = percent_used(
            resource_utilization.total,
            resource_utilization.total - resource_utilization.free,
        )
    except ZeroDivisionError:
        utilization = 0
    message = "{}: {:.2f}/{:.2f}({:.2f}%) used. Threshold ({:.2f}%)".format(
        resource_utilization.metric,
        float(resource_utilization.total - resource_utilization.free),
        resource_utilization.total,
        utilization,
        threshold,
    )
    healthy = utilization <= threshold
    return HealthCheckResult(message=message, healthy=healthy)
Given a resource data dict, assert that cpu data is ok. :param resource_utilization: the resource_utilization tuple to check :returns: a HealthCheckResult
healthcheck_result_for_resource_utilization
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
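A worked sketch of the threshold check in healthcheck_result_for_resource_utilization above, with fabricated numbers; percent_used is assumed to return used / total * 100, which is not shown in this dump.

total, free, threshold = 32.0, 8.0, 80
used = total - free
utilization = used / total * 100  # stand-in for percent_used(total, used)
message = "{}: {:.2f}/{:.2f}({:.2f}%) used. Threshold ({:.2f}%)".format(
    "cpus", used, total, utilization, threshold
)
assert message == "cpus: 24.00/32.00(75.00%) used. Threshold (80.00%)"
assert utilization <= threshold  # healthy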
def key_func_for_attribute(
    attribute: str,
) -> Callable[[_SlaveT], str]:
    """Return a closure that given a slave, will return the value of a specific attribute.

    :param attribute: the attribute to inspect in the slave
    :returns: a closure, which takes a slave and returns the value of an attribute
    """

    def key_func(slave):
        return slave["attributes"].get(attribute, "unknown")

    return key_func
Return a closure that given a slave, will return the value of a specific attribute. :param attribute: the attribute to inspect in the slave :returns: a closure, which takes a slave and returns the value of an attribute
key_func_for_attribute
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def key_func_for_attribute_multi(
    attributes: Sequence[str],
) -> _GenericNodeGroupingFunctionT:
    """Return a closure that given a slave, will return the value of a list of attributes,
    compiled into a hashable tuple

    :param attributes: the attributes to inspect in the slave
    :returns: a closure, which takes a slave and returns the value of those attributes
    """

    def get_attribute(slave, attribute):
        if attribute == "hostname":
            return slave["hostname"]
        else:
            return slave["attributes"].get(attribute, "unknown")

    def key_func(slave):
        return tuple((a, get_attribute(slave, a)) for a in attributes)

    return key_func
Return a closure that given a slave, will return the value of a list of attributes, compiled into a hashable tuple :param attributes: the attributes to inspect in the slave :returns: a closure, which takes a slave and returns the value of those attributes
key_func_for_attribute_multi
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def key_func_for_attribute_multi_kube(
    attributes: Sequence[str],
) -> Callable[[V1Node], _KeyFuncRetT]:
    """Return a closure that given a node, will return the value of a list of attributes,
    compiled into a hashable tuple

    :param attributes: the attributes to inspect in the slave
    :returns: a closure, which takes a node and returns the value of those attributes
    """

    def get_attribute(node, attribute):
        return node.metadata.labels.get(paasta_prefixed(attribute), "unknown")

    def key_func(node):
        return tuple((a, get_attribute(node, a)) for a in attributes)

    return key_func
Return a closure that given a node, will return the value of a list of attributes, compiled into a hashable tuple :param attributes: the attributes to inspect in the slave :returns: a closure, which takes a node and returns the value of those attributes
key_func_for_attribute_multi_kube
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def group_slaves_by_key_func(
    key_func: _GenericNodeGroupingFunctionT,
    slaves: Sequence[_GenericNodeT],
    sort_func: _GenericNodeSortFunctionT = None,
) -> Mapping[_KeyFuncRetT, Sequence[_GenericNodeT]]:
    """Given a function for grouping slaves, return a dict where keys are the unique values
    returned by the key_func and the values are all those slaves which have that specific value.

    :param key_func: a function which consumes a slave and returns a value
    :param slaves: a list of slaves
    :returns: a dict of key: [slaves]
    """
    sorted_slaves: Sequence[_GenericNodeT]
    if sort_func is None:
        sorted_slaves = sorted(slaves, key=key_func)
    else:
        sorted_slaves = sort_func(slaves)

    return {k: list(v) for k, v in itertools.groupby(sorted_slaves, key=key_func)}
Given a function for grouping slaves, return a dict where keys are the unique values returned by the key_func and the values are all those slaves which have that specific value. :param key_func: a function which consumes a slave and returns a value :param slaves: a list of slaves :returns: a dict of key: [slaves]
group_slaves_by_key_func
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
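A small self-contained sketch of how group_slaves_by_key_func above combines with a key function like key_func_for_attribute; the slave dicts and the "pool" attribute are fabricated for illustration.

import itertools

slaves = [
    {"hostname": "host1", "attributes": {"pool": "default"}},
    {"hostname": "host2", "attributes": {"pool": "stateful"}},
    {"hostname": "host3", "attributes": {"pool": "default"}},
]


def key_func(slave):
    # mirrors key_func_for_attribute("pool") from the row above
    return slave["attributes"].get("pool", "unknown")


# same sort-then-groupby pattern as group_slaves_by_key_func with sort_func=None
sorted_slaves = sorted(slaves, key=key_func)
groups = {k: list(v) for k, v in itertools.groupby(sorted_slaves, key=key_func)}
assert sorted(groups) == ["default", "stateful"]
assert [s["hostname"] for s in groups["default"]] == ["host1", "host3"]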
def calculate_resource_utilization_for_slaves(
    slaves: Sequence[_SlaveT], tasks: Sequence[MesosTask]
) -> ResourceUtilizationDict:
    """Given a list of slaves and a list of tasks, calculate the total available
    resource available in that list of slaves, and the resources consumed by tasks
    running on those slaves.

    :param slaves: a list of slaves to calculate resource usage for
    :param tasks: the list of tasks running in the mesos cluster
    :returns: a dict, containing keys for "free" and "total" resources. Each of these keys
    is a ResourceInfo tuple, exposing a number for cpu, disk and mem.
    """
    resource_total_dict: _Counter[str] = Counter()
    for slave in slaves:
        filtered_resources = filter_mesos_state_metrics(slave["resources"])
        resource_total_dict.update(Counter(filtered_resources))
    resource_free_dict = copy.deepcopy(resource_total_dict)
    for task in tasks:
        task_resources = task["resources"]
        resource_free_dict.subtract(Counter(filter_mesos_state_metrics(task_resources)))
    for slave in slaves:
        filtered_resources = filter_mesos_state_metrics(
            reserved_maintenence_resources(slave["reserved_resources"])
        )
        resource_free_dict.subtract(Counter(filtered_resources))
    return {
        "free": ResourceInfo(
            cpus=resource_free_dict["cpus"],
            disk=resource_free_dict["disk"],
            mem=resource_free_dict["mem"],
            gpus=resource_free_dict.get("gpus", 0),
        ),
        "total": ResourceInfo(
            cpus=resource_total_dict["cpus"],
            disk=resource_total_dict["disk"],
            mem=resource_total_dict["mem"],
            gpus=resource_total_dict.get("gpus", 0),
        ),
        "slave_count": len(slaves),
    }
Given a list of slaves and a list of tasks, calculate the total available resource available in that list of slaves, and the resources consumed by tasks running on those slaves. :param slaves: a list of slaves to calculate resource usage for :param tasks: the list of tasks running in the mesos cluster :returns: a dict, containing keys for "free" and "total" resources. Each of these keys is a ResourceInfo tuple, exposing a number for cpu, disk and mem.
calculate_resource_utilization_for_slaves
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def calculate_resource_utilization_for_kube_nodes(
    nodes: Sequence[V1Node],
    pods_by_node: Mapping[str, Sequence[V1Pod]],
) -> ResourceUtilizationDict:
    """Given a list of Kubernetes nodes, calculate the total available
    resource available and the resources consumed in that list of nodes.

    :param nodes: a list of Kubernetes nodes to calculate resource usage for
    :returns: a dict, containing keys for "free" and "total" resources. Each of these keys
    is a ResourceInfo tuple, exposing a number for cpu, disk and mem.
    """
    resource_total_dict: _Counter[str] = Counter()
    resource_free_dict: _Counter[str] = Counter()
    for node in nodes:
        allocatable_resources = suffixed_number_dict_values(
            filter_kube_resources(node.status.allocatable)
        )
        resource_total_dict.update(Counter(allocatable_resources))
        allocated_resources = allocated_node_resources(pods_by_node[node.metadata.name])
        resource_free_dict.update(
            Counter(
                {
                    "cpu": allocatable_resources["cpu"] - allocated_resources["cpu"],
                    "ephemeral-storage": allocatable_resources["ephemeral-storage"]
                    - allocated_resources["ephemeral-storage"],
                    "memory": allocatable_resources["memory"]
                    - allocated_resources["memory"],
                }
            )
        )
    return {
        "free": ResourceInfo(
            cpus=resource_free_dict["cpu"],
            disk=resource_free_dict["ephemeral-storage"] / (1024**2),
            mem=resource_free_dict["memory"] / (1024**2),
            gpus=resource_free_dict.get("nvidia.com/gpu", 0),
        ),
        "total": ResourceInfo(
            cpus=resource_total_dict["cpu"],
            disk=resource_total_dict["ephemeral-storage"] / (1024**2),
            mem=resource_total_dict["memory"] / (1024**2),
            gpus=resource_total_dict.get("nvidia.com/gpu", 0),
        ),
        "slave_count": len(nodes),
    }
Given a list of Kubernetes nodes, calculate the total available resource available and the resources consumed in that list of nodes. :param nodes: a list of Kubernetes nodes to calculate resource usage for :returns: a dict, containing keys for "free" and "total" resources. Each of these keys is a ResourceInfo tuple, exposing a number for cpu, disk and mem.
calculate_resource_utilization_for_kube_nodes
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def filter_tasks_for_slaves(
    slaves: Sequence[_SlaveT], tasks: Sequence[MesosTask]
) -> Sequence[MesosTask]:
    """Given a list of slaves and a list of tasks, return a filtered list of tasks,
    where those returned belong to slaves in the list of slaves

    :param slaves: the list of slaves which the tasks provided should be running on.
    :param tasks: the tasks to filter
    :returns: a list of tasks, identical to that provided by the tasks param, but with
    only those where the task is running on one of the provided slaves included.
    """
    slave_ids = [slave["id"] for slave in slaves]
    return [task for task in tasks if task["slave_id"] in slave_ids]
Given a list of slaves and a list of tasks, return a filtered list of tasks, where those returned belong to slaves in the list of slaves :param slaves: the list of slaves which the tasks provided should be running on. :param tasks: the tasks to filter :returns: a list of tasks, identical to that provided by the tasks param, but with only those where the task is running on one of the provided slaves included.
filter_tasks_for_slaves
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def filter_slaves(
    slaves: Sequence[_GenericNodeT], filters: Sequence[_GenericNodeFilterFunctionT]
) -> Sequence[_GenericNodeT]:
    """Filter slaves by attributes

    :param slaves: list of slaves to filter
    :param filters: list of functions that take a slave and return whether the
    slave should be included
    :returns: list of slaves that return true for all the filters
    """
    if filters is None:
        return slaves
    return [s for s in slaves if all([f(s) for f in filters])]
Filter slaves by attributes :param slaves: list of slaves to filter :param filters: list of functions that take a slave and return whether the slave should be included :returns: list of slaves that return true for all the filters
filter_slaves
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def get_resource_utilization_by_grouping(
    grouping_func: _GenericNodeGroupingFunctionT,
    mesos_state: MesosState,
    filters: Sequence[_GenericNodeFilterFunctionT] = [],
    sort_func: _GenericNodeSortFunctionT = None,
) -> Mapping[_KeyFuncRetT, ResourceUtilizationDict]:
    """Given a function used to group slaves and mesos state, calculate resource utilization
    for each value of a given attribute.

    :grouping_func: a function that given a slave, will return the value of an attribute to group by.
    :param mesos_state: the mesos state
    :param filters: filters to apply to the slaves in the calculation, with filtering performed by filter_slaves
    :param sort_func: a function that given a list of slaves, will return the sorted list of slaves.
    :returns: a dict of {attribute_value: resource_usage}, where resource usage is the dict returned by
    ``calculate_resource_utilization_for_slaves`` for slaves grouped by attribute value.
    """
    slaves: Sequence[_SlaveT] = mesos_state.get("slaves", [])
    slaves = filter_slaves(slaves, filters)
    if not has_registered_slaves(mesos_state):
        raise ValueError("There are no slaves registered in the mesos state.")

    tasks = get_all_tasks_from_state(mesos_state, include_orphans=True)
    non_terminal_tasks = [task for task in tasks if not is_task_terminal(task)]

    slave_groupings = group_slaves_by_key_func(grouping_func, slaves, sort_func)

    return {
        attribute_value: calculate_resource_utilization_for_slaves(
            slaves=slaves, tasks=filter_tasks_for_slaves(slaves, non_terminal_tasks)
        )
        for attribute_value, slaves in slave_groupings.items()
    }
Given a function used to group slaves and mesos state, calculate resource utilization for each value of a given attribute. :grouping_func: a function that given a slave, will return the value of an attribute to group by. :param mesos_state: the mesos state :param filters: filters to apply to the slaves in the calculation, with filtering performed by filter_slaves :param sort_func: a function that given a list of slaves, will return the sorted list of slaves. :returns: a dict of {attribute_value: resource_usage}, where resource usage is the dict returned by ``calculate_resource_utilization_for_slaves`` for slaves grouped by attribute value.
get_resource_utilization_by_grouping
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def get_resource_utilization_by_grouping_kube(
    grouping_func: _GenericNodeGroupingFunctionT,
    kube_client: KubeClient,
    *,
    namespace: str,
    filters: Sequence[_GenericNodeFilterFunctionT] = [],
    sort_func: _GenericNodeSortFunctionT = None,
) -> Mapping[_KeyFuncRetT, ResourceUtilizationDict]:
    """Given a function used to group nodes, calculate resource utilization for each value
    of a given attribute.

    :grouping_func: a function that given a node, will return the value of an attribute to group by.
    :param kube_client: the Kubernetes client
    :param filters: filters to apply to the nodes in the calculation, with filtering performed by filter_slaves
    :param sort_func: a function that given a list of nodes, will return the sorted list of nodes.
    :returns: a dict of {attribute_value: resource_usage}, where resource usage is the dict returned by
    ``calculate_resource_utilization_for_kube_nodes`` for nodes grouped by attribute value.
    """
    nodes = get_all_nodes_cached(kube_client)
    nodes = filter_slaves(nodes, filters)
    if len(nodes) == 0:
        raise ValueError("There are no nodes registered in the Kubernetes cluster.")

    node_groupings = group_slaves_by_key_func(grouping_func, nodes, sort_func)

    pods = get_all_pods_cached(kube_client, namespace)

    pods_by_node = {}
    for node in nodes:
        pods_by_node[node.metadata.name] = [
            pod for pod in pods if pod.spec.node_name == node.metadata.name
        ]
    return {
        attribute_value: calculate_resource_utilization_for_kube_nodes(
            nodes, pods_by_node
        )
        for attribute_value, nodes in node_groupings.items()
    }
Given a function used to group nodes, calculate resource utilization for each value of a given attribute. :grouping_func: a function that given a node, will return the value of an attribute to group by. :param kube_client: the Kubernetes client :param filters: filters to apply to the nodes in the calculation, with filtering performed by filter_slaves :param sort_func: a function that given a list of nodes, will return the sorted list of nodes. :returns: a dict of {attribute_value: resource_usage}, where resource usage is the dict returned by ``calculate_resource_utilization_for_kube_nodes`` for nodes grouped by attribute value.
get_resource_utilization_by_grouping_kube
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def resource_utillizations_from_resource_info( total: ResourceInfo, free: ResourceInfo ) -> Sequence[ResourceUtilization]: """ Given two ResourceInfo tuples, one for total and one for free, create a ResourceUtilization tuple for each metric in the ResourceInfo. :param total: a ResourceInfo tuple of total capacity :param free: a ResourceInfo tuple of free capacity :returns: a list of ResourceUtilization tuples, one per ResourceInfo field """ return [ ResourceUtilization(metric=field, total=total[index], free=free[index]) for index, field in enumerate(ResourceInfo._fields) ]
Given two ResourceInfo tuples, one for total and one for free, create a ResourceUtilization tuple for each metric in the ResourceInfo. :param total: a ResourceInfo tuple of total capacity :param free: a ResourceInfo tuple of free capacity :returns: a list of ResourceUtilization tuples, one per ResourceInfo field
resource_utillizations_from_resource_info
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
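A small usage sketch for the helper above. The ResourceInfo field names (cpus, mem, disk, gpus) are an assumption inferred from how the namedtuple is used elsewhere in metastatus_lib.

# Hedged sketch: ResourceInfo field names are assumed for illustration.
total = ResourceInfo(cpus=32, mem=131072, disk=1048576, gpus=0)
free = ResourceInfo(cpus=8, mem=65536, disk=524288, gpus=0)
resource_utillizations_from_resource_info(total, free)
# -> [ResourceUtilization(metric='cpus', total=32, free=8),
#     ResourceUtilization(metric='mem', total=131072, free=65536), ...]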
def has_registered_slaves( mesos_state: MesosState, ) -> bool: """Return a boolean indicating if there are any slaves registered to the master according to the mesos state. :param mesos_state: the mesos state from the master :returns: a boolean, indicating if there are > 0 slaves """ return len(mesos_state.get("slaves", [])) > 0
Return a boolean indicating if there are any slaves registered to the master according to the mesos state. :param mesos_state: the mesos state from the master :returns: a boolean, indicating if there are > 0 slaves
has_registered_slaves
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def get_kube_resource_utilization_health( kube_client: KubeClient, ) -> Sequence[HealthCheckResult]: """Perform healthchecks against Kubernetes. :param kube_client: the Kubernetes client :returns: a list of HealthCheckResult tuples """ nodes = get_all_nodes_cached(kube_client) return [ assert_cpu_health(get_kube_cpu_status(nodes)), assert_memory_health(get_kube_memory_status(nodes)), assert_disk_health(get_kube_disk_status(nodes)), assert_gpu_health(get_kube_gpu_status(nodes)), assert_nodes_health(get_kube_nodes_health_status(nodes)), ]
Perform healthchecks against Kubernetes. :param kube_client: the Kubernetes client :returns: a list of HealthCheckResult tuples
get_kube_resource_utilization_health
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def get_kube_status( kube_client: KubeClient, namespace: str ) -> Sequence[HealthCheckResult]: """Gather information about Kubernetes. :param kube_client: the Kubernetes client :param namespace: the namespace to check :return: a list of HealthCheckResult tuples """ return run_healthchecks_with_param( [kube_client, namespace], [assert_kube_deployments, assert_kube_pods_running] )
Gather information about Kubernetes. :param kube_client: the Kubernetes client :param namespace: the namespace to check :return: a list of HealthCheckResult tuples
get_kube_status
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def critical_events_in_outputs(healthcheck_outputs): """Given a list of HealthCheckResults return those which are unhealthy.""" return [ healthcheck for healthcheck in healthcheck_outputs if healthcheck.healthy is False ]
Given a list of HealthCheckResults return those which are unhealthy.
critical_events_in_outputs
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def generate_summary_for_check(name, ok): """Given a check name and a boolean indicating if the service is OK, return a formatted message. """ status = PaastaColors.green("OK") if ok is True else PaastaColors.red("CRITICAL") summary = f"{name} Status: {status}" return summary
Given a check name and a boolean indicating if the service is OK, return a formatted message.
generate_summary_for_check
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def healthcheck_result_resource_utilization_pair_for_resource_utilization( utilization, threshold ): """Given a ResourceUtilization, produce a tuple of (HealthCheckResult, ResourceUtilization), where that HealthCheckResult describes the 'health' of a given utilization. :param utilization: a ResourceUtilization tuple :param threshold: a threshold which decides the health of the given ResourceUtilization :returns: a tuple of (HealthCheckResult, ResourceUtilization) """ return ( healthcheck_result_for_resource_utilization(utilization, threshold), utilization, )
Given a ResourceUtilization, produce a tuple of (HealthCheckResult, ResourceUtilization), where that HealthCheckResult describes the 'health' of a given utilization. :param utilization: a ResourceUtilization tuple :param threshold: a threshold which decides the health of the given ResourceUtilization :returns: a tuple of (HealthCheckResult, ResourceUtilization)
healthcheck_result_resource_utilization_pair_for_resource_utilization
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def format_table_column_for_healthcheck_resource_utilization_pair( healthcheck_utilization_pair, ): """Given a tuple of (HealthCheckResult, ResourceUtilization), return a string representation of the ResourceUtilization such that it is formatted according to the value of HealthCheckResult.healthy. :param healthcheck_utilization_pair: a tuple of (HealthCheckResult, ResourceUtilization) :returns: a string representing the ResourceUtilization. """ color_func = ( PaastaColors.green if healthcheck_utilization_pair[0].healthy else PaastaColors.red ) utilization = ( healthcheck_utilization_pair[1].total - healthcheck_utilization_pair[1].free ) if int(healthcheck_utilization_pair[1].total) == 0: utilization_perc = 100 else: utilization_perc = ( utilization / float(healthcheck_utilization_pair[1].total) * 100 ) if healthcheck_utilization_pair[1].metric not in ["cpus", "gpus"]: return color_func( "{}/{} ({:.2f}%)".format( naturalsize(utilization * 1024 * 1024, gnu=True), naturalsize( healthcheck_utilization_pair[1].total * 1024 * 1024, gnu=True ), utilization_perc, ) ) else: return color_func( "{:.2f}/{:.0f} ({:.2f}%)".format( utilization, healthcheck_utilization_pair[1].total, utilization_perc ) )
Given a tuple of (HealthCheckResult, ResourceUtilization), return a string representation of the ResourceUtilization such that it is formatted according to the value of HealthCheckResult.healthy. :param healthcheck_utilization_pair: a tuple of (HealthCheckResult, ResourceUtilization) :returns: a string representing the ResourceUtilization.
format_table_column_for_healthcheck_resource_utilization_pair
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
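A usage sketch for the formatter above. The HealthCheckResult field names are an assumption; memory values are given in MB, so the rendered string goes through naturalsize.

# Hedged sketch: HealthCheckResult(message=..., healthy=...) is assumed to be
# the namedtuple shape used by metastatus_lib.
pair = (
    HealthCheckResult(message="mem", healthy=True),
    ResourceUtilization(metric="mem", total=10240, free=4096),  # values in MB
)
format_table_column_for_healthcheck_resource_utilization_pair(pair)
# -> "6.0G/10.0G (60.00%)", rendered green because the pair is healthy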
def format_row_for_resource_utilization_healthchecks(healthcheck_utilization_pairs): """Given a list of (HealthCheckResult, ResourceUtilization) tuples, return a list with each of those tuples represented by a formatted string. :param healthcheck_utilization_pairs: a list of (HealthCheckResult, ResourceUtilization) tuples. :returns: a list containing a string representation of each (HealthCheckResult, ResourceUtilization) tuple. """ return [ format_table_column_for_healthcheck_resource_utilization_pair(pair) for pair in healthcheck_utilization_pairs ]
Given a list of (HealthCheckResult, ResourceUtilization) tuples, return a list with each of those tuples represented by a formatted string. :param healthcheck_utilization_pairs: a list of (HealthCheckResult, ResourceUtilization) tuples. :returns: a list containing a string representation of each (HealthCheckResult, ResourceUtilization) tuple.
format_row_for_resource_utilization_healthchecks
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def get_table_rows_for_resource_info_dict( attribute_values, healthcheck_utilization_pairs ): """A wrapper method that joins the attribute values and the formatted utilization columns into a single table row. :param attribute_values: the values of the attributes associated with the row; these become the leading entries of the returned list. :param healthcheck_utilization_pairs: a list of 2-tuples, where each tuple has the elements (HealthCheckResult, ResourceUtilization) :returns: a list of strings, representing a row in a table to be formatted. """ return attribute_values + format_row_for_resource_utilization_healthchecks( healthcheck_utilization_pairs )
A wrapper method that joins the attribute values and the formatted utilization columns into a single table row. :param attribute_values: the values of the attributes associated with the row; these become the leading entries of the returned list. :param healthcheck_utilization_pairs: a list of 2-tuples, where each tuple has the elements (HealthCheckResult, ResourceUtilization) :returns: a list of strings, representing a row in a table to be formatted.
get_table_rows_for_resource_info_dict
python
Yelp/paasta
paasta_tools/metrics/metastatus_lib.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/metrics/metastatus_lib.py
Apache-2.0
def pool(self): """Create the thread pool on first request to avoid instantiating an unused threadpool for blocking clients. """ if self._pool is None: atexit.register(self.close) self._pool = ThreadPool(self.pool_threads) return self._pool
Create the thread pool on first request to avoid instantiating an unused threadpool for blocking clients.
pool
python
Yelp/paasta
paasta_tools/paastaapi/api_client.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/api_client.py
Apache-2.0
def sanitize_for_serialization(cls, obj): """Builds a JSON POST object. If obj is None, return None. If obj is str, int, long, float, bool, return directly. If obj is datetime.datetime, datetime.date convert to string in iso8601 format. If obj is list, sanitize each element in the list. If obj is dict, return the dict. If obj is OpenAPI model, return the properties dict. :param obj: The data to serialize. :return: The serialized form of data. """ if isinstance(obj, (ModelNormal, ModelComposed)): return { key: cls.sanitize_for_serialization(val) for key, val in model_to_dict(obj, serialize=True).items() } elif isinstance(obj, (str, int, float, none_type, bool)): return obj elif isinstance(obj, (datetime, date)): return obj.isoformat() elif isinstance(obj, ModelSimple): return cls.sanitize_for_serialization(obj.value) elif isinstance(obj, (list, tuple)): return [cls.sanitize_for_serialization(item) for item in obj] if isinstance(obj, dict): return {key: cls.sanitize_for_serialization(val) for key, val in obj.items()} raise ApiValueError('Unable to prepare type {} for serialization'.format(obj.__class__.__name__))
Builds a JSON POST object. If obj is None, return None. If obj is str, int, long, float, bool, return directly. If obj is datetime.datetime, datetime.date convert to string in iso8601 format. If obj is list, sanitize each element in the list. If obj is dict, return the dict. If obj is OpenAPI model, return the properties dict. :param obj: The data to serialize. :return: The serialized form of data.
sanitize_for_serialization
python
Yelp/paasta
paasta_tools/paastaapi/api_client.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/api_client.py
Apache-2.0
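An illustrative call to the serializer above (the `cls` parameter suggests it is exposed as a classmethod); the input values are made up.

from datetime import datetime

# Hedged sketch: datetimes become ISO-8601 strings, tuples become lists,
# primitives and None pass through unchanged.
ApiClient.sanitize_for_serialization(
    {"when": datetime(2024, 1, 1), "replicas": 3, "tags": ("canary", None)}
)
# -> {"when": "2024-01-01T00:00:00", "replicas": 3, "tags": ["canary", None]}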
def deserialize(self, response, response_type, _check_type): """Deserializes response into an object. :param response: RESTResponse object to be deserialized. :param response_type: For the response, a tuple containing: valid classes a list containing valid classes (for list schemas) a dict containing a tuple of valid classes as the value Example values: (str,) (Pet,) (float, none_type) ([int, none_type],) ({str: (bool, str, int, float, date, datetime, str, none_type)},) :param _check_type: boolean, whether to check the types of the data received from the server :type _check_type: bool :return: deserialized object. """ # handle file downloading # save response body into a tmp file and return the instance if response_type == (file_type,): content_disposition = response.getheader("Content-Disposition") return deserialize_file(response.data, self.configuration, content_disposition=content_disposition) # fetch data from response object try: received_data = json.loads(response.data) except ValueError: received_data = response.data # store our data under the key of 'received_data' so users have some # context if they are deserializing a string and the data type is wrong deserialized_data = validate_and_convert_types( received_data, response_type, ['received_data'], True, _check_type, configuration=self.configuration ) return deserialized_data
Deserializes response into an object. :param response: RESTResponse object to be deserialized. :param response_type: For the response, a tuple containing: valid classes a list containing valid classes (for list schemas) a dict containing a tuple of valid classes as the value Example values: (str,) (Pet,) (float, none_type) ([int, none_type],) ({str: (bool, str, int, float, date, datetime, str, none_type)},) :param _check_type: boolean, whether to check the types of the data received from the server :type _check_type: bool :return: deserialized object.
deserialize
python
Yelp/paasta
paasta_tools/paastaapi/api_client.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/api_client.py
Apache-2.0
def call_api( self, resource_path: str, method: str, path_params: typing.Optional[typing.Dict[str, typing.Any]] = None, query_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None, header_params: typing.Optional[typing.Dict[str, typing.Any]] = None, body: typing.Optional[typing.Any] = None, post_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None, files: typing.Optional[typing.Dict[str, typing.List[io.IOBase]]] = None, response_type: typing.Optional[typing.Tuple[typing.Any]] = None, auth_settings: typing.Optional[typing.List[str]] = None, async_req: typing.Optional[bool] = None, _return_http_data_only: typing.Optional[bool] = None, collection_formats: typing.Optional[typing.Dict[str, str]] = None, _preload_content: bool = True, _request_timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None, _host: typing.Optional[str] = None, _check_type: typing.Optional[bool] = None ): """Makes the HTTP request (synchronous) and returns deserialized data. To make an async_req request, set the async_req parameter. :param resource_path: Path to method endpoint. :param method: Method to call. :param path_params: Path parameters in the url. :param query_params: Query parameters in the url. :param header_params: Header parameters to be placed in the request header. :param body: Request body. :param post_params dict: Request post form parameters, for `application/x-www-form-urlencoded`, `multipart/form-data`. :param auth_settings list: Auth Settings names for the request. :param response_type: For the response, a tuple containing: valid classes a list containing valid classes (for list schemas) a dict containing a tuple of valid classes as the value Example values: (str,) (Pet,) (float, none_type) ([int, none_type],) ({str: (bool, str, int, float, date, datetime, str, none_type)},) :param files: key -> field name, value -> a list of open file objects for `multipart/form-data`. :type files: dict :param async_req bool: execute request asynchronously :type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param collection_formats: dict of collection formats for path, query, header, and post parameters. :type collection_formats: dict, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :param _check_type: boolean describing if the data back from the server should have its type checked. :type _check_type: bool, optional :return: If async_req parameter is True, the request will be called asynchronously. The method will return the request thread. If parameter async_req is False or missing, then the method will return the response directly. 
""" if not async_req: return self.__call_api(resource_path, method, path_params, query_params, header_params, body, post_params, files, response_type, auth_settings, _return_http_data_only, collection_formats, _preload_content, _request_timeout, _host, _check_type) return self.pool.apply_async(self.__call_api, (resource_path, method, path_params, query_params, header_params, body, post_params, files, response_type, auth_settings, _return_http_data_only, collection_formats, _preload_content, _request_timeout, _host, _check_type))
Makes the HTTP request (synchronous) and returns deserialized data. To make an async_req request, set the async_req parameter. :param resource_path: Path to method endpoint. :param method: Method to call. :param path_params: Path parameters in the url. :param query_params: Query parameters in the url. :param header_params: Header parameters to be placed in the request header. :param body: Request body. :param post_params dict: Request post form parameters, for `application/x-www-form-urlencoded`, `multipart/form-data`. :param auth_settings list: Auth Settings names for the request. :param response_type: For the response, a tuple containing: valid classes a list containing valid classes (for list schemas) a dict containing a tuple of valid classes as the value Example values: (str,) (Pet,) (float, none_type) ([int, none_type],) ({str: (bool, str, int, float, date, datetime, str, none_type)},) :param files: key -> field name, value -> a list of open file objects for `multipart/form-data`. :type files: dict :param async_req bool: execute request asynchronously :type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param collection_formats: dict of collection formats for path, query, header, and post parameters. :type collection_formats: dict, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :param _check_type: boolean describing if the data back from the server should have its type checked. :type _check_type: bool, optional :return: If async_req parameter is True, the request will be called asynchronously. The method will return the request thread. If parameter async_req is False or missing, then the method will return the response directly.
call_api
python
Yelp/paasta
paasta_tools/paastaapi/api_client.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/api_client.py
Apache-2.0
def request(self, method, url, query_params=None, headers=None, post_params=None, body=None, _preload_content=True, _request_timeout=None): """Makes the HTTP request using RESTClient.""" if method == "GET": return self.rest_client.GET(url, query_params=query_params, _preload_content=_preload_content, _request_timeout=_request_timeout, headers=headers) elif method == "HEAD": return self.rest_client.HEAD(url, query_params=query_params, _preload_content=_preload_content, _request_timeout=_request_timeout, headers=headers) elif method == "OPTIONS": return self.rest_client.OPTIONS(url, query_params=query_params, headers=headers, post_params=post_params, _preload_content=_preload_content, _request_timeout=_request_timeout, body=body) elif method == "POST": return self.rest_client.POST(url, query_params=query_params, headers=headers, post_params=post_params, _preload_content=_preload_content, _request_timeout=_request_timeout, body=body) elif method == "PUT": return self.rest_client.PUT(url, query_params=query_params, headers=headers, post_params=post_params, _preload_content=_preload_content, _request_timeout=_request_timeout, body=body) elif method == "PATCH": return self.rest_client.PATCH(url, query_params=query_params, headers=headers, post_params=post_params, _preload_content=_preload_content, _request_timeout=_request_timeout, body=body) elif method == "DELETE": return self.rest_client.DELETE(url, query_params=query_params, headers=headers, _preload_content=_preload_content, _request_timeout=_request_timeout, body=body) else: raise ApiValueError( "http method must be `GET`, `HEAD`, `OPTIONS`," " `POST`, `PATCH`, `PUT` or `DELETE`." )
Makes the HTTP request using RESTClient.
request
python
Yelp/paasta
paasta_tools/paastaapi/api_client.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/api_client.py
Apache-2.0
def parameters_to_tuples(self, params, collection_formats): """Get parameters as list of tuples, formatting collections. :param params: Parameters as dict or list of two-tuples :param dict collection_formats: Parameter collection formats :return: Parameters as list of tuples, collections formatted """ new_params = [] if collection_formats is None: collection_formats = {} for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501 if k in collection_formats: collection_format = collection_formats[k] if collection_format == 'multi': new_params.extend((k, value) for value in v) else: if collection_format == 'ssv': delimiter = ' ' elif collection_format == 'tsv': delimiter = '\t' elif collection_format == 'pipes': delimiter = '|' else: # csv is the default delimiter = ',' new_params.append( (k, delimiter.join(str(value) for value in v))) else: new_params.append((k, v)) return new_params
Get parameters as list of tuples, formatting collections. :param params: Parameters as dict or list of two-tuples :param dict collection_formats: Parameter collection formats :return: Parameters as list of tuples, collections formatted
parameters_to_tuples
python
Yelp/paasta
paasta_tools/paastaapi/api_client.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/api_client.py
Apache-2.0
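A usage sketch for the collection formatting above; `client` is assumed to be an existing ApiClient instance (the method only reads its arguments).

# Hedged sketch: 'csv' joins values with commas, 'multi' repeats the key.
params = {"instances": ["main", "canary"], "verbose": 1}
client.parameters_to_tuples(params, {"instances": "csv"})
# -> [("instances", "main,canary"), ("verbose", 1)]
client.parameters_to_tuples(params, {"instances": "multi"})
# -> [("instances", "main"), ("instances", "canary"), ("verbose", 1)]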
def files_parameters(self, files: typing.Optional[typing.Dict[str, typing.List[io.IOBase]]] = None): """Builds form parameters. :param files: None or a dict with key=param_name and value is a list of open file objects :return: List of tuples of form parameters with file data """ if files is None: return [] params = [] for param_name, file_instances in files.items(): if file_instances is None: # if the file field is nullable, skip None values continue for file_instance in file_instances: if file_instance is None: # if the file field is nullable, skip None values continue if file_instance.closed is True: raise ApiValueError( "Cannot read a closed file. The passed in file_type " "for %s must be open." % param_name ) filename = os.path.basename(file_instance.name) filedata = file_instance.read() mimetype = (mimetypes.guess_type(filename)[0] or 'application/octet-stream') params.append( tuple([param_name, tuple([filename, filedata, mimetype])])) file_instance.close() return params
Builds form parameters. :param files: None or a dict with key=param_name and value is a list of open file objects :return: List of tuples of form parameters with file data
files_parameters
python
Yelp/paasta
paasta_tools/paastaapi/api_client.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/api_client.py
Apache-2.0
def select_header_accept(self, accepts): """Returns `Accept` based on an array of accepts provided. :param accepts: List of headers. :return: Accept (e.g. application/json). """ if not accepts: return accepts = [x.lower() for x in accepts] if 'application/json' in accepts: return 'application/json' else: return ', '.join(accepts)
Returns `Accept` based on an array of accepts provided. :param accepts: List of headers. :return: Accept (e.g. application/json).
select_header_accept
python
Yelp/paasta
paasta_tools/paastaapi/api_client.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/api_client.py
Apache-2.0
def select_header_content_type(self, content_types): """Returns `Content-Type` based on an array of content_types provided. :param content_types: List of content-types. :return: Content-Type (e.g. application/json). """ if not content_types: return 'application/json' content_types = [x.lower() for x in content_types] if 'application/json' in content_types or '*/*' in content_types: return 'application/json' else: return content_types[0]
Returns `Content-Type` based on an array of content_types provided. :param content_types: List of content-types. :return: Content-Type (e.g. application/json).
select_header_content_type
python
Yelp/paasta
paasta_tools/paastaapi/api_client.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/api_client.py
Apache-2.0
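A usage sketch for the two header helpers above; `client` is assumed to be an existing ApiClient instance.

client.select_header_accept(["application/XML", "application/json"])
# -> "application/json" (preferred whenever it is offered)
client.select_header_accept(["application/xml", "text/html"])
# -> "application/xml, text/html"
client.select_header_content_type([])
# -> "application/json" (the default when nothing is specified)
client.select_header_content_type(["text/plain", "application/json"])
# -> "application/json"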
def update_params_for_auth(self, headers, querys, auth_settings, resource_path, method, body): """Updates header and query params based on authentication setting. :param headers: Header parameters dict to be updated. :param querys: Query parameters tuple list to be updated. :param auth_settings: Authentication setting identifiers list. :param resource_path: A string representation of the HTTP request resource path. :param method: A string representation of the HTTP request method. :param body: An object representing the body of the HTTP request. The object type is the return value of _encoder.default(). """ if not auth_settings: return for auth in auth_settings: auth_setting = self.configuration.auth_settings().get(auth) if auth_setting: if auth_setting['in'] == 'cookie': headers['Cookie'] = auth_setting['value'] elif auth_setting['in'] == 'header': if auth_setting['type'] != 'http-signature': headers[auth_setting['key']] = auth_setting['value'] elif auth_setting['in'] == 'query': querys.append((auth_setting['key'], auth_setting['value'])) else: raise ApiValueError( 'Authentication token must be in `query` or `header`' )
Updates header and query params based on authentication setting. :param headers: Header parameters dict to be updated. :param querys: Query parameters tuple list to be updated. :param auth_settings: Authentication setting identifiers list. :param resource_path: A string representation of the HTTP request resource path. :param method: A string representation of the HTTP request method. :param body: An object representing the body of the HTTP request. The object type is the return value of _encoder.default().
update_params_for_auth
python
Yelp/paasta
paasta_tools/paastaapi/api_client.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/api_client.py
Apache-2.0
def __init__(self, settings=None, params_map=None, root_map=None, headers_map=None, api_client=None, callable=None): """Creates an endpoint Args: settings (dict): see below key value pairs 'response_type' (tuple/None): response type 'auth' (list): a list of auth type keys 'endpoint_path' (str): the endpoint path 'operation_id' (str): endpoint string identifier 'http_method' (str): POST/PUT/PATCH/GET etc 'servers' (list): list of str servers that this endpoint is at params_map (dict): see below key value pairs 'all' (list): list of str endpoint parameter names 'required' (list): list of required parameter names 'nullable' (list): list of nullable parameter names 'enum' (list): list of parameters with enum values 'validation' (list): list of parameters with validations root_map 'validations' (dict): the dict mapping endpoint parameter tuple paths to their validation dictionaries 'allowed_values' (dict): the dict mapping endpoint parameter tuple paths to their allowed_values (enum) dictionaries 'openapi_types' (dict): param_name to openapi type 'attribute_map' (dict): param_name to camelCase name 'location_map' (dict): param_name to 'body', 'file', 'form', 'header', 'path', 'query' collection_format_map (dict): param_name to `csv` etc. headers_map (dict): see below key value pairs 'accept' (list): list of Accept header strings 'content_type' (list): list of Content-Type header strings api_client (ApiClient) api client instance callable (function): the function which is invoked when the Endpoint is called """ self.settings = settings self.params_map = params_map self.params_map['all'].extend([ 'async_req', '_host_index', '_preload_content', '_request_timeout', '_return_http_data_only', '_check_input_type', '_check_return_type' ]) self.params_map['nullable'].extend(['_request_timeout']) self.validations = root_map['validations'] self.allowed_values = root_map['allowed_values'] self.openapi_types = root_map['openapi_types'] extra_types = { 'async_req': (bool,), '_host_index': (none_type, int), '_preload_content': (bool,), '_request_timeout': (none_type, int, (int,), [int]), '_return_http_data_only': (bool,), '_check_input_type': (bool,), '_check_return_type': (bool,) } self.openapi_types.update(extra_types) self.attribute_map = root_map['attribute_map'] self.location_map = root_map['location_map'] self.collection_format_map = root_map['collection_format_map'] self.headers_map = headers_map self.api_client = api_client self.callable = callable
Creates an endpoint Args: settings (dict): see below key value pairs 'response_type' (tuple/None): response type 'auth' (list): a list of auth type keys 'endpoint_path' (str): the endpoint path 'operation_id' (str): endpoint string identifier 'http_method' (str): POST/PUT/PATCH/GET etc 'servers' (list): list of str servers that this endpoint is at params_map (dict): see below key value pairs 'all' (list): list of str endpoint parameter names 'required' (list): list of required parameter names 'nullable' (list): list of nullable parameter names 'enum' (list): list of parameters with enum values 'validation' (list): list of parameters with validations root_map 'validations' (dict): the dict mapping endpoint parameter tuple paths to their validation dictionaries 'allowed_values' (dict): the dict mapping endpoint parameter tuple paths to their allowed_values (enum) dictionaries 'openapi_types' (dict): param_name to openapi type 'attribute_map' (dict): param_name to camelCase name 'location_map' (dict): param_name to 'body', 'file', 'form', 'header', 'path', 'query' collection_format_map (dict): param_name to `csv` etc. headers_map (dict): see below key value pairs 'accept' (list): list of Accept header strings 'content_type' (list): list of Content-Type header strings api_client (ApiClient) api client instance callable (function): the function which is invoked when the Endpoint is called
__init__
python
Yelp/paasta
paasta_tools/paastaapi/api_client.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/api_client.py
Apache-2.0
def get_default_copy(cls): """Return new instance of configuration. This method returns a newly created Configuration object (built with the default constructor), or a copy of the default configuration previously passed to the set_default method. :return: The configuration object. """ if cls._default is not None: return copy.deepcopy(cls._default) return Configuration()
Return new instance of configuration. This method returns a newly created Configuration object (built with the default constructor), or a copy of the default configuration previously passed to the set_default method. :return: The configuration object.
get_default_copy
python
Yelp/paasta
paasta_tools/paastaapi/configuration.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/configuration.py
Apache-2.0
def logger_file(self, value): """The logger file. If the logger_file is None, then add stream handler and remove file handler. Otherwise, add file handler and remove stream handler. :param value: The logger_file path. :type: str """ self.__logger_file = value if self.__logger_file: # If set logging file, # then add file handler and remove stream handler. self.logger_file_handler = logging.FileHandler(self.__logger_file) self.logger_file_handler.setFormatter(self.logger_formatter) for _, logger in self.logger.items(): logger.addHandler(self.logger_file_handler)
The logger file. If the logger_file is None, then add stream handler and remove file handler. Otherwise, add file handler and remove stream handler. :param value: The logger_file path. :type: str
logger_file
python
Yelp/paasta
paasta_tools/paastaapi/configuration.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/configuration.py
Apache-2.0
def debug(self, value): """Debug status :param value: The debug status, True or False. :type: bool """ self.__debug = value if self.__debug: # if debug status is True, turn on debug logging for _, logger in self.logger.items(): logger.setLevel(logging.DEBUG) # turn on http_client debug http_client.HTTPConnection.debuglevel = 1 else: # if debug status is False, turn off debug logging, # setting log level to default `logging.WARNING` for _, logger in self.logger.items(): logger.setLevel(logging.WARNING) # turn off http_client debug http_client.HTTPConnection.debuglevel = 0
Debug status :param value: The debug status, True or False. :type: bool
debug
python
Yelp/paasta
paasta_tools/paastaapi/configuration.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/configuration.py
Apache-2.0
def logger_format(self, value): """The logger format. The logger_formatter will be updated when logger_format is set. :param value: The format string. :type: str """ self.__logger_format = value self.logger_formatter = logging.Formatter(self.__logger_format)
The logger format. The logger_formatter will be updated when logger_format is set. :param value: The format string. :type: str
logger_format
python
Yelp/paasta
paasta_tools/paastaapi/configuration.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/configuration.py
Apache-2.0
def get_api_key_with_prefix(self, identifier, alias=None): """Gets API key (with prefix if set). :param identifier: The identifier of apiKey. :param alias: The alternative identifier of apiKey. :return: The token for api key authentication. """ if self.refresh_api_key_hook is not None: self.refresh_api_key_hook(self) key = self.api_key.get(identifier, self.api_key.get(alias) if alias is not None else None) if key: prefix = self.api_key_prefix.get(identifier) if prefix: return "%s %s" % (prefix, key) else: return key
Gets API key (with prefix if set). :param identifier: The identifier of apiKey. :param alias: The alternative identifier of apiKey. :return: The token for api key authentication.
get_api_key_with_prefix
python
Yelp/paasta
paasta_tools/paastaapi/configuration.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/configuration.py
Apache-2.0
def get_basic_auth_token(self): """Gets HTTP basic authentication header (string). :return: The token for basic HTTP authentication. """ username = "" if self.username is not None: username = self.username password = "" if self.password is not None: password = self.password return urllib3.util.make_headers( basic_auth=username + ':' + password ).get('authorization')
Gets HTTP basic authentication header (string). :return: The token for basic HTTP authentication.
get_basic_auth_token
python
Yelp/paasta
paasta_tools/paastaapi/configuration.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/configuration.py
Apache-2.0
def to_debug_report(self): """Gets the essential information for debugging. :return: The report for debugging. """ return "Python SDK Debug Report:\n"\ "OS: {env}\n"\ "Python Version: {pyversion}\n"\ "Version of the API: 1.2.0\n"\ "SDK Package Version: 1.0.0".\ format(env=sys.platform, pyversion=sys.version)
Gets the essential information for debugging. :return: The report for debugging.
to_debug_report
python
Yelp/paasta
paasta_tools/paastaapi/configuration.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/configuration.py
Apache-2.0
def get_host_settings(self): """Gets an array of host settings :return: An array of host settings """ return [ { 'url': "{scheme}://{host}/{basePath}", 'description': "No description provided", 'variables': { 'basePath': { 'description': "No description provided", 'default_value': "v1", }, 'host': { 'description': "No description provided", 'default_value': "localhost", }, 'scheme': { 'description': "No description provided", 'default_value': "http", 'enum_values': [ "http", "https" ] } } } ]
Gets an array of host settings :return: An array of host settings
get_host_settings
python
Yelp/paasta
paasta_tools/paastaapi/configuration.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/configuration.py
Apache-2.0
def get_host_from_settings(self, index, variables=None, servers=None): """Gets host URL based on the index and variables :param index: array index of the host settings :param variables: hash of variable and the corresponding value :param servers: an array of host settings or None :return: URL based on host settings """ if index is None: return self._base_path variables = {} if variables is None else variables servers = self.get_host_settings() if servers is None else servers try: server = servers[index] except IndexError: raise ValueError( "Invalid index {0} when selecting the host settings. " "Must be less than {1}".format(index, len(servers))) url = server['url'] # go through variables and replace placeholders for variable_name, variable in server.get('variables', {}).items(): used_value = variables.get( variable_name, variable['default_value']) if 'enum_values' in variable \ and used_value not in variable['enum_values']: raise ValueError( "The variable `{0}` in the host URL has invalid value " "{1}. Must be {2}.".format( variable_name, variables[variable_name], variable['enum_values'])) url = url.replace("{" + variable_name + "}", used_value) return url
Gets host URL based on the index and variables :param index: array index of the host settings :param variables: hash of variable and the corresponding value :param servers: an array of host settings or None :return: URL based on host settings
get_host_from_settings
python
Yelp/paasta
paasta_tools/paastaapi/configuration.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/configuration.py
Apache-2.0
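A usage sketch for the host templating above, using the server template returned by get_host_settings; the hostname is illustrative.

conf = Configuration()
conf.get_host_from_settings(
    0, variables={"host": "paasta-api.example.com", "scheme": "https"}
)
# -> "https://paasta-api.example.com/v1" (basePath falls back to its default)
conf.get_host_from_settings(None)
# -> the configured base path, unchanged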
def __init__(self, msg, path_to_item=None): """ Args: msg (str): the exception message Keyword Args: path_to_item (list) the path to the exception in the received_data dict. None if unset """ self.path_to_item = path_to_item full_msg = msg if path_to_item: full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) super(ApiValueError, self).__init__(full_msg)
Args: msg (str): the exception message Keyword Args: path_to_item (list) the path to the exception in the received_data dict. None if unset
__init__
python
Yelp/paasta
paasta_tools/paastaapi/exceptions.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/exceptions.py
Apache-2.0
def __init__(self, msg, path_to_item=None): """ Raised when an attribute reference or assignment fails. Args: msg (str): the exception message Keyword Args: path_to_item (None/list) the path to the exception in the received_data dict """ self.path_to_item = path_to_item full_msg = msg if path_to_item: full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) super(ApiAttributeError, self).__init__(full_msg)
Raised when an attribute reference or assignment fails. Args: msg (str): the exception message Keyword Args: path_to_item (None/list) the path to the exception in the received_data dict
__init__
python
Yelp/paasta
paasta_tools/paastaapi/exceptions.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/exceptions.py
Apache-2.0
def __init__(self, msg, path_to_item=None): """ Args: msg (str): the exception message Keyword Args: path_to_item (None/list) the path to the exception in the received_data dict """ self.path_to_item = path_to_item full_msg = msg if path_to_item: full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) super(ApiKeyError, self).__init__(full_msg)
Args: msg (str): the exception message Keyword Args: path_to_item (None/list) the path to the exception in the received_data dict
__init__
python
Yelp/paasta
paasta_tools/paastaapi/exceptions.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/exceptions.py
Apache-2.0
def allows_single_value_input(cls): """ This function returns True if the input composed schema model or any descendant model allows a value-only input This is true for cases where oneOf contains items like: oneOf: - float - NumberWithValidation - StringEnum - ArrayModel - null TODO: lru_cache this """ if ( issubclass(cls, ModelSimple) or cls in PRIMITIVE_TYPES ): return True elif issubclass(cls, ModelComposed): if not cls._composed_schemas['oneOf']: return False return any(allows_single_value_input(c) for c in cls._composed_schemas['oneOf']) return False
This function returns True if the input composed schema model or any descendant model allows a value-only input This is true for cases where oneOf contains items like: oneOf: - float - NumberWithValidation - StringEnum - ArrayModel - null TODO: lru_cache this
allows_single_value_input
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
def composed_model_input_classes(cls): """ This function returns a list of the possible models that can be accepted as inputs. TODO: lru_cache this """ if issubclass(cls, ModelSimple) or cls in PRIMITIVE_TYPES: return [cls] elif issubclass(cls, ModelNormal): if cls.discriminator is None: return [cls] else: return get_discriminated_classes(cls) elif issubclass(cls, ModelComposed): if not cls._composed_schemas['oneOf']: return [] if cls.discriminator is None: input_classes = [] for c in cls._composed_schemas['oneOf']: input_classes.extend(composed_model_input_classes(c)) return input_classes else: return get_discriminated_classes(cls) return []
This function returns a list of the possible models that can be accepted as inputs. TODO: lru_cache this
composed_model_input_classes
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
def __setitem__(self, name, value): """set the value of an attribute using square-bracket notation: `instance[attr] = val`""" if name in self.required_properties: self.__dict__[name] = value return self.set_attribute(name, value)
set the value of an attribute using square-bracket notation: `instance[attr] = val`
__setitem__
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
def get(self, name, default=None): """returns the value of an attribute or some default value if the attribute was not set""" if name in self.required_properties: return self.__dict__[name] return self.__dict__['_data_store'].get(name, default)
returns the value of an attribute or some default value if the attribute was not set
get
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
def __getitem__(self, name): """get the value of an attribute using square-bracket notation: `instance[attr]`""" if name in self: return self.get(name) raise ApiAttributeError( "{0} has no attribute '{1}'".format( type(self).__name__, name), [e for e in [self._path_to_item, name] if e] )
get the value of an attribute using square-bracket notation: `instance[attr]`
__getitem__
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
def __contains__(self, name): """used by `in` operator to check if an attribute value was set in an instance: `'attr' in instance`""" if name in self.required_properties: return name in self.__dict__ return name in self.__dict__['_data_store']
used by `in` operator to check if an attribute value was set in an instance: `'attr' in instance`
__contains__
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, self.__class__): return False this_val = self._data_store['value'] that_val = other._data_store['value'] types = set() types.add(this_val.__class__) types.add(that_val.__class__) vals_equal = this_val == that_val return vals_equal
Returns true if both objects are equal
__eq__
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
def __setitem__(self, name, value): """set the value of an attribute using square-bracket notation: `instance[attr] = val`""" if name in self.required_properties: self.__dict__[name] = value return self.set_attribute(name, value)
set the value of an attribute using square-bracket notation: `instance[attr] = val`
__setitem__
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
def get(self, name, default=None): """returns the value of an attribute or some default value if the attribute was not set""" if name in self.required_properties: return self.__dict__[name] return self.__dict__['_data_store'].get(name, default)
returns the value of an attribute or some default value if the attribute was not set
get
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
def __getitem__(self, name): """get the value of an attribute using square-bracket notation: `instance[attr]`""" if name in self: return self.get(name) raise ApiAttributeError( "{0} has no attribute '{1}'".format( type(self).__name__, name), [e for e in [self._path_to_item, name] if e] )
get the value of an attribute using square-bracket notation: `instance[attr]`
__getitem__
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
def __contains__(self, name): """used by `in` operator to check if an attribute value was set in an instance: `'attr' in instance`""" if name in self.required_properties: return name in self.__dict__ return name in self.__dict__['_data_store']
used by `in` operator to check if an attribute value was set in an instance: `'attr' in instance`
__contains__
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, self.__class__): return False if not set(self._data_store.keys()) == set(other._data_store.keys()): return False for _var_name, this_val in self._data_store.items(): that_val = other._data_store[_var_name] types = set() types.add(this_val.__class__) types.add(that_val.__class__) vals_equal = this_val == that_val if not vals_equal: return False return True
Returns true if both objects are equal
__eq__
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
def __setitem__(self, name, value): """set the value of an attribute using square-bracket notation: `instance[attr] = val`""" if name in self.required_properties: self.__dict__[name] = value return # set the attribute on the correct instance model_instances = self._var_name_to_model_instances.get( name, self._additional_properties_model_instances) if model_instances: for model_instance in model_instances: if model_instance == self: self.set_attribute(name, value) else: setattr(model_instance, name, value) if name not in self._var_name_to_model_instances: # we assigned an additional property self.__dict__['_var_name_to_model_instances'][name] = ( model_instance ) return None raise ApiAttributeError( "{0} has no attribute '{1}'".format( type(self).__name__, name), [e for e in [self._path_to_item, name] if e] )
set the value of an attribute using square-bracket notation: `instance[attr] = val`
__setitem__
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
def get(self, name, default=None): """returns the value of an attribute or some default value if the attribute was not set""" if name in self.required_properties: return self.__dict__[name] # get the attribute from the correct instance model_instances = self._var_name_to_model_instances.get( name, self._additional_properties_model_instances) values = [] # A composed model stores child (oneof/anyOf/allOf) models under # self._var_name_to_model_instances. A named property can exist in # multiple child models. If the property is present in more than one # child model, the value must be the same across all the child models. if model_instances: for model_instance in model_instances: if name in model_instance._data_store: v = model_instance._data_store[name] if v not in values: values.append(v) len_values = len(values) if len_values == 0: return default elif len_values == 1: return values[0] elif len_values > 1: raise ApiValueError( "Values stored for property {0} in {1} differ when looking " "at self and self's composed instances. All values must be " "the same".format(name, type(self).__name__), [e for e in [self._path_to_item, name] if e] )
returns the value of an attribute or some default value if the attribute was not set
get
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
def __getitem__(self, name): """get the value of an attribute using square-bracket notation: `instance[attr]`""" value = self.get(name, self.__unset_attribute_value__) if value is self.__unset_attribute_value__: raise ApiAttributeError( "{0} has no attribute '{1}'".format( type(self).__name__, name), [e for e in [self._path_to_item, name] if e] ) return value
get the value of an attribute using square-bracket notation: `instance[attr]`
__getitem__
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
def __contains__(self, name): """used by `in` operator to check if an attribute value was set in an instance: `'attr' in instance`""" if name in self.required_properties: return name in self.__dict__ model_instances = self._var_name_to_model_instances.get( name, self._additional_properties_model_instances) if model_instances: for model_instance in model_instances: if name in model_instance._data_store: return True return False
used by `in` operator to check if an attribute value was set in an instance: `'attr' in instance`
__contains__
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, self.__class__): return False if not set(self._data_store.keys()) == set(other._data_store.keys()): return False for _var_name, this_val in self._data_store.items(): that_val = other._data_store[_var_name] types = set() types.add(this_val.__class__) types.add(that_val.__class__) vals_equal = this_val == that_val if not vals_equal: return False return True
Returns true if both objects are equal
__eq__
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
def get_simple_class(input_value): """Returns an input_value's simple class that we will use for type checking Python2: float and int will return int, where int is the python3 int backport str and unicode will return str, where str is the python3 str backport Note: float and int ARE both instances of int backport Note: str_py2 and unicode_py2 are NOT both instances of str backport Args: input_value (class/class_instance): the item for which we will return the simple class """ if isinstance(input_value, type): # input_value is a class return input_value elif isinstance(input_value, tuple): return tuple elif isinstance(input_value, list): return list elif isinstance(input_value, dict): return dict elif isinstance(input_value, none_type): return none_type elif isinstance(input_value, file_type): return file_type elif isinstance(input_value, bool): # this must be higher than the int check because # isinstance(True, int) == True return bool elif isinstance(input_value, int): return int elif isinstance(input_value, datetime): # this must be higher than the date check because # isinstance(datetime_instance, date) == True return datetime elif isinstance(input_value, date): return date elif isinstance(input_value, str): return str return type(input_value)
Returns an input_value's simple class that we will use for type checking Python2: float and int will return int, where int is the python3 int backport str and unicode will return str, where str is the python3 str backport Note: float and int ARE both instances of int backport Note: str_py2 and unicode_py2 are NOT both instances of str backport Args: input_value (class/class_instance): the item for which we will return the simple class
get_simple_class
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
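A quick illustration of the ordering noted in the comments above: bool is checked before int, and datetime before date.

from datetime import date, datetime

get_simple_class(True)                  # -> bool (bool is an int subclass, so it is checked first)
get_simple_class(3)                     # -> int
get_simple_class(datetime(2024, 1, 1))  # -> datetime (a datetime is also a date)
get_simple_class(date(2024, 1, 1))      # -> date
get_simple_class("paasta")              # -> str
get_simple_class(None)                  # -> none_type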
def check_allowed_values(allowed_values, input_variable_path, input_values): """Raises an exception if the input_values are not allowed Args: allowed_values (dict): the allowed_values dict input_variable_path (tuple): the path to the input variable input_values (list/str/int/float/date/datetime): the values that we are checking to see if they are in allowed_values """ these_allowed_values = list(allowed_values[input_variable_path].values()) if (isinstance(input_values, list) and not set(input_values).issubset( set(these_allowed_values))): invalid_values = ", ".join( map(str, set(input_values) - set(these_allowed_values))), raise ApiValueError( "Invalid values for `%s` [%s], must be a subset of [%s]" % ( input_variable_path[0], invalid_values, ", ".join(map(str, these_allowed_values)) ) ) elif (isinstance(input_values, dict) and not set( input_values.keys()).issubset(set(these_allowed_values))): invalid_values = ", ".join( map(str, set(input_values.keys()) - set(these_allowed_values))) raise ApiValueError( "Invalid keys in `%s` [%s], must be a subset of [%s]" % ( input_variable_path[0], invalid_values, ", ".join(map(str, these_allowed_values)) ) ) elif (not isinstance(input_values, (list, dict)) and input_values not in these_allowed_values): raise ApiValueError( "Invalid value for `%s` (%s), must be one of %s" % ( input_variable_path[0], input_values, these_allowed_values ) )
Raises an exception if the input_values are not allowed Args: allowed_values (dict): the allowed_values dict input_variable_path (tuple): the path to the input variable input_values (list/str/int/float/date/datetime): the values that we are checking to see if they are in allowed_values
check_allowed_values
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
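A usage sketch for the validation helper above; the enum values are illustrative.

# Hedged sketch: allowed_values maps a parameter path to a dict whose values
# are the permitted inputs.
allowed = {("deploy_group",): {"PROD": "prod", "STAGE": "stage"}}
check_allowed_values(allowed, ("deploy_group",), "prod")    # passes silently
check_allowed_values(allowed, ("deploy_group",), "canary")  # raises ApiValueError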
def is_json_validation_enabled(schema_keyword, configuration=None): """Returns true if JSON schema validation is enabled for the specified validation keyword. This can be used to skip JSON schema structural validation as requested in the configuration. Args: schema_keyword (string): the name of a JSON schema validation keyword. configuration (Configuration): the configuration class. """ return (configuration is None or not hasattr(configuration, '_disabled_client_side_validations') or schema_keyword not in configuration._disabled_client_side_validations)
Returns true if JSON schema validation is enabled for the specified validation keyword. This can be used to skip JSON schema structural validation as requested in the configuration. Args: schema_keyword (string): the name of a JSON schema validation keyword. configuration (Configuration): the configuration class.
is_json_validation_enabled
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
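A small sketch of how the configuration toggle is consulted; FakeConfiguration is a stand-in that only mimics the _disabled_client_side_validations attribute the function looks for, not the real generated Configuration class:

from paasta_tools.paastaapi.model_utils import is_json_validation_enabled

class FakeConfiguration:
    # Stand-in for the generated Configuration class: keywords listed here
    # are skipped during client-side validation.
    _disabled_client_side_validations = {'maxLength', 'pattern'}

print(is_json_validation_enabled('maxLength'))                       # True: no configuration given
print(is_json_validation_enabled('maxLength', FakeConfiguration()))  # False: keyword disabled
print(is_json_validation_enabled('minimum', FakeConfiguration()))    # True: keyword not disabled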
def check_validations(
        validations, input_variable_path, input_values,
        configuration=None):
    """Raises an exception if the input_values are invalid

    Args:
        validations (dict): the validation dictionary.
        input_variable_path (tuple): the path to the input variable.
        input_values (list/str/int/float/date/datetime): the values that we
            are checking.
        configuration (Configuration): the configuration class.
    """
    current_validations = validations[input_variable_path]
    if (is_json_validation_enabled('multipleOf', configuration) and
            'multiple_of' in current_validations and
            isinstance(input_values, (int, float)) and
            not (float(input_values) /
                 current_validations['multiple_of']).is_integer()):
        # Note 'multipleOf' will be as good as the floating point arithmetic.
        raise ApiValueError(
            "Invalid value for `%s`, value must be a multiple of "
            "`%s`" % (
                input_variable_path[0],
                current_validations['multiple_of']
            )
        )

    if (is_json_validation_enabled('maxLength', configuration) and
            'max_length' in current_validations and
            len(input_values) > current_validations['max_length']):
        raise ApiValueError(
            "Invalid value for `%s`, length must be less than or equal to "
            "`%s`" % (
                input_variable_path[0],
                current_validations['max_length']
            )
        )

    if (is_json_validation_enabled('minLength', configuration) and
            'min_length' in current_validations and
            len(input_values) < current_validations['min_length']):
        raise ApiValueError(
            "Invalid value for `%s`, length must be greater than or equal to "
            "`%s`" % (
                input_variable_path[0],
                current_validations['min_length']
            )
        )

    if (is_json_validation_enabled('maxItems', configuration) and
            'max_items' in current_validations and
            len(input_values) > current_validations['max_items']):
        raise ApiValueError(
            "Invalid value for `%s`, number of items must be less than or "
            "equal to `%s`" % (
                input_variable_path[0],
                current_validations['max_items']
            )
        )

    if (is_json_validation_enabled('minItems', configuration) and
            'min_items' in current_validations and
            len(input_values) < current_validations['min_items']):
        raise ValueError(
            "Invalid value for `%s`, number of items must be greater than or "
            "equal to `%s`" % (
                input_variable_path[0],
                current_validations['min_items']
            )
        )

    items = ('exclusive_maximum', 'inclusive_maximum',
             'exclusive_minimum', 'inclusive_minimum')
    if (any(item in current_validations for item in items)):
        if isinstance(input_values, list):
            max_val = max(input_values)
            min_val = min(input_values)
        elif isinstance(input_values, dict):
            max_val = max(input_values.values())
            min_val = min(input_values.values())
        else:
            max_val = input_values
            min_val = input_values

    if (is_json_validation_enabled('exclusiveMaximum', configuration) and
            'exclusive_maximum' in current_validations and
            max_val >= current_validations['exclusive_maximum']):
        raise ApiValueError(
            "Invalid value for `%s`, must be a value less than `%s`" % (
                input_variable_path[0],
                current_validations['exclusive_maximum']
            )
        )

    if (is_json_validation_enabled('maximum', configuration) and
            'inclusive_maximum' in current_validations and
            max_val > current_validations['inclusive_maximum']):
        raise ApiValueError(
            "Invalid value for `%s`, must be a value less than or equal to "
            "`%s`" % (
                input_variable_path[0],
                current_validations['inclusive_maximum']
            )
        )

    if (is_json_validation_enabled('exclusiveMinimum', configuration) and
            'exclusive_minimum' in current_validations and
            min_val <= current_validations['exclusive_minimum']):
        raise ApiValueError(
            "Invalid value for `%s`, must be a value greater than `%s`" % (
                input_variable_path[0],
                current_validations['exclusive_minimum']
            )
        )

    if (is_json_validation_enabled('minimum', configuration) and
            'inclusive_minimum' in current_validations and
            min_val < current_validations['inclusive_minimum']):
        raise ApiValueError(
            "Invalid value for `%s`, must be a value greater than or equal "
            "to `%s`" % (
                input_variable_path[0],
                current_validations['inclusive_minimum']
            )
        )

    flags = current_validations.get('regex', {}).get('flags', 0)
    if (is_json_validation_enabled('pattern', configuration) and
            'regex' in current_validations and
            not re.search(current_validations['regex']['pattern'],
                          input_values, flags=flags)):
        err_msg = r"Invalid value for `%s`, must match regular expression `%s`" % (
            input_variable_path[0],
            current_validations['regex']['pattern']
        )
        if flags != 0:
            # Don't print the regex flags if the flags are not
            # specified in the OAS document.
            err_msg = r"%s with flags=`%s`" % (err_msg, flags)
        raise ApiValueError(err_msg)
Raises an exception if the input_values are invalid

    Args:
        validations (dict): the validation dictionary.
        input_variable_path (tuple): the path to the input variable.
        input_values (list/str/int/float/date/datetime): the values that we
            are checking.
        configuration (Configuration): the configuration class.
check_validations
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
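A usage sketch with made-up validation rules; real generated models pass their own validations dict plus the active Configuration, so the values below are purely illustrative:

from paasta_tools.paastaapi.model_utils import check_validations

# Hypothetical rule: a name between 1 and 10 characters long.
validations = {('name',): {'min_length': 1, 'max_length': 10}}

check_validations(validations, ('name',), 'paasta')  # passes silently
check_validations(validations, ('name',), 'x' * 20)  # raises ApiValueError (maxLength)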
def order_response_types(required_types):
    """Returns the required types sorted in coercion order

    Args:
        required_types (list/tuple): collection of classes or instance of
            list or dict with class information inside it.

    Returns:
        (list): coercion order sorted collection of classes or instance
            of list or dict with class information inside it.
    """

    def index_getter(class_or_instance):
        if isinstance(class_or_instance, list):
            return COERCION_INDEX_BY_TYPE[list]
        elif isinstance(class_or_instance, dict):
            return COERCION_INDEX_BY_TYPE[dict]
        elif (inspect.isclass(class_or_instance)
                and issubclass(class_or_instance, ModelComposed)):
            return COERCION_INDEX_BY_TYPE[ModelComposed]
        elif (inspect.isclass(class_or_instance)
                and issubclass(class_or_instance, ModelNormal)):
            return COERCION_INDEX_BY_TYPE[ModelNormal]
        elif (inspect.isclass(class_or_instance)
                and issubclass(class_or_instance, ModelSimple)):
            return COERCION_INDEX_BY_TYPE[ModelSimple]
        elif class_or_instance in COERCION_INDEX_BY_TYPE:
            return COERCION_INDEX_BY_TYPE[class_or_instance]
        raise ApiValueError("Unsupported type: %s" % class_or_instance)

    sorted_types = sorted(
        required_types,
        key=lambda class_or_instance: index_getter(class_or_instance)
    )
    return sorted_types
Returns the required types sorted in coercion order

    Args:
        required_types (list/tuple): collection of classes or instance of
            list or dict with class information inside it.

    Returns:
        (list): coercion order sorted collection of classes or instance
            of list or dict with class information inside it.
order_response_types
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0
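A brief sketch of calling order_response_types; the concrete ordering comes from COERCION_INDEX_BY_TYPE in the same module, which in typical openapi-generator output tries composed, normal, and simple model classes first and leaves str and file types for last:

from paasta_tools.paastaapi.model_utils import none_type, order_response_types

# Sort candidate response types so deserialization attempts the most
# specific coercions first; typically none_type sorts ahead of int and str here.
print(order_response_types((none_type, str, int)))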
def get_discriminated_classes(cls):
    """Returns all the classes that a discriminator converts to
    TODO: lru_cache this
    """
    possible_classes = []
    key = list(cls.discriminator.keys())[0]
    if is_type_nullable(cls):
        possible_classes.append(cls)
    for discr_cls in cls.discriminator[key].values():
        if hasattr(discr_cls, 'discriminator') and discr_cls.discriminator is not None:
            possible_classes.extend(get_discriminated_classes(discr_cls))
        else:
            possible_classes.append(discr_cls)
    return possible_classes
Returns all the classes that a discriminator converts to

    TODO: lru_cache this
get_discriminated_classes
python
Yelp/paasta
paasta_tools/paastaapi/model_utils.py
https://github.com/Yelp/paasta/blob/master/paasta_tools/paastaapi/model_utils.py
Apache-2.0