repository_name (stringlengths): 5 to 67
func_path_in_repository (stringlengths): 4 to 234
func_name (stringlengths): 0 to 314
whole_func_string (stringlengths): 52 to 3.87M
language (stringclasses): 6 values
func_code_string (stringlengths): 52 to 3.87M
func_documentation_string (stringlengths): 1 to 47.2k
func_code_url (stringlengths): 85 to 339
reanahub/reana-commons
reana_commons/utils.py
calculate_job_input_hash
def calculate_job_input_hash(job_spec, workflow_json):
    """Calculate md5 hash of job specification and workflow json."""
    if 'workflow_workspace' in job_spec:
        del job_spec['workflow_workspace']
    job_md5_buffer = md5()
    job_md5_buffer.update(json.dumps(job_spec).encode('utf-8'))
    job_md5_buffer.update(json.dumps(workflow_json).encode('utf-8'))
    return job_md5_buffer.hexdigest()
python
Calculate md5 hash of job specification and workflow json.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/utils.py#L78-L85
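A brief usage sketch for calculate_job_input_hash (all values are hypothetical, not from the dataset), assuming the reana_commons package is importable:

# Usage sketch with hypothetical values: the 'workflow_workspace' key is
# ignored, so two job specs that differ only in workspace hash identically.
from reana_commons.utils import calculate_job_input_hash

workflow = {'steps': [{'commands': ['echo hi']}]}
spec_a = {'cmd': 'echo hi', 'workflow_workspace': '/workspace/a'}
spec_b = {'cmd': 'echo hi', 'workflow_workspace': '/workspace/b'}
assert calculate_job_input_hash(spec_a, workflow) == \
    calculate_job_input_hash(spec_b, workflow)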
reanahub/reana-commons
reana_commons/utils.py
calculate_file_access_time
def calculate_file_access_time(workflow_workspace):
    """Calculate access times of files in workspace."""
    access_times = {}
    for subdir, dirs, files in os.walk(workflow_workspace):
        for file in files:
            file_path = os.path.join(subdir, file)
            access_times[file_path] = os.stat(file_path).st_atime
    return access_times
python
Calculate access times of files in workspace.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/utils.py#L88-L95
reanahub/reana-commons
reana_commons/utils.py
copy_openapi_specs
def copy_openapi_specs(output_path, component):
    """Copy generated and validated openapi specs to reana-commons module."""
    if component == 'reana-server':
        file = 'reana_server.json'
    elif component == 'reana-workflow-controller':
        file = 'reana_workflow_controller.json'
    elif component == 'reana-job-controller':
        file = 'reana_job_controller.json'
    if os.environ.get('REANA_SRCDIR'):
        reana_srcdir = os.environ.get('REANA_SRCDIR')
    else:
        reana_srcdir = os.path.join('..')
    try:
        reana_commons_specs_path = os.path.join(
            reana_srcdir,
            'reana-commons',
            'reana_commons',
            'openapi_specifications')
        if os.path.exists(reana_commons_specs_path):
            if os.path.isfile(output_path):
                shutil.copy(output_path,
                            os.path.join(reana_commons_specs_path, file))
                # copy openapi specs file as well to docs
                shutil.copy(output_path,
                            os.path.join('docs', 'openapi.json'))
    except Exception as e:
        click.echo('Something went wrong, could not copy openapi '
                   'specifications to reana-commons \n{0}'.format(e))
python
Copy generated and validated openapi specs to reana-commons module.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/utils.py#L98-L126
reanahub/reana-commons
reana_commons/utils.py
get_workflow_status_change_verb
def get_workflow_status_change_verb(status):
    """Give the correct verb conjugation depending on status tense.

    :param status: String which represents the status the workflow changed
        to.
    """
    verb = ''
    if status.endswith('ing'):
        verb = 'is'
    elif status.endswith('ed'):
        verb = 'has been'
    else:
        raise ValueError('Unrecognised status {}'.format(status))
    return verb
python
Give the correct verb conjugation depending on status tense. :param status: String which represents the status the workflow changed to.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/utils.py#L129-L142
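A short usage sketch for get_workflow_status_change_verb (hypothetical statuses), assuming reana_commons is importable:

# '-ing' statuses map to 'is', '-ed' statuses to 'has been';
# anything else raises ValueError.
from reana_commons.utils import get_workflow_status_change_verb

assert get_workflow_status_change_verb('running') == 'is'
assert get_workflow_status_change_verb('finished') == 'has been'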
reanahub/reana-commons
reana_commons/utils.py
build_progress_message
def build_progress_message(total=None,
                           running=None,
                           finished=None,
                           failed=None,
                           cached=None):
    """Build the progress message with correct formatting."""
    progress_message = {}
    if total:
        progress_message['total'] = total
    if running:
        progress_message['running'] = running
    if finished:
        progress_message['finished'] = finished
    if failed:
        progress_message['failed'] = failed
    if cached:
        progress_message['cached'] = cached
    return progress_message
python
Build the progress message with correct formatting.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/utils.py#L145-L162
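A short usage sketch for build_progress_message (hypothetical counts); note that the truthiness checks above drop zero-valued counts, which is worth knowing before relying on the output keys:

from reana_commons.utils import build_progress_message

# Only truthy counts survive, so a count of 0 is silently omitted.
assert build_progress_message(total=5, finished=2) == {'total': 5,
                                                       'finished': 2}
assert build_progress_message(total=5, failed=0) == {'total': 5}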
reanahub/reana-commons
reana_commons/utils.py
build_caching_info_message
def build_caching_info_message(job_spec,
                               job_id,
                               workflow_workspace,
                               workflow_json,
                               result_path):
    """Build the caching info message with correct formatting."""
    caching_info_message = {
        "job_spec": job_spec,
        "job_id": job_id,
        "workflow_workspace": workflow_workspace,
        "workflow_json": workflow_json,
        "result_path": result_path
    }
    return caching_info_message
python
Build the caching info message with correct formatting.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/utils.py#L165-L178
reanahub/reana-commons
reana_commons/utils.py
get_workspace_disk_usage
def get_workspace_disk_usage(workspace, summarize=False):
    """Retrieve disk usage information of a workspace."""
    command = ['du', '-h']
    if summarize:
        command.append('-s')
    else:
        command.append('-a')
    command.append(workspace)
    disk_usage_info = subprocess.check_output(command).decode().split()
    # create pairs of (size, filename)
    filesize_pairs = list(zip(disk_usage_info[::2], disk_usage_info[1::2]))
    filesizes = []
    for filesize_pair in filesize_pairs:
        size, name = filesize_pair
        # trim workspace path in every file name
        filesizes.append({'name': name[len(workspace):], 'size': size})
    return filesizes
python
Retrieve disk usage information of a workspace.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/utils.py#L181-L198
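A usage sketch for get_workspace_disk_usage on a throwaway workspace; the exact sizes are platform-dependent because the function shells out to `du -h`:

import os
import tempfile

from reana_commons.utils import get_workspace_disk_usage

workspace = tempfile.mkdtemp()
with open(os.path.join(workspace, 'data.txt'), 'w') as f:
    f.write('x' * 1024)
for entry in get_workspace_disk_usage(workspace):
    print(entry)  # e.g. {'name': '/data.txt', 'size': '4.0K'}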
reanahub/reana-commons
reana_commons/utils.py
render_cvmfs_pvc
def render_cvmfs_pvc(cvmfs_volume):
    """Render REANA_CVMFS_PVC_TEMPLATE."""
    name = CVMFS_REPOSITORIES[cvmfs_volume]
    rendered_template = dict(REANA_CVMFS_PVC_TEMPLATE)
    rendered_template['metadata']['name'] = 'csi-cvmfs-{}-pvc'.format(name)
    rendered_template['spec']['storageClassName'] = "csi-cvmfs-{}".format(name)
    return rendered_template
python
Render REANA_CVMFS_PVC_TEMPLATE.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/utils.py#L201-L207
reanahub/reana-commons
reana_commons/utils.py
render_cvmfs_sc
def render_cvmfs_sc(cvmfs_volume):
    """Render REANA_CVMFS_SC_TEMPLATE."""
    name = CVMFS_REPOSITORIES[cvmfs_volume]
    rendered_template = dict(REANA_CVMFS_SC_TEMPLATE)
    rendered_template['metadata']['name'] = "csi-cvmfs-{}".format(name)
    rendered_template['parameters']['repository'] = cvmfs_volume
    return rendered_template
python
Render REANA_CVMFS_SC_TEMPLATE.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/utils.py#L210-L216
reanahub/reana-commons
reana_commons/utils.py
create_cvmfs_storage_class
def create_cvmfs_storage_class(cvmfs_volume):
    """Create CVMFS storage class."""
    from kubernetes.client.rest import ApiException
    from reana_commons.k8s.api_client import current_k8s_storagev1_api_client
    try:
        current_k8s_storagev1_api_client.\
            create_storage_class(
                render_cvmfs_sc(cvmfs_volume)
            )
    except ApiException as e:
        if e.status != 409:
            raise e
python
Create CVMFS storage class.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/utils.py#L219-L231
reanahub/reana-commons
reana_commons/utils.py
create_cvmfs_persistent_volume_claim
def create_cvmfs_persistent_volume_claim(cvmfs_volume):
    """Create CVMFS persistent volume claim."""
    from kubernetes.client.rest import ApiException
    from reana_commons.k8s.api_client import current_k8s_corev1_api_client
    try:
        current_k8s_corev1_api_client.\
            create_namespaced_persistent_volume_claim(
                "default",
                render_cvmfs_pvc(cvmfs_volume)
            )
    except ApiException as e:
        if e.status != 409:
            raise e
python
Create CVMFS persistent volume claim.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/utils.py#L234-L247
reanahub/reana-commons
reana_commons/k8s/api_client.py
create_api_client
def create_api_client(api='BatchV1'):
    """Create Kubernetes API client using config.

    :param api: String which represents which Kubernetes API to spawn. By
        default BatchV1.
    :returns: Kubernetes python client object for a specific API
        i.e. BatchV1.
    """
    k8s_config.load_incluster_config()
    api_configuration = client.Configuration()
    api_configuration.verify_ssl = False
    if api == 'extensions/v1beta1':
        api_client = client.ExtensionsV1beta1Api()
    elif api == 'CoreV1':
        api_client = client.CoreV1Api()
    elif api == 'StorageV1':
        api_client = client.StorageV1Api()
    else:
        api_client = client.BatchV1Api()
    return api_client
python
Create Kubernetes API client using config. :param api: String which represents which Kubernetes API to spawn. By default BatchV1. :returns: Kubernetes python client object for a specific API i.e. BatchV1.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/k8s/api_client.py#L18-L36
reanahub/reana-commons
reana_commons/publisher.py
BasePublisher.__error_callback
def __error_callback(self, exception, interval):
    """Execute when there is an error while sending a message.

    :param exception: Exception which has been thrown while trying to send
        the message.
    :param interval: Interval in which the message delivery will be retried.
    """
    logging.error('Error while publishing {}'.format(exception))
    logging.info('Retry in %s seconds.', interval)
python
Execute when there is an error while sending a message. :param exception: Exception which has been thrown while trying to send the message. :param interval: Interval in which the message delivery will be retried.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/publisher.py#L53-L63
reanahub/reana-commons
reana_commons/publisher.py
BasePublisher._publish
def _publish(self, msg):
    """Publish, handling retries, a message in the queue.

    :param msg: Object which represents the message to be sent in the
        queue. Note that this object should be serializable in the
        configured format (by default JSON).
    """
    connection = self._connection.clone()
    publish = connection.ensure(self.producer, self.producer.publish,
                                errback=self.__error_callback,
                                max_retries=MQ_PRODUCER_MAX_RETRIES)
    publish(json.dumps(msg),
            exchange=self._exchange,
            routing_key=self._routing_key,
            declare=[self._queue])
    logging.debug('Publisher: message sent: %s', msg)
python
Publish, handling retries, a message in the queue. :param msg: Object which represents the message to be sent in the queue. Note that this object should be serializable in the configured format (by default JSON).
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/publisher.py#L65-L78
reanahub/reana-commons
reana_commons/publisher.py
WorkflowStatusPublisher.publish_workflow_status
def publish_workflow_status(self, workflow_uuid, status,
                            logs='', message=None):
    """Publish workflow status using the configured queue.

    :param workflow_uuid: String which represents the workflow UUID.
    :param status: Integer which represents the status of the workflow,
        as defined in the `reana-db` `Workflow` models.
    :param logs: String which represents the logs which the workflow
        has produced as output.
    :param message: Dictionary in which additional information can be
        attached, such as the overall progress of the workflow.
    """
    msg = {
        "workflow_uuid": workflow_uuid,
        "logs": logs,
        "status": status,
        "message": message
    }
    self._publish(msg)
python
Publish workflow status using the configured queue. :param workflow_uuid: String which represents the workflow UUID. :param status: Integer which represents the status of the workflow, as defined in the `reana-db` `Workflow` models. :param logs: String which represents the logs which the workflow has produced as output. :param message: Dictionary in which additional information can be attached, such as the overall progress of the workflow.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/publisher.py#L100-L118
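For orientation, a sketch of the message body that publish_workflow_status assembles before handing it to _publish; every value below is hypothetical:

# Message shape built by publish_workflow_status (hypothetical values).
msg = {
    "workflow_uuid": "229e0f41-1aeb-4c6f-a3b5-b1a4b9a0b2a1",  # example UUID
    "logs": "",
    "status": 3,  # integer status as defined in the reana-db Workflow model
    "message": {"progress": {"total": 5, "finished": 5}},
}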
reanahub/reana-commons
reana_commons/publisher.py
WorkflowSubmissionPublisher.publish_workflow_submission
def publish_workflow_submission(self, user_id,
                                workflow_id_or_name,
                                parameters):
    """Publish workflow submission parameters."""
    msg = {
        "user": user_id,
        "workflow_id_or_name": workflow_id_or_name,
        "parameters": parameters
    }
    self._publish(msg)
python
Publish workflow submission parameters.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/publisher.py#L133-L143
reanahub/reana-commons
reana_commons/serial.py
serial_load
def serial_load(workflow_file, specification, parameters=None, original=None):
    """Validate and return an expanded REANA Serial workflow specification.

    :param workflow_file: A specification file compliant with the
        REANA Serial workflow specification.
    :returns: A dictionary which represents the valid Serial workflow with
        all parameters expanded.
    """
    parameters = parameters or {}
    if not specification:
        with open(workflow_file, 'r') as f:
            specification = json.loads(f.read())
    expanded_specification = _expand_parameters(specification,
                                                parameters,
                                                original)
    validate(specification, serial_workflow_schema)
    return expanded_specification
python
Validate and return an expanded REANA Serial workflow specification. :param workflow_file: A specification file compliant with the REANA Serial workflow specification. :returns: A dictionary which represents the valid Serial workflow with all parameters expanded.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/serial.py#L55-L75
reanahub/reana-commons
reana_commons/serial.py
_expand_parameters
def _expand_parameters(specification, parameters, original=None):
    """Expand parameters inside commands for Serial workflow specifications.

    :param specification: Full valid Serial workflow specification.
    :param parameters: Parameters to be extended on a Serial specification.
    :param original: Flag which determines the type of specification to
        return.
    :returns: If the 'original' parameter is set, a copy of the specification
        without expanded parameters will be returned. If 'original' is not
        set, a copy of the specification with expanded parameters (all
        $varname and ${varname} occurrences will be expanded with their
        values). Otherwise an error will be thrown if the parameters cannot
        be expanded.
    :raises: jsonschema.ValidationError
    """
    expanded_specification = deepcopy(specification)
    try:
        for step_num, step in enumerate(expanded_specification['steps']):
            current_step = expanded_specification['steps'][step_num]
            for command_num, command in enumerate(step['commands']):
                current_step['commands'][command_num] = \
                    Template(command).substitute(parameters)
        # if call is done from client, original==True and original
        # specifications without applied parameters are returned.
        if original:
            return specification
        else:
            return expanded_specification
    except KeyError as e:
        raise ValidationError('Workflow parameter(s) could not '
                              'be expanded. Please take a look '
                              'to {params}'.format(params=str(e)))
python
Expand parameters inside commands for Serial workflow specifications. :param specification: Full valid Serial workflow specification. :param parameters: Parameters to be extended on a Serial specification. :param original: Flag which determines the type of specification to return. :returns: If the 'original' parameter is set, a copy of the specification without expanded parameters will be returned. If 'original' is not set, a copy of the specification with expanded parameters (all $varname and ${varname} occurrences will be expanded with their values). Otherwise an error will be thrown if the parameters cannot be expanded. :raises: jsonschema.ValidationError
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/serial.py#L78-L108
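A self-contained sketch of the expansion mechanics used by _expand_parameters (hypothetical command and parameters): string.Template substitutes $varname and ${varname}, and a missing parameter raises KeyError, which the function converts into a ValidationError:

from string import Template

parameters = {'input': 'data.csv', 'outdir': 'results'}
command = 'python analyze.py --in $input --out ${outdir}/plot.png'
print(Template(command).substitute(parameters))
# -> python analyze.py --in data.csv --out results/plot.png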
reanahub/reana-commons
reana_commons/tasks.py
reana_ready
def reana_ready():
    """Check if reana can start new workflows."""
    from reana_commons.config import REANA_READY_CONDITIONS
    for module_name, condition_list in REANA_READY_CONDITIONS.items():
        for condition_name in condition_list:
            module = importlib.import_module(module_name)
            condition_func = getattr(module, condition_name)
            if not condition_func():
                return False
    return True
python
Check if reana can start new workflows.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/tasks.py#L24-L33
reanahub/reana-commons
reana_commons/tasks.py
check_predefined_conditions
def check_predefined_conditions():
    """Check k8s predefined conditions for the nodes."""
    try:
        node_info = current_k8s_corev1_api_client.list_node()
        for node in node_info.items:
            # check based on the predefined conditions about the
            # node status: MemoryPressure, OutOfDisk, KubeletReady
            # DiskPressure, PIDPressure,
            for condition in node.status.conditions:
                if not condition.status:
                    return False
    except ApiException as e:
        log.error('Something went wrong while getting node information.')
        log.error(e)
        return False
    return True
python
Check k8s predefined conditions for the nodes.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/tasks.py#L36-L51
reanahub/reana-commons
reana_commons/tasks.py
check_running_job_count
def check_running_job_count():
    """Check upper limit on running jobs."""
    try:
        job_list = current_k8s_batchv1_api_client.\
            list_job_for_all_namespaces()
        if len(job_list.items) > K8S_MAXIMUM_CONCURRENT_JOBS:
            return False
    except ApiException as e:
        log.error('Something went wrong while getting running job list.')
        log.error(e)
        return False
    return True
python
Check upper limit on running jobs.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/tasks.py#L54-L65
reanahub/reana-commons
reana_commons/api_client.py
BaseAPIClient._get_spec
def _get_spec(self, spec_file):
    """Get json specification from package data."""
    spec_file_path = os.path.join(
        pkg_resources.resource_filename(
            'reana_commons',
            'openapi_specifications'),
        spec_file)
    with open(spec_file_path) as f:
        json_spec = json.load(f)
    return json_spec
python
Get json specification from package data.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/api_client.py#L46-L57
reanahub/reana-commons
reana_commons/api_client.py
JobControllerAPIClient.submit
def submit(self,
           workflow_uuid='',
           experiment='',
           image='',
           cmd='',
           prettified_cmd='',
           workflow_workspace='',
           job_name='',
           cvmfs_mounts='false'):
    """Submit a job to RJC API.

    :param job_name: Name of the job.
    :param experiment: Experiment the job belongs to.
    :param image: Identifier of the Docker image which will run the job.
    :param cmd: String which represents the command to execute. It can be
        modified by the workflow engine i.e. prepending ``cd /some/dir/``.
    :param prettified_cmd: Original command submitted by the user.
    :param workflow_workspace: Path to the workspace of the workflow.
    :param cvmfs_mounts: String with CVMFS volumes to mount in job pods.
    :return: Returns a dict with the ``job_id``.
    """
    job_spec = {
        'experiment': experiment,
        'docker_img': image,
        'cmd': cmd,
        'prettified_cmd': prettified_cmd,
        'env_vars': {},
        'workflow_workspace': workflow_workspace,
        'job_name': job_name,
        'cvmfs_mounts': cvmfs_mounts,
        'workflow_uuid': workflow_uuid
    }
    response, http_response = self._client.jobs.create_job(job=job_spec).\
        result()
    if http_response.status_code == 400:
        raise HTTPBadRequest('Bad request to create a job. Error: {}'.
                             format(http_response.data))
    elif http_response.status_code == 500:
        raise HTTPInternalServerError('Internal Server Error. Error: {}'.
                                      format(http_response.data))
    return response
python
Submit a job to RJC API. :param job_name: Name of the job. :param experiment: Experiment the job belongs to. :param image: Identifier of the Docker image which will run the job. :param cmd: String which represents the command to execute. It can be modified by the workflow engine i.e. prepending ``cd /some/dir/``. :param prettified_cmd: Original command submitted by the user. :param workflow_workspace: Path to the workspace of the workflow. :param cvmfs_mounts: String with CVMFS volumes to mount in job pods. :return: Returns a dict with the ``job_id``.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/api_client.py#L63-L104
reanahub/reana-commons
reana_commons/api_client.py
JobControllerAPIClient.check_status
def check_status(self, job_id):
    """Check status of a job."""
    response, http_response = self._client.jobs.get_job(job_id=job_id).\
        result()
    if http_response.status_code == 404:
        raise HTTPNotFound('The given job ID was not found. Error: {}'.
                           format(http_response.data))
    return response
python
Check status of a job.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/api_client.py#L106-L113
reanahub/reana-commons
reana_commons/api_client.py
JobControllerAPIClient.get_logs
def get_logs(self, job_id):
    """Get logs of a job."""
    response, http_response = self._client.jobs.get_logs(job_id=job_id).\
        result()
    if http_response.status_code == 404:
        raise HTTPNotFound('The given job ID was not found. Error: {}'.
                           format(http_response.data))
    return http_response.text
python
Get logs of a job.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/api_client.py#L115-L122
reanahub/reana-commons
reana_commons/api_client.py
JobControllerAPIClient.check_if_cached
def check_if_cached(self, job_spec, step, workflow_workspace):
    """Check if job result is in cache."""
    response, http_response = self._client.job_cache.check_if_cached(
        job_spec=json.dumps(job_spec),
        workflow_json=json.dumps(step),
        workflow_workspace=workflow_workspace).\
        result()
    if http_response.status_code == 400:
        raise HTTPBadRequest('Bad request to check cache. Error: {}'.
                             format(http_response.data))
    elif http_response.status_code == 500:
        raise HTTPInternalServerError('Internal Server Error. Error: {}'.
                                      format(http_response.data))
    return http_response
python
Check if job result is in cache.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/api_client.py#L124-L137
reanahub/reana-commons
reana_commons/k8s/volumes.py
get_shared_volume
def get_shared_volume(workflow_workspace, shared_volume_root):
    """Get shared CephFS/hostPath volume to a given job spec.

    :param workflow_workspace: Absolute path to the job's workflow workspace.
    :param shared_volume_root: Root path in the underlying storage backend.
    :returns: Tuple consisting of the Kubernetes volumeMount and the volume.
    """
    workflow_workspace_relative_to_owner = workflow_workspace
    if os.path.isabs(workflow_workspace):
        workflow_workspace_relative_to_owner = \
            os.path.relpath(workflow_workspace, shared_volume_root)
    mount_path = os.path.join(shared_volume_root,
                              workflow_workspace_relative_to_owner)
    volume_mount = {
        "name": REANA_SHARED_VOLUME_NAME,
        "mountPath": mount_path,
        "subPath": workflow_workspace_relative_to_owner}
    if REANA_STORAGE_BACKEND == "CEPHFS":
        volume = get_k8s_cephfs_volume()
    else:
        volume = get_k8s_hostpath_volume(shared_volume_root)
    return volume_mount, volume
python
Get shared CephFS/hostPath volume to a given job spec. :param workflow_workspace: Absolute path to the job's workflow workspace. :param shared_volume_root: Root path in the underlying storage backend. :returns: Tuple consisting of the Kubernetes volumeMount and the volume.
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/k8s/volumes.py#L64-L87
jbaiter/gphoto2-cffi
gphoto2cffi/backend.py
_logging_callback
def _logging_callback(level, domain, message, data):
    """ Callback that outputs libgphoto2's logging message via
        Python's standard logging facilities.

    :param level:   libgphoto2 logging level
    :param domain:  component the message originates from
    :param message: logging message
    :param data:    Other data in the logging record (unused)
    """
    domain = ffi.string(domain).decode()
    message = ffi.string(message).decode()
    logger = LOGGER.getChild(domain)

    if level not in LOG_LEVELS:
        return
    logger.log(LOG_LEVELS[level], message)
python
Callback that outputs libgphoto2's logging message via Python's standard logging facilities. :param level: libgphoto2 logging level :param domain: component the message originates from :param message: logging message :param data: Other data in the logging record (unused)
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/backend.py#L75-L90
jazzband/django-queued-storage
queued_storage/tasks.py
Transfer.run
def run(self, name, cache_key,
        local_path, remote_path,
        local_options, remote_options, **kwargs):
    """
    The main work horse of the transfer task. Calls the transfer
    method with the local and remote storage backends as given
    with the parameters.

    :param name: name of the file to transfer
    :type name: str
    :param local_path: local storage class to transfer from
    :type local_path: str
    :param local_options: options of the local storage class
    :type local_options: dict
    :param remote_path: remote storage class to transfer to
    :type remote_path: str
    :param remote_options: options of the remote storage class
    :type remote_options: dict
    :param cache_key: cache key to set after a successful transfer
    :type cache_key: str
    :rtype: task result
    """
    local = import_attribute(local_path)(**local_options)
    remote = import_attribute(remote_path)(**remote_options)
    result = self.transfer(name, local, remote, **kwargs)

    if result is True:
        cache.set(cache_key, True)
        file_transferred.send(sender=self.__class__,
                              name=name, local=local, remote=remote)
    elif result is False:
        args = [name, cache_key, local_path, remote_path,
                local_options, remote_options]
        self.retry(args=args, kwargs=kwargs)
    else:
        raise ValueError("Task '%s' did not return True/False but %s" %
                         (self.__class__, result))
    return result
python
The main work horse of the transfer task. Calls the transfer method with the local and remote storage backends as given with the parameters. :param name: name of the file to transfer :type name: str :param local_path: local storage class to transfer from :type local_path: str :param local_options: options of the local storage class :type local_options: dict :param remote_path: remote storage class to transfer to :type remote_path: str :param remote_options: options of the remote storage class :type remote_options: dict :param cache_key: cache key to set after a successful transfer :type cache_key: str :rtype: task result
https://github.com/jazzband/django-queued-storage/blob/f8225d88a01ef5ca8001aeb3f7f80818a022a12d/queued_storage/tasks.py#L63-L100
jazzband/django-queued-storage
queued_storage/tasks.py
Transfer.transfer
def transfer(self, name, local, remote, **kwargs):
    """
    Transfers the file with the given name from the local to the remote
    storage backend.

    :param name: The name of the file to transfer
    :param local: The local storage backend instance
    :param remote: The remote storage backend instance
    :returns: `True` when the transfer succeeded, `False` if not. Retries
              the task when returning `False`
    :rtype: bool
    """
    try:
        remote.save(name, local.open(name))
        return True
    except Exception as e:
        logger.error("Unable to save '%s' to remote storage. "
                     "About to retry." % name)
        logger.exception(e)
        return False
python
Transfers the file with the given name from the local to the remote storage backend. :param name: The name of the file to transfer :param local: The local storage backend instance :param remote: The remote storage backend instance :returns: `True` when the transfer succeeded, `False` if not. Retries the task when returning `False` :rtype: bool
https://github.com/jazzband/django-queued-storage/blob/f8225d88a01ef5ca8001aeb3f7f80818a022a12d/queued_storage/tasks.py#L102-L121
jbaiter/gphoto2-cffi
gphoto2cffi/util.py
get_string
def get_string(cfunc, *args):
    """ Call a C function and return its return value as a Python string.

    :param cfunc:   C function to call
    :param args:    Arguments to call function with
    :rtype:         str
    """
    cstr = get_ctype("const char**", cfunc, *args)
    return backend.ffi.string(cstr).decode() if cstr else None
python
Call a C function and return its return value as a Python string. :param cfunc: C function to call :param args: Arguments to call function with :rtype: str
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/util.py#L26-L34
jbaiter/gphoto2-cffi
gphoto2cffi/util.py
get_ctype
def get_ctype(rtype, cfunc, *args):
    """ Call a C function that takes a pointer as its last argument and
    return the C object that it contains after the function has finished.

    :param rtype:   C data type that is filled by the function
    :param cfunc:   C function to call
    :param args:    Arguments to call function with
    :return:        A pointer to the specified data type
    """
    val_p = backend.ffi.new(rtype)
    args = args + (val_p,)
    cfunc(*args)
    return val_p[0]
python
Call a C function that takes a pointer as its last argument and return the C object that it contains after the function has finished. :param rtype: C data type is filled by the function :param cfunc: C function to call :param args: Arguments to call function with :return: A pointer to the specified data type
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/util.py#L37-L49
jbaiter/gphoto2-cffi
gphoto2cffi/util.py
new_gp_object
def new_gp_object(typename):
    """ Create an indirect pointer to a GPhoto2 type, call its matching
    constructor function and return the pointer to it.

    :param typename:    Name of the type to create.
    :return:            A pointer to the specified data type.
    """
    obj_p = backend.ffi.new("{0}**".format(typename))
    backend.CONSTRUCTORS[typename](obj_p)
    return obj_p[0]
python
Create an indirect pointer to a GPhoto2 type, call its matching constructor function and return the pointer to it. :param typename: Name of the type to create. :return: A pointer to the specified data type.
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/util.py#L52-L61
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
get_library_version
def get_library_version():
    """ Get the version number of the underlying gphoto2 library.

    :return: The version
    :rtype:  tuple of (major, minor, patch) version numbers
    """
    version_str = ffi.string(lib.gp_library_version(True)[0]).decode()
    return tuple(int(x) for x in version_str.split('.'))
python
Get the version number of the underlying gphoto2 library. :return: The version :rtype: tuple of (major, minor, patch) version numbers
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L23-L30
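A usage sketch for get_library_version, assuming libgphoto2 and the gphoto2cffi bindings are installed; the returned tuple compares naturally against a minimum version:

from gphoto2cffi.gphoto2 import get_library_version

if get_library_version() >= (2, 5, 0):  # hypothetical minimum version
    print("libgphoto2 is recent enough")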
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
list_cameras
def list_cameras():
    """ List all attached USB cameras that are supported by libgphoto2.

    :return: All recognized cameras
    :rtype: list of :py:class:`Camera`
    """
    ctx = lib.gp_context_new()
    camlist_p = new_gp_object("CameraList")
    port_list_p = new_gp_object("GPPortInfoList")
    lib.gp_port_info_list_load(port_list_p)
    abilities_list_p = new_gp_object("CameraAbilitiesList")
    lib.gp_abilities_list_load(abilities_list_p, ctx)
    lib.gp_abilities_list_detect(abilities_list_p, port_list_p,
                                 camlist_p, ctx)
    out = []
    for idx in range(lib.gp_list_count(camlist_p)):
        name = get_string(lib.gp_list_get_name, camlist_p, idx)
        value = get_string(lib.gp_list_get_value, camlist_p, idx)
        # Skip iteration if no matches
        matches = re.match(r"usb:(\d+),(\d+)", value)
        if not matches:
            continue
        bus_no, device_no = (int(x) for x in matches.groups())
        abilities = ffi.new("CameraAbilities*")
        ability_idx = lib.gp_abilities_list_lookup_model(
            abilities_list_p, name.encode())
        lib.gp_abilities_list_get_abilities(abilities_list_p, ability_idx,
                                            abilities)
        if abilities.device_type == lib.GP_DEVICE_STILL_CAMERA:
            out.append(Camera(bus_no, device_no, lazy=True,
                              _abilities=abilities))
    lib.gp_list_free(camlist_p)
    lib.gp_port_info_list_free(port_list_p)
    lib.gp_abilities_list_free(abilities_list_p)
    return out
python
List all attached USB cameras that are supported by libgphoto2. :return: All recognized cameras :rtype: list of :py:class:`Camera`
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L33-L69
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
supported_cameras
def supported_cameras():
    """ List the names of all cameras supported by libgphoto2, grouped by
    the name of their driver.
    """
    ctx = lib.gp_context_new()
    abilities_list_p = new_gp_object("CameraAbilitiesList")
    lib.gp_abilities_list_load(abilities_list_p, ctx)
    abilities = ffi.new("CameraAbilities*")
    out = []
    for idx in range(lib.gp_abilities_list_count(abilities_list_p)):
        lib.gp_abilities_list_get_abilities(abilities_list_p, idx, abilities)
        if abilities.device_type == lib.GP_DEVICE_STILL_CAMERA:
            libname = os.path.basename(ffi.string(abilities.library)
                                       .decode())
            out.append((ffi.string(abilities.model).decode(), libname))
    lib.gp_abilities_list_free(abilities_list_p)
    # group the (model, driver) pairs by driver name; the original
    # two-argument lambda relied on Python 2 tuple unpacking, so index
    # into the pair explicitly instead
    key_func = lambda model_driver: model_driver[1]
    out = sorted(out, key=key_func)
    return {k: tuple(x[0] for x in v)
            for k, v in itertools.groupby(out, key_func)}
python
List the names of all cameras supported by libgphoto2, grouped by the name of their driver.
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L72-L92
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
VideoCaptureContext.stop
def stop(self):
    """ Stop the capture. """
    self.camera._get_config()['actions']['movie'].set(False)
    self.videofile = self.camera._wait_for_event(
        event_type=lib.GP_EVENT_FILE_ADDED)
    if self._old_captarget != "Memory card":
        self.camera.config['settings']['capturetarget'].set(
            self._old_captarget)
python
Stop the capture.
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L154-L161
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Directory.path
def path(self):
    """ Absolute path to the directory on the camera's filesystem. """
    if self.parent is None:
        return "/"
    else:
        return os.path.join(self.parent.path, self.name)
python
Absolute path to the directory on the camera's filesystem.
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L182-L187
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Directory.supported_operations
def supported_operations(self):
    """ All directory operations supported by the camera. """
    return tuple(op for op in backend.DIR_OPS if self._dir_ops & op)
python
All directory operations supported by the camera.
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L190-L192
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Directory.exists
def exists(self):
    """ Check whether the directory exists on the camera. """
    if self.name in ("", "/") and self.parent is None:
        return True
    else:
        return self in self.parent.directories
python
Check whether the directory exists on the camera.
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L195-L200
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Directory.files
def files(self):
    """ Get a generator that yields all files in the directory. """
    filelist_p = new_gp_object("CameraList")
    lib.gp_camera_folder_list_files(self._cam._cam, self.path.encode(),
                                    filelist_p, self._cam._ctx)
    for idx in range(lib.gp_list_count(filelist_p)):
        fname = get_string(lib.gp_list_get_name, filelist_p, idx)
        yield File(name=fname, directory=self, camera=self._cam)
    lib.gp_list_free(filelist_p)
python
Get a generator that yields all files in the directory.
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L204-L212
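A minimal sketch of walking the camera filesystem through these generators, assuming gphoto2cffi exposes Camera at package level, that Camera() with no arguments picks the first detected camera, and that Camera.filesystem is the root Directory (as used by Camera.list_all_files further down in this file).

import gphoto2cffi as gp

cam = gp.Camera()
root = cam.filesystem
for d in root.directories:     # lazy generators: one libgphoto2 listing call per level
    print(d.path)
for f in root.files:
    print(f.name)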
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Directory.directories
def directories(self): """ Get a generator that yields all subdirectories in the directory. """ dirlist_p = new_gp_object("CameraList") lib.gp_camera_folder_list_folders(self._cam._cam, self.path.encode(), dirlist_p, self._cam._ctx) for idx in range(lib.gp_list_count(dirlist_p)): name = os.path.join( self.path, get_string(lib.gp_list_get_name, dirlist_p, idx)) yield Directory(name=name, parent=self, camera=self._cam) lib.gp_list_free(dirlist_p)
python
def directories(self): """ Get a generator that yields all subdirectories in the directory. """ dirlist_p = new_gp_object("CameraList") lib.gp_camera_folder_list_folders(self._cam._cam, self.path.encode(), dirlist_p, self._cam._ctx) for idx in range(lib.gp_list_count(dirlist_p)): name = os.path.join( self.path, get_string(lib.gp_list_get_name, dirlist_p, idx)) yield Directory(name=name, parent=self, camera=self._cam) lib.gp_list_free(dirlist_p)
Get a generator that yields all subdirectories in the directory.
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L216-L226
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Directory.create
def create(self): """ Create the directory. """ lib.gp_camera_folder_make_dir( self._cam._cam, self.parent.path.encode(), self.name.encode(), self._cam._ctx)
python
def create(self): """ Create the directory. """ lib.gp_camera_folder_make_dir( self._cam._cam, self.parent.path.encode(), self.name.encode(), self._cam._ctx)
Create the directory.
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L229-L233
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Directory.remove
def remove(self): """ Remove the directory. """ lib.gp_camera_folder_remove_dir( self._cam._cam, self.parent.path.encode(), self.name.encode(), self._cam._ctx)
python
def remove(self): """ Remove the directory. """ lib.gp_camera_folder_remove_dir( self._cam._cam, self.parent.path.encode(), self.name.encode(), self._cam._ctx)
Remove the directory.
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L236-L240
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Directory.upload
def upload(self, local_path): """ Upload a file to the camera's permanent storage. :param local_path: Path to file to copy :type local_path: str/unicode """ camerafile_p = ffi.new("CameraFile**") with open(local_path, 'rb') as fp: lib.gp_file_new_from_fd(camerafile_p, fp.fileno()) lib.gp_camera_folder_put_file( self._cam._cam, self.path.encode() + b"/", os.path.basename(local_path).encode(), backend.FILE_TYPES['normal'], camerafile_p[0], self._cam.ctx)
python
def upload(self, local_path): """ Upload a file to the camera's permanent storage. :param local_path: Path to file to copy :type local_path: str/unicode """ camerafile_p = ffi.new("CameraFile**") with open(local_path, 'rb') as fp: lib.gp_file_new_from_fd(camerafile_p, fp.fileno()) lib.gp_camera_folder_put_file( self._cam._cam, self.path.encode() + b"/", os.path.basename(local_path).encode(), backend.FILE_TYPES['normal'], camerafile_p[0], self._cam.ctx)
Upload a file to the camera's permanent storage. :param local_path: Path to file to copy :type local_path: str/unicode
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L243-L256
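Hypothetical upload of a local JPEG into a directory already present on the card; the local path, the "DCIM" selection rule, and the no-argument Camera() constructor are assumptions for illustration, not documented API guarantees.

import gphoto2cffi as gp

cam = gp.Camera()
# pick any existing directory whose path ends in "DCIM" (made-up selection rule)
target = next(d for d in cam.list_all_directories() if d.path.endswith("DCIM"))
target.upload("/tmp/test.jpg")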
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
File.supported_operations
def supported_operations(self): """ All file operations supported by the camera. """ return tuple(op for op in backend.FILE_OPS if self._operations & op)
python
def supported_operations(self): """ All file operations supported by the camera. """ return tuple(op for op in backend.FILE_OPS if self._operations & op)
All file operations supported by the camera.
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L277-L279
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
File.dimensions
def dimensions(self): """ Dimensions of the image. :rtype: :py:class:`ImageDimensions` """ return ImageDimensions(self._info.file.width, self._info.file.height)
python
def dimensions(self): """ Dimensions of the image. :rtype: :py:class:`ImageDimensions` """ return ImageDimensions(self._info.file.width, self._info.file.height)
Dimensions of the image. :rtype: :py:class:`ImageDimensions`
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L298-L303
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
File.permissions
def permissions(self): """ Permissions of the file. Can be "r-" (read-only), "-w" (write-only), "rw" (read-write) or "--" (no rights). :rtype: str """ can_read = self._info.file.permissions & lib.GP_FILE_PERM_READ can_write = self._info.file.permissions & lib.GP_FILE_PERM_DELETE return "{0}{1}".format("r" if can_read else "-", "w" if can_write else "-")
python
def permissions(self): """ Permissions of the file. Can be "r-" (read-only), "-w" (write-only), "rw" (read-write) or "--" (no rights). :rtype: str """ can_read = self._info.file.permissions & lib.GP_FILE_PERM_READ can_write = self._info.file.permissions & lib.GP_FILE_PERM_DELETE return "{0}{1}".format("r" if can_read else "-", "w" if can_write else "-")
Permissions of the file. Can be "r-" (read-only), "-w" (write-only), "rw" (read-write) or "--" (no rights). :rtype: str
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L306-L317
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
File.save
def save(self, target_path, ftype='normal'): """ Save file content to a local file. :param target_path: Path to save remote file as. :type target_path: str/unicode :param ftype: Select 'view' on file. :type ftype: str """ camfile_p = ffi.new("CameraFile**") with open(target_path, 'wb') as fp: lib.gp_file_new_from_fd(camfile_p, fp.fileno()) lib.gp_camera_file_get( self._cam._cam, self.directory.path.encode(), self.name.encode(), backend.FILE_TYPES[ftype], camfile_p[0], self._cam._ctx)
python
def save(self, target_path, ftype='normal'): """ Save file content to a local file. :param target_path: Path to save remote file as. :type target_path: str/unicode :param ftype: Select 'view' on file. :type ftype: str """ camfile_p = ffi.new("CameraFile**") with open(target_path, 'wb') as fp: lib.gp_file_new_from_fd(camfile_p, fp.fileno()) lib.gp_camera_file_get( self._cam._cam, self.directory.path.encode(), self.name.encode(), backend.FILE_TYPES[ftype], camfile_p[0], self._cam._ctx)
Save file content to a local file. :param target_path: Path to save remote file as. :type target_path: str/unicode :param ftype: Select 'view' on file. :type ftype: str
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L328-L342
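Sketch: download every file on the camera into the current working directory via save(); assumes gphoto2cffi.Camera() with no arguments and enough local disk space.

import gphoto2cffi as gp

cam = gp.Camera()
for f in cam.list_all_files():
    f.save(f.name)      # File.name is the bare filename, so this writes into the cwd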
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
File.get_data
def get_data(self, ftype='normal'): """ Get file content as a bytestring. :param ftype: Select 'view' on file. :type ftype: str :return: File content :rtype: bytes """ camfile_p = ffi.new("CameraFile**") lib.gp_file_new(camfile_p) lib.gp_camera_file_get( self._cam._cam, self.directory.path.encode(), self.name.encode(), backend.FILE_TYPES[ftype], camfile_p[0], self._cam._ctx) data_p = ffi.new("char**") length_p = ffi.new("unsigned long*") lib.gp_file_get_data_and_size(camfile_p[0], data_p, length_p) byt = bytes(ffi.buffer(data_p[0], length_p[0])) # gphoto2 camera files MUST be freed. lib.gp_file_free(camfile_p[0]) # just to be safe. del data_p, length_p, camfile_p return byt
python
def get_data(self, ftype='normal'): """ Get file content as a bytestring. :param ftype: Select 'view' on file. :type ftype: str :return: File content :rtype: bytes """ camfile_p = ffi.new("CameraFile**") lib.gp_file_new(camfile_p) lib.gp_camera_file_get( self._cam._cam, self.directory.path.encode(), self.name.encode(), backend.FILE_TYPES[ftype], camfile_p[0], self._cam._ctx) data_p = ffi.new("char**") length_p = ffi.new("unsigned long*") lib.gp_file_get_data_and_size(camfile_p[0], data_p, length_p) byt = bytes(ffi.buffer(data_p[0], length_p[0])) # gphoto2 camera files MUST be freed. lib.gp_file_free(camfile_p[0]) # just to be safe. del data_p, length_p, camfile_p return byt
Get file content as a bytestring. :param ftype: Select 'view' on file. :type ftype: str :return: File content :rtype: bytes
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L345-L366
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
File.iter_data
def iter_data(self, chunk_size=2**16, ftype='normal'): """ Get an iterator that yields chunks of the file content. :param chunk_size: Size of yielded chunks in bytes :type chunk_size: int :param ftype: Select 'view' on file. :type ftype: str :return: Iterator """ self._check_type_supported(ftype) buf_p = ffi.new("char[{0}]".format(chunk_size)) size_p = ffi.new("uint64_t*") offset_p = ffi.new("uint64_t*") for chunk_idx in range(int(math.ceil(self.size/chunk_size))): size_p[0] = chunk_size lib.gp_camera_file_read( self._cam._cam, self.directory.path.encode(), self.name.encode(), backend.FILE_TYPES[ftype], offset_p[0], buf_p, size_p, self._cam._ctx) yield ffi.buffer(buf_p, size_p[0])[:]
python
def iter_data(self, chunk_size=2**16, ftype='normal'): """ Get an iterator that yields chunks of the file content. :param chunk_size: Size of yielded chunks in bytes :type chunk_size: int :param ftype: Select 'view' on file. :type ftype: str :return: Iterator """ self._check_type_supported(ftype) buf_p = ffi.new("char[{0}]".format(chunk_size)) size_p = ffi.new("uint64_t*") offset_p = ffi.new("uint64_t*") for chunk_idx in range(int(math.ceil(self.size/chunk_size))): size_p[0] = chunk_size lib.gp_camera_file_read( self._cam._cam, self.directory.path.encode(), self.name.encode(), backend.FILE_TYPES[ftype], offset_p[0], buf_p, size_p, self._cam._ctx) yield ffi.buffer(buf_p, size_p[0])[:]
Get an iterator that yields chunks of the file content. :param chunk_size: Size of yielded chunks in bytes :type chunk_size: int :param ftype: Select 'view' on file. :type ftype: str :return: Iterator
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L369-L388
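Sketch: stream a large capture to disk in 64 KiB chunks instead of holding it in memory; the ".mov" filter and the no-argument Camera() constructor are illustrative assumptions.

import gphoto2cffi as gp

cam = gp.Camera()
movie = next(f for f in cam.list_all_files() if f.name.lower().endswith(".mov"))
with open(movie.name, "wb") as fp:
    for chunk in movie.iter_data(chunk_size=2**16):   # each chunk is plain bytes
        fp.write(chunk)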
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
File.remove
def remove(self): """ Remove file from device. """ lib.gp_camera_file_delete(self._cam._cam, self.directory.path.encode(), self.name.encode(), self._cam._ctx)
python
def remove(self): """ Remove file from device. """ lib.gp_camera_file_delete(self._cam._cam, self.directory.path.encode(), self.name.encode(), self._cam._ctx)
Remove file from device.
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L391-L394
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
ConfigItem.set
def set(self, value): """ Update value of the option. Only possible for options with :py:attr:`readonly` set to `False`. If :py:attr:`type` is `choice`, the value must be one of the :py:attr:`choices`. If :py:attr:`type` is `range`, the value must be in the range described by :py:attr:`range`. :param value: Value to set """ if self.readonly: raise ValueError("Option is read-only.") val_p = None if self.type == 'selection': if value not in self.choices: raise ValueError("Invalid choice (valid: {0})".format( repr(self.choices))) val_p = ffi.new("const char[]", value.encode()) elif self.type == 'text': if not isinstance(value, basestring): raise ValueError("Value must be a string.") val_p = ffi.new("char**") val_p[0] = ffi.new("char[]", value.encode()) elif self.type == 'range': if value < self.range.min or value > self.range.max: raise ValueError("Value exceeds valid range ({0}-{1}." .format(self.range.min, self.range.max)) if value % self.range.step: raise ValueError("Value can only be changed in steps of {0}." .format(self.range.step)) val_p = ffi.new("float*") val_p[0] = value elif self.type == 'toggle': if not isinstance(value, bool): raise ValueError("Value must be bool.") val_p = ffi.new("int*") val_p[0] = int(value) elif self.type == 'date': val_p = ffi.new("int*") val_p[0] = value lib.gp_widget_set_value(self._widget, val_p) lib.gp_camera_set_config(self._cam._cam, self._root, self._cam._ctx) self.value = value
python
def set(self, value): """ Update value of the option. Only possible for options with :py:attr:`readonly` set to `False`. If :py:attr:`type` is `choice`, the value must be one of the :py:attr:`choices`. If :py:attr:`type` is `range`, the value must be in the range described by :py:attr:`range`. :param value: Value to set """ if self.readonly: raise ValueError("Option is read-only.") val_p = None if self.type == 'selection': if value not in self.choices: raise ValueError("Invalid choice (valid: {0})".format( repr(self.choices))) val_p = ffi.new("const char[]", value.encode()) elif self.type == 'text': if not isinstance(value, basestring): raise ValueError("Value must be a string.") val_p = ffi.new("char**") val_p[0] = ffi.new("char[]", value.encode()) elif self.type == 'range': if value < self.range.min or value > self.range.max: raise ValueError("Value exceeds valid range ({0}-{1}." .format(self.range.min, self.range.max)) if value % self.range.step: raise ValueError("Value can only be changed in steps of {0}." .format(self.range.step)) val_p = ffi.new("float*") val_p[0] = value elif self.type == 'toggle': if not isinstance(value, bool): raise ValueError("Value must be bool.") val_p = ffi.new("int*") val_p[0] = int(value) elif self.type == 'date': val_p = ffi.new("int*") val_p[0] = value lib.gp_widget_set_value(self._widget, val_p) lib.gp_camera_set_config(self._cam._cam, self._root, self._cam._ctx) self.value = value
Update value of the option. Only possible for options with :py:attr:`readonly` set to `False`. If :py:attr:`type` is `choice`, the value must be one of the :py:attr:`choices`. If :py:attr:`type` is `range`, the value must be in the range described by :py:attr:`range`. :param value: Value to set
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L468-L511
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.supported_operations
def supported_operations(self): """ All operations supported by the camera. """ return tuple(op for op in backend.CAM_OPS if self._abilities.operations & op)
python
def supported_operations(self): """ All operations supported by the camera. """ return tuple(op for op in backend.CAM_OPS if self._abilities.operations & op)
All operations supported by the camera.
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L571-L574
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.usb_info
def usb_info(self): """ The camera's USB information. """ return UsbInformation(self._abilities.usb_vendor, self._abilities.usb_product, self._abilities.usb_class, self._abilities.usb_subclass, self._abilities.usb_protocol)
python
def usb_info(self): """ The camera's USB information. """ return UsbInformation(self._abilities.usb_vendor, self._abilities.usb_product, self._abilities.usb_class, self._abilities.usb_subclass, self._abilities.usb_protocol)
The camera's USB information.
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L577-L583
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.config
def config(self): """ Writeable configuration parameters. :rtype: dict """ config = self._get_config() return {section: {itm.name: itm for itm in config[section].values() if not itm.readonly} for section in config if 'settings' in section or section == 'other'}
python
def config(self): """ Writeable configuration parameters. :rtype: dict """ config = self._get_config() return {section: {itm.name: itm for itm in config[section].values() if not itm.readonly} for section in config if 'settings' in section or section == 'other'}
Writeable configuration parameters. :rtype: dict
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L591-L600
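Sketch combining Camera.config with ConfigItem.set from above: list the writable options, then change the capture target the same way capture() does internally; assumes gphoto2cffi.Camera() with no arguments, and the available option names vary by camera model.

import gphoto2cffi as gp

cam = gp.Camera()
for section, items in cam.config.items():
    for name, item in items.items():
        print(section, name, item.value)
# 'capturetarget' is a 'selection' item, so the new value must be one of item.choices
cam.config['settings']['capturetarget'].set('Memory card')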
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.status
def status(self): """ Status information (read-only). :rtype: :py:class:`SimpleNamespace` """ config = self._get_config() is_hex = lambda name: (len(name) == 4 and all(c in string.hexdigits for c in name)) out = SimpleNamespace() for sect in config: for itm in config[sect].values(): if (itm.readonly or sect == 'status') and not is_hex(itm.name): setattr(out, itm.name, itm.value) return out
python
def status(self): """ Status information (read-only). :rtype: :py:class:`SimpleNamespace` """ config = self._get_config() is_hex = lambda name: (len(name) == 4 and all(c in string.hexdigits for c in name)) out = SimpleNamespace() for sect in config: for itm in config[sect].values(): if (itm.readonly or sect == 'status') and not is_hex(itm.name): setattr(out, itm.name, itm.value) return out
Status information (read-only). :rtype: :py:class:`SimpleNamespace`
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L603-L616
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.storage_info
def storage_info(self): """ Information about the camera's storage. """ info_p = ffi.new("CameraStorageInformation**") num_info_p = ffi.new("int*") lib.gp_camera_get_storageinfo(self._cam, info_p, num_info_p, self._ctx) infos = [] for idx in range(num_info_p[0]): out = SimpleNamespace() struc = (info_p[0] + idx) fields = struc.fields if lib.GP_STORAGEINFO_BASE & fields: out.directory = next( (d for d in self.list_all_directories() if d.path == ffi.string(struc.basedir).decode()), None) if lib.GP_STORAGEINFO_LABEL & fields: out.label = ffi.string(struc.label).decode() if lib.GP_STORAGEINFO_DESCRIPTION & fields: out.description = ffi.string(struc.description).decode() if lib.GP_STORAGEINFO_STORAGETYPE & fields: stype = struc.type if lib.GP_STORAGEINFO_ST_FIXED_ROM & stype: out.type = 'fixed_rom' elif lib.GP_STORAGEINFO_ST_REMOVABLE_ROM & stype: out.type = 'removable_rom' elif lib.GP_STORAGEINFO_ST_FIXED_RAM & stype: out.type = 'fixed_ram' elif lib.GP_STORAGEINFO_ST_REMOVABLE_RAM & stype: out.type = 'removable_ram' else: out.type = 'unknown' if lib.GP_STORAGEINFO_ACCESS & fields: if lib.GP_STORAGEINFO_AC_READWRITE & struc.access: out.access = 'read-write' elif lib.GP_STORAGEINFO_AC_READONLY & struc.access: out.access = 'read-only' elif lib.GP_STORAGEINFO_AC_READONLY_WITH_DELETE & struc.access: out.access = 'read-delete' if lib.GP_STORAGEINFO_MAXCAPACITY & fields: out.capacity = int(struc.capacitykbytes) if lib.GP_STORAGEINFO_FREESPACEKBYTES & fields: out.free_space = int(struc.freekbytes) if lib.GP_STORAGEINFO_FREESPACEIMAGES & fields: out.remaining_images = int(struc.freeimages) infos.append(out) return infos
python
def storage_info(self): """ Information about the camera's storage. """ info_p = ffi.new("CameraStorageInformation**") num_info_p = ffi.new("int*") lib.gp_camera_get_storageinfo(self._cam, info_p, num_info_p, self._ctx) infos = [] for idx in range(num_info_p[0]): out = SimpleNamespace() struc = (info_p[0] + idx) fields = struc.fields if lib.GP_STORAGEINFO_BASE & fields: out.directory = next( (d for d in self.list_all_directories() if d.path == ffi.string(struc.basedir).decode()), None) if lib.GP_STORAGEINFO_LABEL & fields: out.label = ffi.string(struc.label).decode() if lib.GP_STORAGEINFO_DESCRIPTION & fields: out.description = ffi.string(struc.description).decode() if lib.GP_STORAGEINFO_STORAGETYPE & fields: stype = struc.type if lib.GP_STORAGEINFO_ST_FIXED_ROM & stype: out.type = 'fixed_rom' elif lib.GP_STORAGEINFO_ST_REMOVABLE_ROM & stype: out.type = 'removable_rom' elif lib.GP_STORAGEINFO_ST_FIXED_RAM & stype: out.type = 'fixed_ram' elif lib.GP_STORAGEINFO_ST_REMOVABLE_RAM & stype: out.type = 'removable_ram' else: out.type = 'unknown' if lib.GP_STORAGEINFO_ACCESS & fields: if lib.GP_STORAGEINFO_AC_READWRITE & struc.access: out.access = 'read-write' elif lib.GP_STORAGEINFO_AC_READONLY & struc.access: out.access = 'read-only' elif lib.GP_STORAGEINFO_AC_READONLY_WITH_DELETE & struc.access: out.access = 'read-delete' if lib.GP_STORAGEINFO_MAXCAPACITY & fields: out.capacity = int(struc.capacitykbytes) if lib.GP_STORAGEINFO_FREESPACEKBYTES & fields: out.free_space = int(struc.freekbytes) if lib.GP_STORAGEINFO_FREESPACEIMAGES & fields: out.remaining_images = int(struc.freeimages) infos.append(out) return infos
Information about the camera's storage.
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L625-L670
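Sketch: report free space per storage; assumes gphoto2cffi.Camera() with no arguments and that storage_info is a property like the other Camera accessors (add parentheses if it is a plain method in your version). The getattr defaults guard against fields the camera does not report.

import gphoto2cffi as gp

cam = gp.Camera()
for info in cam.storage_info:
    label = getattr(info, 'label', getattr(info, 'description', '<unnamed>'))
    print(label,
          "free kB:", getattr(info, 'free_space', '?'),
          "capacity kB:", getattr(info, 'capacity', '?'))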
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.list_all_files
def list_all_files(self): """ Utility method that yields all files on the device's file systems. """ def list_files_recursively(directory): f_gen = itertools.chain( directory.files, *tuple(list_files_recursively(d) for d in directory.directories)) for f in f_gen: yield f return list_files_recursively(self.filesystem)
python
def list_all_files(self): """ Utility method that yields all files on the device's file systems. """ def list_files_recursively(directory): f_gen = itertools.chain( directory.files, *tuple(list_files_recursively(d) for d in directory.directories)) for f in f_gen: yield f return list_files_recursively(self.filesystem)
Utility method that yields all files on the device's file systems.
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L672-L683
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.list_all_directories
def list_all_directories(self): """ Utility method that yields all directories on the device's file systems. """ def list_dirs_recursively(directory): if directory == self.filesystem: yield directory d_gen = itertools.chain( directory.directories, *tuple(list_dirs_recursively(d) for d in directory.directories)) for d in d_gen: yield d return list_dirs_recursively(self.filesystem)
python
def list_all_directories(self): """ Utility method that yields all directories on the device's file systems. """ def list_dirs_recursively(directory): if directory == self.filesystem: yield directory d_gen = itertools.chain( directory.directories, *tuple(list_dirs_recursively(d) for d in directory.directories)) for d in d_gen: yield d return list_dirs_recursively(self.filesystem)
Utility method that yields all directories on the device's file systems.
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L685-L698
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.capture
def capture(self, to_camera_storage=False): """ Capture an image. Some cameras (mostly Canon and Nikon) support capturing to internal RAM. On these devices, you have to specify `to_camera_storage` if you want to save the images to the memory card. On devices that do not support saving to RAM, the only difference is that the file is automatically downloaded and deleted when set to `False`. :param to_camera_storage: Save image to the camera's internal storage :type to_camera_storage: bool :return: A :py:class:`File` if `to_camera_storage` was `True`, otherwise the captured image as a bytestring. :rtype: :py:class:`File` or bytes """ target = self.config['settings']['capturetarget'] if to_camera_storage and target.value != "Memory card": target.set("Memory card") elif not to_camera_storage and target.value != "Internal RAM": target.set("Internal RAM") lib.gp_camera_trigger_capture(self._cam, self._ctx) fobj = self._wait_for_event(event_type=lib.GP_EVENT_FILE_ADDED) if to_camera_storage: self._logger.info("File written to storage at {0}.".format(fobj)) return fobj else: data = fobj.get_data() try: fobj.remove() except errors.CameraIOError: # That probably means the file is already gone from RAM, # so nothing to worry about. pass return data
python
def capture(self, to_camera_storage=False): """ Capture an image. Some cameras (mostly Canon and Nikon) support capturing to internal RAM. On these devices, you have to specify `to_camera_storage` if you want to save the images to the memory card. On devices that do not support saving to RAM, the only difference is that the file is automatically downloaded and deleted when set to `False`. :param to_camera_storage: Save image to the camera's internal storage :type to_camera_storage: bool :return: A :py:class:`File` if `to_camera_storage` was `True`, otherwise the captured image as a bytestring. :rtype: :py:class:`File` or bytes """ target = self.config['settings']['capturetarget'] if to_camera_storage and target.value != "Memory card": target.set("Memory card") elif not to_camera_storage and target.value != "Internal RAM": target.set("Internal RAM") lib.gp_camera_trigger_capture(self._cam, self._ctx) fobj = self._wait_for_event(event_type=lib.GP_EVENT_FILE_ADDED) if to_camera_storage: self._logger.info("File written to storage at {0}.".format(fobj)) return fobj else: data = fobj.get_data() try: fobj.remove() except errors.CameraIOError: # That probably means the file is already gone from RAM, # so nothing to worry about. pass return data
Capture an image. Some cameras (mostly Canon and Nikon) support capturing to internal RAM. On these devices, you have to specify `to_camera_storage` if you want to save the images to the memory card. On devices that do not support saving to RAM, the only difference is that the file is automatically downloaded and deleted when set to `False`. :param to_camera_storage: Save image to the camera's internal storage :type to_camera_storage: bool :return: A :py:class:`File` if `to_camera_storage` was `True`, otherwise the captured image as a bytestring. :rtype: :py:class:`File` or bytes
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L701-L735
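Sketch: one capture returned as bytes (the RAM path) and one left on the memory card; assumes gphoto2cffi.Camera() with no arguments, and the local filename is illustrative.

import gphoto2cffi as gp

cam = gp.Camera()
with open("shot.jpg", "wb") as fp:
    fp.write(cam.capture())                    # bytes; the RAM copy is removed
on_card = cam.capture(to_camera_storage=True)  # a File object left on the card
print(on_card.name)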
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.capture_video
def capture_video(self, length): """ Capture a video. This always writes to the memory card, since internal RAM is likely to run out of space very quickly. Currently this only works with Nikon cameras. :param length: Length of the video to capture in seconds. :type length: int :return: Video file :rtype: :py:class:`File` """ with self.capture_video_context() as ctx: time.sleep(length) return ctx.videofile
python
def capture_video(self, length): """ Capture a video. This always writes to the memory card, since internal RAM is likely to run out of space very quickly. Currently this only works with Nikon cameras. :param length: Length of the video to capture in seconds. :type length: int :return: Video file :rtype: :py:class:`File` """ with self.capture_video_context() as ctx: time.sleep(length) return ctx.videofile
Capture a video. This always writes to the memory card, since internal RAM is likely to run out of space very quickly. Currently this only works with Nikon cameras. :param length: Length of the video to capture in seconds. :type length: int :return: Video file :rtype: :py:class:`File`
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L747-L762
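Sketch: record a five-second clip and download it; per the docstring this currently works on Nikon bodies only, and the no-argument Camera() constructor is an assumption.

import gphoto2cffi as gp

cam = gp.Camera()
clip = cam.capture_video(length=5)   # a File left on the memory card
clip.save(clip.name)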
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.get_preview
def get_preview(self): """ Get a preview from the camera's viewport. This will usually be a JPEG image with the dimensions depending on the camera. You will need to call the exit() method manually after you are done capturing a live preview. :return: The preview image as a bytestring :rtype: bytes """ lib.gp_camera_capture_preview(self._cam, self.__camfile_p[0], self._ctx) lib.gp_file_get_data_and_size(self.__camfile_p[0], self.__data_p, self.__length_p) return ffi.buffer(self.__data_p[0], self.__length_p[0])[:]
python
def get_preview(self): """ Get a preview from the camera's viewport. This will usually be a JPEG image with the dimensions depending on the camera. You will need to call the exit() method manually after you are done capturing a live preview. :return: The preview image as a bytestring :rtype: bytes """ lib.gp_camera_capture_preview(self._cam, self.__camfile_p[0], self._ctx) lib.gp_file_get_data_and_size(self.__camfile_p[0], self.__data_p, self.__length_p) return ffi.buffer(self.__data_p[0], self.__length_p[0])[:]
Get a preview from the camera's viewport. This will usually be a JPEG image with the dimensions depending on the camera. You will need to call the exit() method manually after you are done capturing a live preview. :return: The preview image as a bytestring :rtype: bytes
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L764-L776
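Sketch: grab a short burst of liveview frames and then release the camera with exit(), as the docstring requires; assumes gphoto2cffi.Camera() with no arguments.

import gphoto2cffi as gp

cam = gp.Camera()
frames = [cam.get_preview() for _ in range(10)]   # each frame is a JPEG bytestring
cam.exit()
print(len(frames), "frames,", sum(len(f) for f in frames), "bytes total")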
jazzband/django-queued-storage
queued_storage/backends.py
QueuedStorage.get_storage
def get_storage(self, name): """ Returns the storage backend instance responsible for the file with the given name (either local or remote). This method is used in most of the storage API methods. :param name: file name :type name: str :rtype: :class:`~django:django.core.files.storage.Storage` """ cache_result = cache.get(self.get_cache_key(name)) if cache_result: return self.remote elif cache_result is None and self.remote.exists(name): cache.set(self.get_cache_key(name), True) return self.remote else: return self.local
python
def get_storage(self, name): """ Returns the storage backend instance responsible for the file with the given name (either local or remote). This method is used in most of the storage API methods. :param name: file name :type name: str :rtype: :class:`~django:django.core.files.storage.Storage` """ cache_result = cache.get(self.get_cache_key(name)) if cache_result: return self.remote elif cache_result is None and self.remote.exists(name): cache.set(self.get_cache_key(name), True) return self.remote else: return self.local
Returns the storage backend instance responsible for the file with the given name (either local or remote). This method is used in most of the storage API methods. :param name: file name :type name: str :rtype: :class:`~django:django.core.files.storage.Storage`
https://github.com/jazzband/django-queued-storage/blob/f8225d88a01ef5ca8001aeb3f7f80818a022a12d/queued_storage/backends.py#L111-L128
jazzband/django-queued-storage
queued_storage/backends.py
QueuedStorage.open
def open(self, name, mode='rb'): """ Retrieves the specified file from storage. :param name: file name :type name: str :param mode: mode to open the file with :type mode: str :rtype: :class:`~django:django.core.files.File` """ return self.get_storage(name).open(name, mode)
python
def open(self, name, mode='rb'): """ Retrieves the specified file from storage. :param name: file name :type name: str :param mode: mode to open the file with :type mode: str :rtype: :class:`~django:django.core.files.File` """ return self.get_storage(name).open(name, mode)
Retrieves the specified file from storage. :param name: file name :type name: str :param mode: mode to open the file with :type mode: str :rtype: :class:`~django:django.core.files.File`
https://github.com/jazzband/django-queued-storage/blob/f8225d88a01ef5ca8001aeb3f7f80818a022a12d/queued_storage/backends.py#L162-L172
jazzband/django-queued-storage
queued_storage/backends.py
QueuedStorage.save
def save(self, name, content, max_length=None): """ Saves the given content with the given name using the local storage. If the :attr:`~queued_storage.backends.QueuedStorage.delayed` attribute is ``True`` this will automatically call the :meth:`~queued_storage.backends.QueuedStorage.transfer` method queuing the transfer from local to remote storage. :param name: file name :type name: str :param content: content of the file specified by name :type content: :class:`~django:django.core.files.File` :rtype: str """ cache_key = self.get_cache_key(name) cache.set(cache_key, False) # Use a name that is available on both the local and remote storage # systems and save locally. name = self.get_available_name(name) try: name = self.local.save(name, content, max_length=max_length) except TypeError: # Django < 1.10 name = self.local.save(name, content) # Pass on the cache key to prevent duplicate cache key creation, # we save the result in the storage to be able to test for it if not self.delayed: self.result = self.transfer(name, cache_key=cache_key) return name
python
def save(self, name, content, max_length=None): """ Saves the given content with the given name using the local storage. If the :attr:`~queued_storage.backends.QueuedStorage.delayed` attribute is ``True`` this will automatically call the :meth:`~queued_storage.backends.QueuedStorage.transfer` method queuing the transfer from local to remote storage. :param name: file name :type name: str :param content: content of the file specified by name :type content: :class:`~django:django.core.files.File` :rtype: str """ cache_key = self.get_cache_key(name) cache.set(cache_key, False) # Use a name that is available on both the local and remote storage # systems and save locally. name = self.get_available_name(name) try: name = self.local.save(name, content, max_length=max_length) except TypeError: # Django < 1.10 name = self.local.save(name, content) # Pass on the cache key to prevent duplicate cache key creation, # we save the result in the storage to be able to test for it if not self.delayed: self.result = self.transfer(name, cache_key=cache_key) return name
Saves the given content with the given name using the local storage. If the :attr:`~queued_storage.backends.QueuedStorage.delayed` attribute is ``True`` this will automatically call the :meth:`~queued_storage.backends.QueuedStorage.transfer` method queuing the transfer from local to remote storage. :param name: file name :type name: str :param content: content of the file specified by name :type content: :class:`~django:django.core.files.File` :rtype: str
https://github.com/jazzband/django-queued-storage/blob/f8225d88a01ef5ca8001aeb3f7f80818a022a12d/queued_storage/backends.py#L174-L204
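Sketch of wiring QueuedStorage into a Django model field so that save() stores locally and queues the transfer; the local/remote keyword names follow the attributes used by this backend, while the concrete storage class paths (FileSystemStorage, S3Boto3Storage from django-storages) are illustrative choices, not requirements.

from django.db import models
from queued_storage.backends import QueuedStorage

queued_storage = QueuedStorage(
    local='django.core.files.storage.FileSystemStorage',
    remote='storages.backends.s3boto3.S3Boto3Storage')

class Document(models.Model):
    upload = models.FileField(storage=queued_storage, upload_to='docs')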
jazzband/django-queued-storage
queued_storage/backends.py
QueuedStorage.transfer
def transfer(self, name, cache_key=None): """ Transfers the file with the given name to the remote storage backend by queuing the task. :param name: file name :type name: str :param cache_key: the cache key to set after a successful task run :type cache_key: str :rtype: task result """ if cache_key is None: cache_key = self.get_cache_key(name) return self.task.delay(name, cache_key, self.local_path, self.remote_path, self.local_options, self.remote_options)
python
def transfer(self, name, cache_key=None): """ Transfers the file with the given name to the remote storage backend by queuing the task. :param name: file name :type name: str :param cache_key: the cache key to set after a successful task run :type cache_key: str :rtype: task result """ if cache_key is None: cache_key = self.get_cache_key(name) return self.task.delay(name, cache_key, self.local_path, self.remote_path, self.local_options, self.remote_options)
Transfers the file with the given name to the remote storage backend by queuing the task. :param name: file name :type name: str :param cache_key: the cache key to set after a successful task run :type cache_key: str :rtype: task result
https://github.com/jazzband/django-queued-storage/blob/f8225d88a01ef5ca8001aeb3f7f80818a022a12d/queued_storage/backends.py#L206-L221
jazzband/django-queued-storage
queued_storage/backends.py
QueuedStorage.get_available_name
def get_available_name(self, name): """ Returns a filename that's free on both the local and remote storage systems, and available for new content to be written to. :param name: file name :type name: str :rtype: str """ local_available_name = self.local.get_available_name(name) remote_available_name = self.remote.get_available_name(name) if remote_available_name > local_available_name: return remote_available_name return local_available_name
python
def get_available_name(self, name): """ Returns a filename that's free on both the local and remote storage systems, and available for new content to be written to. :param name: file name :type name: str :rtype: str """ local_available_name = self.local.get_available_name(name) remote_available_name = self.remote.get_available_name(name) if remote_available_name > local_available_name: return remote_available_name return local_available_name
Returns a filename that's free on both the local and remote storage systems, and available for new content to be written to. :param name: file name :type name: str :rtype: str
https://github.com/jazzband/django-queued-storage/blob/f8225d88a01ef5ca8001aeb3f7f80818a022a12d/queued_storage/backends.py#L234-L248
mongolab/dex
dex/analyzer.py
QueryAnalyzer.generate_query_report
def generate_query_report(self, db_uri, parsed_query, db_name, collection_name): """Generates a comprehensive report on the raw query""" index_analysis = None recommendation = None namespace = parsed_query['ns'] indexStatus = "unknown" index_cache_entry = self._ensure_index_cache(db_uri, db_name, collection_name) query_analysis = self._generate_query_analysis(parsed_query, db_name, collection_name) if ((query_analysis['analyzedFields'] != []) and query_analysis['supported']): index_analysis = self._generate_index_analysis(query_analysis, index_cache_entry['indexes']) indexStatus = index_analysis['indexStatus'] if index_analysis['indexStatus'] != 'full': recommendation = self._generate_recommendation(query_analysis, db_name, collection_name) # a temporary fix to suppress faulty parsing of $regexes. # if the recommendation cannot be re-parsed into yaml, we assume # it is invalid. if not validate_yaml(recommendation['index']): recommendation = None query_analysis['supported'] = False # QUERY REPORT return OrderedDict({ 'queryMask': parsed_query['queryMask'], 'indexStatus': indexStatus, 'parsed': parsed_query, 'namespace': namespace, 'queryAnalysis': query_analysis, 'indexAnalysis': index_analysis, 'recommendation': recommendation })
python
def generate_query_report(self, db_uri, parsed_query, db_name, collection_name): """Generates a comprehensive report on the raw query""" index_analysis = None recommendation = None namespace = parsed_query['ns'] indexStatus = "unknown" index_cache_entry = self._ensure_index_cache(db_uri, db_name, collection_name) query_analysis = self._generate_query_analysis(parsed_query, db_name, collection_name) if ((query_analysis['analyzedFields'] != []) and query_analysis['supported']): index_analysis = self._generate_index_analysis(query_analysis, index_cache_entry['indexes']) indexStatus = index_analysis['indexStatus'] if index_analysis['indexStatus'] != 'full': recommendation = self._generate_recommendation(query_analysis, db_name, collection_name) # a temporary fix to suppress faulty parsing of $regexes. # if the recommendation cannot be re-parsed into yaml, we assume # it is invalid. if not validate_yaml(recommendation['index']): recommendation = None query_analysis['supported'] = False # QUERY REPORT return OrderedDict({ 'queryMask': parsed_query['queryMask'], 'indexStatus': indexStatus, 'parsed': parsed_query, 'namespace': namespace, 'queryAnalysis': query_analysis, 'indexAnalysis': index_analysis, 'recommendation': recommendation })
Generates a comprehensive report on the raw query
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L47-L88
mongolab/dex
dex/analyzer.py
QueryAnalyzer._ensure_index_cache
def _ensure_index_cache(self, db_uri, db_name, collection_name): """Adds a collections index entries to the cache if not present""" if not self._check_indexes or db_uri is None: return {'indexes': None} if db_name not in self.get_cache(): self._internal_map[db_name] = {} if collection_name not in self._internal_map[db_name]: indexes = [] try: if self._index_cache_connection is None: self._index_cache_connection = pymongo.MongoClient(db_uri, document_class=OrderedDict, read_preference=pymongo.ReadPreference.PRIMARY_PREFERRED) db = self._index_cache_connection[db_name] indexes = db[collection_name].index_information() except: warning = 'Warning: unable to connect to ' + db_uri + "\n" else: internal_map_entry = {'indexes': indexes} self.get_cache()[db_name][collection_name] = internal_map_entry return self.get_cache()[db_name][collection_name]
python
def _ensure_index_cache(self, db_uri, db_name, collection_name): """Adds a collections index entries to the cache if not present""" if not self._check_indexes or db_uri is None: return {'indexes': None} if db_name not in self.get_cache(): self._internal_map[db_name] = {} if collection_name not in self._internal_map[db_name]: indexes = [] try: if self._index_cache_connection is None: self._index_cache_connection = pymongo.MongoClient(db_uri, document_class=OrderedDict, read_preference=pymongo.ReadPreference.PRIMARY_PREFERRED) db = self._index_cache_connection[db_name] indexes = db[collection_name].index_information() except: warning = 'Warning: unable to connect to ' + db_uri + "\n" else: internal_map_entry = {'indexes': indexes} self.get_cache()[db_name][collection_name] = internal_map_entry return self.get_cache()[db_name][collection_name]
Adds a collection's index entries to the cache if not present
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L91-L112
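For reference, the cached value is PyMongo's index_information() mapping; a quick way to inspect its shape against a local server (the database and collection names here are made up):

import pymongo
from collections import OrderedDict

client = pymongo.MongoClient('mongodb://localhost:27017', document_class=OrderedDict)
print(client['mydb']['mycoll'].index_information())
# e.g. {'_id_': {'key': [('_id', 1)], 'v': 2},
#       'status_1_ts_-1': {'key': [('status', 1), ('ts', -1)], 'v': 2}}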
mongolab/dex
dex/analyzer.py
QueryAnalyzer._generate_query_analysis
def _generate_query_analysis(self, parsed_query, db_name, collection_name): """Translates a raw query object into a Dex query analysis""" analyzed_fields = [] field_count = 0 supported = True sort_fields = [] query_mask = None if 'command' in parsed_query and parsed_query['command'] not in SUPPORTED_COMMANDS: supported = False else: #if 'orderby' in parsed_query: sort_component = parsed_query['orderby'] if 'orderby' in parsed_query else [] sort_seq = 0 for key in sort_component: sort_field = {'fieldName': key, 'fieldType': SORT_TYPE, 'seq': sort_seq} sort_fields.append(key) analyzed_fields.append(sort_field) field_count += 1 sort_seq += 1 query_component = parsed_query['query'] if 'query' in parsed_query else {} for key in query_component: if key not in sort_fields: field_type = UNSUPPORTED_TYPE if ((key not in UNSUPPORTED_QUERY_OPERATORS) and (key not in COMPOSITE_QUERY_OPERATORS)): try: if query_component[key] == {}: raise nested_field_list = query_component[key].keys() except: field_type = EQUIV_TYPE else: for nested_field in nested_field_list: if ((nested_field in RANGE_QUERY_OPERATORS) and (nested_field not in UNSUPPORTED_QUERY_OPERATORS)): field_type = RANGE_TYPE else: supported = False field_type = UNSUPPORTED_TYPE break if field_type is UNSUPPORTED_TYPE: supported = False analyzed_field = {'fieldName': key, 'fieldType': field_type} analyzed_fields.append(analyzed_field) field_count += 1 query_mask = parsed_query['queryMask'] # QUERY ANALYSIS return OrderedDict({ 'analyzedFields': analyzed_fields, 'fieldCount': field_count, 'supported': supported, 'queryMask': query_mask })
python
def _generate_query_analysis(self, parsed_query, db_name, collection_name): """Translates a raw query object into a Dex query analysis""" analyzed_fields = [] field_count = 0 supported = True sort_fields = [] query_mask = None if 'command' in parsed_query and parsed_query['command'] not in SUPPORTED_COMMANDS: supported = False else: #if 'orderby' in parsed_query: sort_component = parsed_query['orderby'] if 'orderby' in parsed_query else [] sort_seq = 0 for key in sort_component: sort_field = {'fieldName': key, 'fieldType': SORT_TYPE, 'seq': sort_seq} sort_fields.append(key) analyzed_fields.append(sort_field) field_count += 1 sort_seq += 1 query_component = parsed_query['query'] if 'query' in parsed_query else {} for key in query_component: if key not in sort_fields: field_type = UNSUPPORTED_TYPE if ((key not in UNSUPPORTED_QUERY_OPERATORS) and (key not in COMPOSITE_QUERY_OPERATORS)): try: if query_component[key] == {}: raise nested_field_list = query_component[key].keys() except: field_type = EQUIV_TYPE else: for nested_field in nested_field_list: if ((nested_field in RANGE_QUERY_OPERATORS) and (nested_field not in UNSUPPORTED_QUERY_OPERATORS)): field_type = RANGE_TYPE else: supported = False field_type = UNSUPPORTED_TYPE break if field_type is UNSUPPORTED_TYPE: supported = False analyzed_field = {'fieldName': key, 'fieldType': field_type} analyzed_fields.append(analyzed_field) field_count += 1 query_mask = parsed_query['queryMask'] # QUERY ANALYSIS return OrderedDict({ 'analyzedFields': analyzed_fields, 'fieldCount': field_count, 'supported': supported, 'queryMask': query_mask })
Translates a raw query object into a Dex query analysis
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L115-L177
mongolab/dex
dex/analyzer.py
QueryAnalyzer._generate_index_analysis
def _generate_index_analysis(self, query_analysis, indexes): """Compares a query signature to the index cache to identify complete and partial indexes available to the query""" needs_recommendation = True full_indexes = [] partial_indexes = [] coverage = "unknown" if indexes is not None: for index_key in indexes.keys(): index = indexes[index_key] index_report = self._generate_index_report(index, query_analysis) if index_report['supported'] is True: if index_report['coverage'] == 'full': full_indexes.append(index_report) if index_report['idealOrder']: needs_recommendation = False elif index_report['coverage'] == 'partial': partial_indexes.append(index_report) if len(full_indexes) > 0: coverage = "full" elif (len(partial_indexes)) > 0: coverage = "partial" elif query_analysis['supported']: coverage = "none" # INDEX ANALYSIS return OrderedDict([('indexStatus', coverage), ('fullIndexes', full_indexes), ('partialIndexes', partial_indexes)])
python
def _generate_index_analysis(self, query_analysis, indexes): """Compares a query signature to the index cache to identify complete and partial indexes available to the query""" needs_recommendation = True full_indexes = [] partial_indexes = [] coverage = "unknown" if indexes is not None: for index_key in indexes.keys(): index = indexes[index_key] index_report = self._generate_index_report(index, query_analysis) if index_report['supported'] is True: if index_report['coverage'] == 'full': full_indexes.append(index_report) if index_report['idealOrder']: needs_recommendation = False elif index_report['coverage'] == 'partial': partial_indexes.append(index_report) if len(full_indexes) > 0: coverage = "full" elif (len(partial_indexes)) > 0: coverage = "partial" elif query_analysis['supported']: coverage = "none" # INDEX ANALYSIS return OrderedDict([('indexStatus', coverage), ('fullIndexes', full_indexes), ('partialIndexes', partial_indexes)])
Compares a query signature to the index cache to identify complete and partial indexes available to the query
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L180-L211
mongolab/dex
dex/analyzer.py
QueryAnalyzer._generate_index_report
def _generate_index_report(self, index, query_analysis): """Analyzes an existing index against the results of query analysis""" all_fields = [] equiv_fields = [] sort_fields = [] range_fields = [] for query_field in query_analysis['analyzedFields']: all_fields.append(query_field['fieldName']) if query_field['fieldType'] is EQUIV_TYPE: equiv_fields.append(query_field['fieldName']) elif query_field['fieldType'] is SORT_TYPE: sort_fields.append(query_field['fieldName']) elif query_field['fieldType'] is RANGE_TYPE: range_fields.append(query_field['fieldName']) max_equiv_seq = len(equiv_fields) max_sort_seq = max_equiv_seq + len(sort_fields) max_range_seq = max_sort_seq + len(range_fields) coverage = 'none' query_fields_covered = 0 query_field_count = query_analysis['fieldCount'] supported = True ideal_order = True for index_field in index['key']: field_name = index_field[0] if index_field[1] == '2d': supported = False break if field_name not in all_fields: break if query_fields_covered == 0: coverage = 'partial' if query_fields_covered < max_equiv_seq: if field_name not in equiv_fields: ideal_order = False elif query_fields_covered < max_sort_seq: if field_name not in sort_fields: ideal_order = False elif query_fields_covered < max_range_seq: if field_name not in range_fields: ideal_order = False query_fields_covered += 1 if query_fields_covered == query_field_count: coverage = 'full' # INDEX REPORT return OrderedDict({ 'coverage': coverage, 'idealOrder': ideal_order, 'queryFieldsCovered': query_fields_covered, 'index': index, 'supported': supported })
python
def _generate_index_report(self, index, query_analysis): """Analyzes an existing index against the results of query analysis""" all_fields = [] equiv_fields = [] sort_fields = [] range_fields = [] for query_field in query_analysis['analyzedFields']: all_fields.append(query_field['fieldName']) if query_field['fieldType'] is EQUIV_TYPE: equiv_fields.append(query_field['fieldName']) elif query_field['fieldType'] is SORT_TYPE: sort_fields.append(query_field['fieldName']) elif query_field['fieldType'] is RANGE_TYPE: range_fields.append(query_field['fieldName']) max_equiv_seq = len(equiv_fields) max_sort_seq = max_equiv_seq + len(sort_fields) max_range_seq = max_sort_seq + len(range_fields) coverage = 'none' query_fields_covered = 0 query_field_count = query_analysis['fieldCount'] supported = True ideal_order = True for index_field in index['key']: field_name = index_field[0] if index_field[1] == '2d': supported = False break if field_name not in all_fields: break if query_fields_covered == 0: coverage = 'partial' if query_fields_covered < max_equiv_seq: if field_name not in equiv_fields: ideal_order = False elif query_fields_covered < max_sort_seq: if field_name not in sort_fields: ideal_order = False elif query_fields_covered < max_range_seq: if field_name not in range_fields: ideal_order = False query_fields_covered += 1 if query_fields_covered == query_field_count: coverage = 'full' # INDEX REPORT return OrderedDict({ 'coverage': coverage, 'idealOrder': ideal_order, 'queryFieldsCovered': query_fields_covered, 'index': index, 'supported': supported })
Analyzes an existing index against the results of query analysis
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L214-L273
mongolab/dex
dex/analyzer.py
QueryAnalyzer._generate_recommendation
def _generate_recommendation(self, query_analysis, db_name, collection_name): """Generates an ideal query recommendation""" index_rec = '{' for query_field in query_analysis['analyzedFields']: if query_field['fieldType'] is EQUIV_TYPE: if len(index_rec) is not 1: index_rec += ', ' index_rec += '"' + query_field['fieldName'] + '": 1' for query_field in query_analysis['analyzedFields']: if query_field['fieldType'] is SORT_TYPE: if len(index_rec) is not 1: index_rec += ', ' index_rec += '"' + query_field['fieldName'] + '": 1' for query_field in query_analysis['analyzedFields']: if query_field['fieldType'] is RANGE_TYPE: if len(index_rec) is not 1: index_rec += ', ' index_rec += '"' + query_field['fieldName'] + '": 1' index_rec += '}' # RECOMMENDATION return OrderedDict([('index',index_rec), ('shellCommand', self.generate_shell_command(collection_name, index_rec))])
python
def _generate_recommendation(self, query_analysis, db_name, collection_name): """Generates an ideal query recommendation""" index_rec = '{' for query_field in query_analysis['analyzedFields']: if query_field['fieldType'] is EQUIV_TYPE: if len(index_rec) is not 1: index_rec += ', ' index_rec += '"' + query_field['fieldName'] + '": 1' for query_field in query_analysis['analyzedFields']: if query_field['fieldType'] is SORT_TYPE: if len(index_rec) is not 1: index_rec += ', ' index_rec += '"' + query_field['fieldName'] + '": 1' for query_field in query_analysis['analyzedFields']: if query_field['fieldType'] is RANGE_TYPE: if len(index_rec) is not 1: index_rec += ', ' index_rec += '"' + query_field['fieldName'] + '": 1' index_rec += '}' # RECOMMENDATION return OrderedDict([('index',index_rec), ('shellCommand', self.generate_shell_command(collection_name, index_rec))])
Generates an ideal query recommendation
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L276-L301
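A standalone sketch of the ordering rule applied above (equality fields first, then sort fields, then range fields); the field names and type labels are made up, and this mirrors rather than calls Dex.

analyzed = [
    {'fieldName': 'score',      'fieldType': 'RANGE'},
    {'fieldName': 'status',     'fieldType': 'EQUIV'},
    {'fieldName': 'created_at', 'fieldType': 'SORT'},
]
order = {'EQUIV': 0, 'SORT': 1, 'RANGE': 2}
fields = sorted(analyzed, key=lambda f: order[f['fieldType']])
index_rec = '{' + ', '.join('"%s": 1' % f['fieldName'] for f in fields) + '}'
print(index_rec)   # {"status": 1, "created_at": 1, "score": 1}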
mongolab/dex
dex/analyzer.py
ReportAggregation.add_query_occurrence
def add_query_occurrence(self, report): """Adds a report to the report aggregation""" initial_millis = int(report['parsed']['stats']['millis']) mask = report['queryMask'] existing_report = self._get_existing_report(mask, report) if existing_report is not None: self._merge_report(existing_report, report) else: time = None if 'ts' in report['parsed']: time = report['parsed']['ts'] self._reports.append(OrderedDict([ ('namespace', report['namespace']), ('lastSeenDate', time), ('queryMask', mask), ('supported', report['queryAnalysis']['supported']), ('indexStatus', report['indexStatus']), ('recommendation', report['recommendation']), ('stats', OrderedDict([('count', 1), ('totalTimeMillis', initial_millis), ('avgTimeMillis', initial_millis)]))]))
python
def add_query_occurrence(self, report): """Adds a report to the report aggregation""" initial_millis = int(report['parsed']['stats']['millis']) mask = report['queryMask'] existing_report = self._get_existing_report(mask, report) if existing_report is not None: self._merge_report(existing_report, report) else: time = None if 'ts' in report['parsed']: time = report['parsed']['ts'] self._reports.append(OrderedDict([ ('namespace', report['namespace']), ('lastSeenDate', time), ('queryMask', mask), ('supported', report['queryAnalysis']['supported']), ('indexStatus', report['indexStatus']), ('recommendation', report['recommendation']), ('stats', OrderedDict([('count', 1), ('totalTimeMillis', initial_millis), ('avgTimeMillis', initial_millis)]))]))
Adds a report to the report aggregation
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L327-L350
mongolab/dex
dex/analyzer.py
ReportAggregation.get_reports
def get_reports(self): """Returns a minimized version of the aggregation""" return sorted(self._reports, key=lambda x: x['stats']['totalTimeMillis'], reverse=True)
python
def get_reports(self): """Returns a minimized version of the aggregation""" return sorted(self._reports, key=lambda x: x['stats']['totalTimeMillis'], reverse=True)
Returns a minimized version of the aggregation
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L353-L357
mongolab/dex
dex/analyzer.py
ReportAggregation._get_existing_report
def _get_existing_report(self, mask, report): """Returns the aggregated report that matches report""" for existing_report in self._reports: if existing_report['namespace'] == report['namespace']: if mask == existing_report['queryMask']: return existing_report return None
python
def _get_existing_report(self, mask, report): """Returns the aggregated report that matches report""" for existing_report in self._reports: if existing_report['namespace'] == report['namespace']: if mask == existing_report['queryMask']: return existing_report return None
Returns the aggregated report that matches report
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L360-L366
mongolab/dex
dex/analyzer.py
ReportAggregation._merge_report
def _merge_report(self, target, new): """Merges a new report into the target report""" time = None if 'ts' in new['parsed']: time = new['parsed']['ts'] if (target.get('lastSeenDate', None) and time and target['lastSeenDate'] < time): target['lastSeenDate'] = time query_millis = int(new['parsed']['stats']['millis']) target['stats']['totalTimeMillis'] += query_millis target['stats']['count'] += 1 target['stats']['avgTimeMillis'] = target['stats']['totalTimeMillis'] / target['stats']['count']
python
def _merge_report(self, target, new): """Merges a new report into the target report""" time = None if 'ts' in new['parsed']: time = new['parsed']['ts'] if (target.get('lastSeenDate', None) and time and target['lastSeenDate'] < time): target['lastSeenDate'] = time query_millis = int(new['parsed']['stats']['millis']) target['stats']['totalTimeMillis'] += query_millis target['stats']['count'] += 1 target['stats']['avgTimeMillis'] = target['stats']['totalTimeMillis'] / target['stats']['count']
Merges a new report into the target report
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L369-L383
mongolab/dex
dex/parsers.py
Parser.parse
def parse(self, input):
    """Passes input to each QueryLineHandler in use"""
    query = None
    for handler in self._line_handlers:
        try:
            query = handler.handle(input)
        except Exception as e:
            query = None
        finally:
            if query is not None:
                return query
    return None
python
def parse(self, input):
    """Passes input to each QueryLineHandler in use"""
    query = None
    for handler in self._line_handlers:
        try:
            query = handler.handle(input)
        except Exception as e:
            query = None
        finally:
            if query is not None:
                return query
    return None
Passes input to each QueryLineHandler in use
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/parsers.py#L67-L78
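For illustration only, a toy QueryLineHandler-style object that parse() could dispatch to; the real handlers in dex/parsers.py are regex-based and considerably more involved:

import re

class ToyQueryLineHandler(object):
    """Returns a dict when the line looks like a slow-query log line, else None."""
    pattern = re.compile(r'query: (?P<query>\{.*\}).* (?P<millis>\d+)ms')

    def handle(self, line):
        match = self.pattern.search(line)
        if match is None:
            return None
        return {'query': match.group('query'),
                'stats': {'millis': match.group('millis')}}

# parse() walks its handlers in order, swallows handler exceptions,
# and returns the first non-None result.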
mongolab/dex
dex/dex.py
Dex.generate_query_report
def generate_query_report(self, db_uri, query, db_name, collection_name):
    """Analyzes a single query"""
    return self._query_analyzer.generate_query_report(db_uri,
                                                      query,
                                                      db_name,
                                                      collection_name)
python
def generate_query_report(self, db_uri, query, db_name, collection_name):
    """Analyzes a single query"""
    return self._query_analyzer.generate_query_report(db_uri,
                                                      query,
                                                      db_name,
                                                      collection_name)
Analyzes a single query
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L77-L82
mongolab/dex
dex/dex.py
Dex.analyze_profile
def analyze_profile(self):
    """Analyzes queries from a given log file"""
    profile_parser = ProfileParser()
    databases = self._get_requested_databases()
    connection = pymongo.MongoClient(self._db_uri,
                                     document_class=OrderedDict,
                                     read_preference=pymongo.ReadPreference.PRIMARY_PREFERRED)

    if databases == []:
        try:
            databases = connection.database_names()
        except:
            message = "Error: Could not list databases on server. Please " \
                      + "check the auth components of your URI or provide " \
                      + "a namespace filter with -n.\n"
            sys.stderr.write(message)
            databases = []

        for ignore_db in IGNORE_DBS:
            if ignore_db in databases:
                databases.remove(ignore_db)

    for database in databases:
        db = connection[database]
        profile_entries = db['system.profile'].find()

        for profile_entry in profile_entries:
            self._process_query(profile_entry, profile_parser)

    self._output_aggregated_report(sys.stdout)

    return 0
python
def analyze_profile(self):
    """Analyzes queries from a given log file"""
    profile_parser = ProfileParser()
    databases = self._get_requested_databases()
    connection = pymongo.MongoClient(self._db_uri,
                                     document_class=OrderedDict,
                                     read_preference=pymongo.ReadPreference.PRIMARY_PREFERRED)

    if databases == []:
        try:
            databases = connection.database_names()
        except:
            message = "Error: Could not list databases on server. Please " \
                      + "check the auth components of your URI or provide " \
                      + "a namespace filter with -n.\n"
            sys.stderr.write(message)
            databases = []

        for ignore_db in IGNORE_DBS:
            if ignore_db in databases:
                databases.remove(ignore_db)

    for database in databases:
        db = connection[database]
        profile_entries = db['system.profile'].find()

        for profile_entry in profile_entries:
            self._process_query(profile_entry, profile_parser)

    self._output_aggregated_report(sys.stdout)

    return 0
Analyzes queries from a given log file
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L132-L166
mongolab/dex
dex/dex.py
Dex.watch_profile
def watch_profile(self):
    """Analyzes queries from a given log file"""
    profile_parser = ProfileParser()
    databases = self._get_requested_databases()
    connection = pymongo.MongoClient(self._db_uri,
                                     document_class=OrderedDict,
                                     read_preference=pymongo.ReadPreference.PRIMARY_PREFERRED)
    enabled_profile = False

    if databases == []:
        try:
            databases = connection.database_names()
        except:
            message = "Error: Could not list databases on server. Please " \
                      + "check the auth components of your URI.\n"
            sys.stderr.write(message)
            databases = []

        for ignore_db in IGNORE_DBS:
            if ignore_db in databases:
                databases.remove(ignore_db)

    if len(databases) != 1:
        message = "Error: Please use namespaces (-n) to specify a single " \
                  + "database for profile watching.\n"
        sys.stderr.write(message)
        return 1

    database = databases[0]
    db = connection[database]

    initial_profile_level = db.profiling_level()

    if initial_profile_level is pymongo.OFF:
        message = "Profile level currently 0. Dex is setting profile " \
                  + "level 1. To run --watch at profile level 2, " \
                  + "enable profile level 2 before running Dex.\n"
        sys.stderr.write(message)
        db.set_profiling_level(DEFAULT_PROFILE_LEVEL)

    output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS
    try:
        for profile_entry in self._tail_profile(db, WATCH_INTERVAL_SECONDS):
            self._process_query(profile_entry, profile_parser)
            if time.time() >= output_time:
                self._output_aggregated_report(sys.stderr)
                output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS
    except KeyboardInterrupt:
        sys.stderr.write("Interrupt received\n")
    finally:
        self._output_aggregated_report(sys.stdout)
        if initial_profile_level is pymongo.OFF:
            message = "Dex is resetting profile level to initial value " \
                      + "of 0. You may wish to drop the system.profile " \
                      + "collection.\n"
            sys.stderr.write(message)
            db.set_profiling_level(initial_profile_level)

    return 0
python
def watch_profile(self):
    """Analyzes queries from a given log file"""
    profile_parser = ProfileParser()
    databases = self._get_requested_databases()
    connection = pymongo.MongoClient(self._db_uri,
                                     document_class=OrderedDict,
                                     read_preference=pymongo.ReadPreference.PRIMARY_PREFERRED)
    enabled_profile = False

    if databases == []:
        try:
            databases = connection.database_names()
        except:
            message = "Error: Could not list databases on server. Please " \
                      + "check the auth components of your URI.\n"
            sys.stderr.write(message)
            databases = []

        for ignore_db in IGNORE_DBS:
            if ignore_db in databases:
                databases.remove(ignore_db)

    if len(databases) != 1:
        message = "Error: Please use namespaces (-n) to specify a single " \
                  + "database for profile watching.\n"
        sys.stderr.write(message)
        return 1

    database = databases[0]
    db = connection[database]

    initial_profile_level = db.profiling_level()

    if initial_profile_level is pymongo.OFF:
        message = "Profile level currently 0. Dex is setting profile " \
                  + "level 1. To run --watch at profile level 2, " \
                  + "enable profile level 2 before running Dex.\n"
        sys.stderr.write(message)
        db.set_profiling_level(DEFAULT_PROFILE_LEVEL)

    output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS
    try:
        for profile_entry in self._tail_profile(db, WATCH_INTERVAL_SECONDS):
            self._process_query(profile_entry, profile_parser)
            if time.time() >= output_time:
                self._output_aggregated_report(sys.stderr)
                output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS
    except KeyboardInterrupt:
        sys.stderr.write("Interrupt received\n")
    finally:
        self._output_aggregated_report(sys.stdout)
        if initial_profile_level is pymongo.OFF:
            message = "Dex is resetting profile level to initial value " \
                      + "of 0. You may wish to drop the system.profile " \
                      + "collection.\n"
            sys.stderr.write(message)
            db.set_profiling_level(initial_profile_level)

    return 0
Analyzes queries from a given log file
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L169-L228
mongolab/dex
dex/dex.py
Dex.analyze_logfile
def analyze_logfile(self, logfile_path):
    self._run_stats['logSource'] = logfile_path
    """Analyzes queries from a given log file"""
    with open(logfile_path) as obj:
        self.analyze_logfile_object(obj)

    self._output_aggregated_report(sys.stdout)

    return 0
python
def analyze_logfile(self, logfile_path):
    self._run_stats['logSource'] = logfile_path
    """Analyzes queries from a given log file"""
    with open(logfile_path) as obj:
        self.analyze_logfile_object(obj)

    self._output_aggregated_report(sys.stdout)

    return 0
Analyzes queries from a given log file
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L231-L239
mongolab/dex
dex/dex.py
Dex.analyze_logfile_object
def analyze_logfile_object(self, file_object):
    """Analyzes queries from a given log file"""
    log_parser = LogParser()

    if self._start_time is None:
        self._start_time = datetime.now()

    if self._timeout != 0:
        self._end_time = self._start_time + timedelta(minutes=self._timeout)
    else:
        self._end_time = None

    # For each line in the logfile ...
    for line in file_object:
        if self._end_time is not None and datetime.now() > self._end_time:
            self._run_stats['timedOut'] = True
            self._run_stats['timeoutInMinutes'] = self._timeout
            break
        self._process_query(line, log_parser)
    return 0
python
def analyze_logfile_object(self, file_object):
    """Analyzes queries from a given log file"""
    log_parser = LogParser()

    if self._start_time is None:
        self._start_time = datetime.now()

    if self._timeout != 0:
        self._end_time = self._start_time + timedelta(minutes=self._timeout)
    else:
        self._end_time = None

    # For each line in the logfile ...
    for line in file_object:
        if self._end_time is not None and datetime.now() > self._end_time:
            self._run_stats['timedOut'] = True
            self._run_stats['timeoutInMinutes'] = self._timeout
            break
        self._process_query(line, log_parser)
    return 0
Analyzes queries from a given log file
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L242-L261
mongolab/dex
dex/dex.py
Dex.watch_logfile
def watch_logfile(self, logfile_path):
    """Analyzes queries from the tail of a given log file"""
    self._run_stats['logSource'] = logfile_path
    log_parser = LogParser()

    # For each new line in the logfile ...
    output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS
    try:
        firstLine = True
        for line in self._tail_file(open(logfile_path),
                                    WATCH_INTERVAL_SECONDS):
            if firstLine:
                self._run_stats['timeRange']['start'] = get_line_time(line)
            self._process_query(line, log_parser)
            self._run_stats['timeRange']['end'] = get_line_time(line)
            if time.time() >= output_time:
                self._output_aggregated_report(sys.stderr)
                output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS
    except KeyboardInterrupt:
        sys.stderr.write("Interrupt received\n")
    finally:
        self._output_aggregated_report(sys.stdout)

    return 0
python
def watch_logfile(self, logfile_path):
    """Analyzes queries from the tail of a given log file"""
    self._run_stats['logSource'] = logfile_path
    log_parser = LogParser()

    # For each new line in the logfile ...
    output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS
    try:
        firstLine = True
        for line in self._tail_file(open(logfile_path),
                                    WATCH_INTERVAL_SECONDS):
            if firstLine:
                self._run_stats['timeRange']['start'] = get_line_time(line)
            self._process_query(line, log_parser)
            self._run_stats['timeRange']['end'] = get_line_time(line)
            if time.time() >= output_time:
                self._output_aggregated_report(sys.stderr)
                output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS
    except KeyboardInterrupt:
        sys.stderr.write("Interrupt received\n")
    finally:
        self._output_aggregated_report(sys.stdout)

    return 0
Analyzes queries from the tail of a given log file
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L264-L287
mongolab/dex
dex/dex.py
Dex._tail_file
def _tail_file(self, file, interval):
    """Tails a file"""
    file.seek(0, 2)
    while True:
        where = file.tell()
        line = file.readline()
        if not line:
            time.sleep(interval)
            file.seek(where)
        else:
            yield line
python
def _tail_file(self, file, interval):
    """Tails a file"""
    file.seek(0, 2)
    while True:
        where = file.tell()
        line = file.readline()
        if not line:
            time.sleep(interval)
            file.seek(where)
        else:
            yield line
Tails a file
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L316-L326
mongolab/dex
dex/dex.py
Dex._tail_profile
def _tail_profile(self, db, interval):
    """Tails the system.profile collection"""
    latest_doc = None
    while latest_doc is None:
        time.sleep(interval)
        latest_doc = db['system.profile'].find_one()

    current_time = latest_doc['ts']

    while True:
        time.sleep(interval)
        cursor = db['system.profile'].find({'ts': {'$gte': current_time}}).sort('ts', pymongo.ASCENDING)
        for doc in cursor:
            current_time = doc['ts']
            yield doc
python
def _tail_profile(self, db, interval):
    """Tails the system.profile collection"""
    latest_doc = None
    while latest_doc is None:
        time.sleep(interval)
        latest_doc = db['system.profile'].find_one()

    current_time = latest_doc['ts']

    while True:
        time.sleep(interval)
        cursor = db['system.profile'].find({'ts': {'$gte': current_time}}).sort('ts', pymongo.ASCENDING)
        for doc in cursor:
            current_time = doc['ts']
            yield doc
Tails the system.profile collection
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L329-L343
mongolab/dex
dex/dex.py
Dex._tuplefy_namespace
def _tuplefy_namespace(self, namespace):
    """Converts a mongodb namespace to a db, collection tuple"""
    namespace_split = namespace.split('.', 1)
    if len(namespace_split) is 1:
        # we treat a single element as a collection name.
        # this also properly tuplefies '*'
        namespace_tuple = ('*', namespace_split[0])
    elif len(namespace_split) is 2:
        namespace_tuple = (namespace_split[0], namespace_split[1])
    else:
        return None
    return namespace_tuple
python
def _tuplefy_namespace(self, namespace):
    """Converts a mongodb namespace to a db, collection tuple"""
    namespace_split = namespace.split('.', 1)
    if len(namespace_split) is 1:
        # we treat a single element as a collection name.
        # this also properly tuplefies '*'
        namespace_tuple = ('*', namespace_split[0])
    elif len(namespace_split) is 2:
        namespace_tuple = (namespace_split[0], namespace_split[1])
    else:
        return None
    return namespace_tuple
Converts a mongodb namespace to a db, collection tuple
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L347-L358
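Some illustrative expectations for the tuplefying rules above (hypothetical examples, not taken from the dex test suite):

# _tuplefy_namespace('mydb.sessions')       -> ('mydb', 'sessions')
# _tuplefy_namespace('mydb.system.profile') -> ('mydb', 'system.profile')  # split('.', 1) keeps the remainder intact
# _tuplefy_namespace('sessions')            -> ('*', 'sessions')           # a bare name is treated as a collection
# _tuplefy_namespace('*')                   -> ('*', '*')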
mongolab/dex
dex/dex.py
Dex._validate_namespaces
def _validate_namespaces(self, input_namespaces):
    """Converts a list of db namespaces to a list of namespace tuples,
    supporting basic commandline wildcards"""
    output_namespaces = []
    if input_namespaces == []:
        return output_namespaces
    elif '*' in input_namespaces:
        if len(input_namespaces) > 1:
            warning = 'Warning: Multiple namespaces are '
            warning += 'ignored when one namespace is "*"\n'
            sys.stderr.write(warning)
        return output_namespaces
    else:
        for namespace in input_namespaces:
            if not isinstance(namespace, unicode):
                namespace = unicode(namespace)
            namespace_tuple = self._tuplefy_namespace(namespace)
            if namespace_tuple is None:
                warning = 'Warning: Invalid namespace ' + namespace
                warning += ' will be ignored\n'
                sys.stderr.write(warning)
            else:
                if namespace_tuple not in output_namespaces:
                    output_namespaces.append(namespace_tuple)
                else:
                    warning = 'Warning: Duplicate namespace ' + namespace
                    warning += ' will be ignored\n'
                    sys.stderr.write(warning)
    return output_namespaces
python
def _validate_namespaces(self, input_namespaces):
    """Converts a list of db namespaces to a list of namespace tuples,
    supporting basic commandline wildcards"""
    output_namespaces = []
    if input_namespaces == []:
        return output_namespaces
    elif '*' in input_namespaces:
        if len(input_namespaces) > 1:
            warning = 'Warning: Multiple namespaces are '
            warning += 'ignored when one namespace is "*"\n'
            sys.stderr.write(warning)
        return output_namespaces
    else:
        for namespace in input_namespaces:
            if not isinstance(namespace, unicode):
                namespace = unicode(namespace)
            namespace_tuple = self._tuplefy_namespace(namespace)
            if namespace_tuple is None:
                warning = 'Warning: Invalid namespace ' + namespace
                warning += ' will be ignored\n'
                sys.stderr.write(warning)
            else:
                if namespace_tuple not in output_namespaces:
                    output_namespaces.append(namespace_tuple)
                else:
                    warning = 'Warning: Duplicate namespace ' + namespace
                    warning += ' will be ignored\n'
                    sys.stderr.write(warning)
    return output_namespaces
Converts a list of db namespaces to a list of namespace tuples, supporting basic commandline wildcards
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L362-L390
mongolab/dex
dex/dex.py
Dex._namespace_requested
def _namespace_requested(self, namespace):
    """Checks whether the requested_namespaces contain the provided namespace"""
    if namespace is None:
        return False
    namespace_tuple = self._tuplefy_namespace(namespace)
    if namespace_tuple[0] in IGNORE_DBS:
        return False
    elif namespace_tuple[1] in IGNORE_COLLECTIONS:
        return False
    else:
        return self._tuple_requested(namespace_tuple)
python
def _namespace_requested(self, namespace):
    """Checks whether the requested_namespaces contain the provided namespace"""
    if namespace is None:
        return False
    namespace_tuple = self._tuplefy_namespace(namespace)
    if namespace_tuple[0] in IGNORE_DBS:
        return False
    elif namespace_tuple[1] in IGNORE_COLLECTIONS:
        return False
    else:
        return self._tuple_requested(namespace_tuple)
Checks whether the requested_namespaces contain the provided namespace
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L393-L404
mongolab/dex
dex/dex.py
Dex._tuple_requested
def _tuple_requested(self, namespace_tuple):
    """Helper for _namespace_requested. Supports limited wildcards"""
    if not isinstance(namespace_tuple[0], unicode):
        encoded_db = unicode(namespace_tuple[0])
    else:
        encoded_db = namespace_tuple[0]
    if not isinstance(namespace_tuple[1], unicode):
        encoded_coll = unicode(namespace_tuple[1])
    else:
        encoded_coll = namespace_tuple[1]

    if namespace_tuple is None:
        return False
    elif len(self._requested_namespaces) is 0:
        return True
    for requested_namespace in self._requested_namespaces:
        if ((((requested_namespace[0]) == u'*') or
             (encoded_db == requested_namespace[0])) and
            (((requested_namespace[1]) == u'*') or
             (encoded_coll == requested_namespace[1]))):
            return True
    return False
python
def _tuple_requested(self, namespace_tuple):
    """Helper for _namespace_requested. Supports limited wildcards"""
    if not isinstance(namespace_tuple[0], unicode):
        encoded_db = unicode(namespace_tuple[0])
    else:
        encoded_db = namespace_tuple[0]
    if not isinstance(namespace_tuple[1], unicode):
        encoded_coll = unicode(namespace_tuple[1])
    else:
        encoded_coll = namespace_tuple[1]

    if namespace_tuple is None:
        return False
    elif len(self._requested_namespaces) is 0:
        return True
    for requested_namespace in self._requested_namespaces:
        if ((((requested_namespace[0]) == u'*') or
             (encoded_db == requested_namespace[0])) and
            (((requested_namespace[1]) == u'*') or
             (encoded_coll == requested_namespace[1]))):
            return True
    return False
Helper for _namespace_requested. Supports limited wildcards
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L407-L428
mongolab/dex
dex/dex.py
Dex._get_requested_databases
def _get_requested_databases(self):
    """Returns a list of databases requested, not including ignored dbs"""
    requested_databases = []
    if ((self._requested_namespaces is not None) and
            (self._requested_namespaces != [])):
        for requested_namespace in self._requested_namespaces:
            if requested_namespace[0] is '*':
                return []
            elif requested_namespace[0] not in IGNORE_DBS:
                requested_databases.append(requested_namespace[0])
    return requested_databases
python
def _get_requested_databases(self):
    """Returns a list of databases requested, not including ignored dbs"""
    requested_databases = []
    if ((self._requested_namespaces is not None) and
            (self._requested_namespaces != [])):
        for requested_namespace in self._requested_namespaces:
            if requested_namespace[0] is '*':
                return []
            elif requested_namespace[0] not in IGNORE_DBS:
                requested_databases.append(requested_namespace[0])
    return requested_databases
Returns a list of databases requested, not including ignored dbs
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L431-L441
rakanalh/pocket-api
pocket/__init__.py
Pocket.retrieve
def retrieve(self, state=None, favorite=None, tag=None,
             contentType=None, sort=None, detailType=None,
             search=None, domain=None, since=None, count=None,
             offset=None):
    """
    Retrieve the list of your articles
    See: https://getpocket.com/developer/docs/v3/retrieve

    :param state: filter by state
    :param favorite: only fetch favorite
    :param tag: filter by tag or _untagged_
    :param contentType: get article, video or image
    :param sort: sort by provided value
    :param detailType: defines the response details to return
    :param search: search term
    :param domain: search domain
    :param since: search modified since unix timestamp
    :param count: the number of required items
    :param offset: the position to start results from

    :return: A dictionary containing the response result
    :rtype: dict
    """
    return self._make_request('get')
python
def retrieve(self, state=None, favorite=None, tag=None,
             contentType=None, sort=None, detailType=None,
             search=None, domain=None, since=None, count=None,
             offset=None):
    """
    Retrieve the list of your articles
    See: https://getpocket.com/developer/docs/v3/retrieve

    :param state: filter by state
    :param favorite: only fetch favorite
    :param tag: filter by tag or _untagged_
    :param contentType: get article, video or image
    :param sort: sort by provided value
    :param detailType: defines the response details to return
    :param search: search term
    :param domain: search domain
    :param since: search modified since unix timestamp
    :param count: the number of required items
    :param offset: the position to start results from

    :return: A dictionary containing the response result
    :rtype: dict
    """
    return self._make_request('get')
Retrieve the list of your articles
See: https://getpocket.com/developer/docs/v3/retrieve

:param state: filter by state
:param favorite: only fetch favorite
:param tag: filter by tag or _untagged_
:param contentType: get article, video or image
:param sort: sort by provided value
:param detailType: defines the response details to return
:param search: search term
:param domain: search domain
:param since: search modified since unix timestamp
:param count: the number of required items
:param offset: the position to start results from

:return: A dictionary containing the response result
:rtype: dict
https://github.com/rakanalh/pocket-api/blob/d8222dd34e3aa5e545f9b8ba407fa277c734ab82/pocket/__init__.py#L39-L59
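A hedged usage sketch for the method above; the constructor keyword names are assumptions about the pocket-api client, and the credentials are placeholders:

from pocket import Pocket

# placeholder credentials; real ones come from https://getpocket.com/developer/
pocket = Pocket(consumer_key='<consumer-key>', access_token='<access-token>')

# every argument is optional; unset ones are filtered out before the request is sent
articles = pocket.retrieve(state='unread', tag='python', count=10)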
rakanalh/pocket-api
pocket/__init__.py
Pocket.bulk_add
def bulk_add(self, item_id, ref_id=None, tags=None, time=None,
             title=None, url=None):
    """
    Add an item to list
    See: https://getpocket.com/developer/docs/v3/modify

    :param item_id: int
    :param ref_id: tweet_id
    :param tags: list of tags
    :param time: time of action
    :param title: given title
    :param url: item url

    :return: self for chaining
    :rtype: Pocket
    """
    self._add_action('add')
    return self
python
def bulk_add(self, item_id, ref_id=None, tags=None, time=None,
             title=None, url=None):
    """
    Add an item to list
    See: https://getpocket.com/developer/docs/v3/modify

    :param item_id: int
    :param ref_id: tweet_id
    :param tags: list of tags
    :param time: time of action
    :param title: given title
    :param url: item url

    :return: self for chaining
    :rtype: Pocket
    """
    self._add_action('add')
    return self
Add an item to list
See: https://getpocket.com/developer/docs/v3/modify

:param item_id: int
:param ref_id: tweet_id
:param tags: list of tags
:param time: time of action
:param title: given title
:param url: item url

:return: self for chaining
:rtype: Pocket
https://github.com/rakanalh/pocket-api/blob/d8222dd34e3aa5e545f9b8ba407fa277c734ab82/pocket/__init__.py#L61-L76
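Because bulk_add() only records the action and returns self, calls can be chained; a sketch follows, in which the final dispatch call name (commit) is an assumption rather than something confirmed by the excerpt above:

# actions pile up in _bulk_actions until they are flushed as a single 'send' request
pocket.bulk_add(item_id=None, url='https://example.com/a', tags=['later']) \
      .bulk_add(item_id=None, url='https://example.com/b') \
      .commit()  # assumed name for the method that flushes _bulk_actions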
rakanalh/pocket-api
pocket/__init__.py
Pocket._add_action
def _add_action(self, action):
    """
    Register an action into bulk

    :param action: action name
    """
    kwargs = self._get_method_params()
    kwargs['action'] = action

    self._bulk_actions.append(kwargs)
python
def _add_action(self, action):
    """
    Register an action into bulk

    :param action: action name
    """
    kwargs = self._get_method_params()
    kwargs['action'] = action

    self._bulk_actions.append(kwargs)
Register an action into bulk

:param action: action name
https://github.com/rakanalh/pocket-api/blob/d8222dd34e3aa5e545f9b8ba407fa277c734ab82/pocket/__init__.py#L251-L260
rakanalh/pocket-api
pocket/__init__.py
Pocket._make_request
def _make_request(self, action):
    """
    Perform the request

    :param action: action name
    :return: a dict containing the request result
    :rtype: dict
    """
    if isinstance(action, list):
        kwargs = {'actions': action}
        action = 'send'
    else:
        kwargs = self._get_method_params()

    kwargs.update({
        'consumer_key': self._consumer_key,
        'access_token': self._access_token
    })

    response = requests.post(
        self._get_url(action),
        json=kwargs,
        headers=self._get_headers()
    )

    if response.status_code != requests.codes.ok:
        raise self._make_exception(response)

    return response.json()
python
def _make_request(self, action):
    """
    Perform the request

    :param action: action name
    :return: a dict containing the request result
    :rtype: dict
    """
    if isinstance(action, list):
        kwargs = {'actions': action}
        action = 'send'
    else:
        kwargs = self._get_method_params()

    kwargs.update({
        'consumer_key': self._consumer_key,
        'access_token': self._access_token
    })

    response = requests.post(
        self._get_url(action),
        json=kwargs,
        headers=self._get_headers()
    )

    if response.status_code != requests.codes.ok:
        raise self._make_exception(response)

    return response.json()
Perform the request

:param action: action name
:return: a dict containing the request result
:rtype: dict
https://github.com/rakanalh/pocket-api/blob/d8222dd34e3aa5e545f9b8ba407fa277c734ab82/pocket/__init__.py#L262-L289
rakanalh/pocket-api
pocket/__init__.py
Pocket._get_method_params
def _get_method_params(self):
    """
    This method makes reading and filtering each method implemented
    in this class a more general approach. It reads the previous
    frame from Python and filters the params passed to the caller
    of _make_request.

    :return: a dictionary of caller's parameters and values
    :rtype: dict
    """
    caller = sys._getframe(2)
    var_names = list(caller.f_code.co_varnames)
    caller_locals = caller.f_locals

    var_names.remove('self')
    kwargs = {key: value for key, value in caller_locals.items()
              if key in var_names and value is not None}

    return kwargs
python
def _get_method_params(self):
    """
    This method makes reading and filtering each method implemented
    in this class a more general approach. It reads the previous
    frame from Python and filters the params passed to the caller
    of _make_request.

    :return: a dictionary of caller's parameters and values
    :rtype: dict
    """
    caller = sys._getframe(2)
    var_names = list(caller.f_code.co_varnames)
    caller_locals = caller.f_locals

    var_names.remove('self')
    kwargs = {key: value for key, value in caller_locals.items()
              if key in var_names and value is not None}

    return kwargs
This method makes reading and filtering each method implemented
in this class a more general approach. It reads the previous
frame from Python and filters the params passed to the caller
of _make_request.

:return: a dictionary of caller's parameters and values
:rtype: dict
https://github.com/rakanalh/pocket-api/blob/d8222dd34e3aa5e545f9b8ba407fa277c734ab82/pocket/__init__.py#L291-L307
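A stripped-down, self-contained illustration of the frame-inspection trick (not the library code): the helper looks two frames up and collects the caller's non-None named arguments.

import sys

def _collect_caller_args():
    caller = sys._getframe(2)  # two frames up: past the dispatcher, into the public method
    names = [n for n in caller.f_code.co_varnames if n != 'self']
    return {key: value for key, value in caller.f_locals.items()
            if key in names and value is not None}

def _dispatch(action):
    return action, _collect_caller_args()

def retrieve(state=None, count=None, offset=None):
    return _dispatch('get')

print(retrieve(state='unread', count=10))
# ('get', {'state': 'unread', 'count': 10})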
rakanalh/pocket-api
pocket/__init__.py
Pocket._make_exception
def _make_exception(self, response):
    """
    In case of exception, construct the exception
    object that holds all important values returned by
    the response.

    :return: The exception instance
    :rtype: PocketException
    """
    headers = response.headers

    limit_headers = []
    if 'X-Limit-User-Limit' in headers:
        limit_headers = [
            headers['X-Limit-User-Limit'],
            headers['X-Limit-User-Remaining'],
            headers['X-Limit-User-Reset'],
            headers['X-Limit-Key-Limit'],
            headers['X-Limit-Key-Remaining'],
            headers['X-Limit-Key-Reset']
        ]

    x_error_code = int(headers['X-Error-Code'])

    exc = PocketException
    if x_error_code in self.auth_error_codes:
        exc = PocketAutException

    return exc(
        response.status_code,
        x_error_code,
        headers['X-Error'],
        *limit_headers
    )
python
def _make_exception(self, response):
    """
    In case of exception, construct the exception
    object that holds all important values returned by
    the response.

    :return: The exception instance
    :rtype: PocketException
    """
    headers = response.headers

    limit_headers = []
    if 'X-Limit-User-Limit' in headers:
        limit_headers = [
            headers['X-Limit-User-Limit'],
            headers['X-Limit-User-Remaining'],
            headers['X-Limit-User-Reset'],
            headers['X-Limit-Key-Limit'],
            headers['X-Limit-Key-Remaining'],
            headers['X-Limit-Key-Reset']
        ]

    x_error_code = int(headers['X-Error-Code'])

    exc = PocketException
    if x_error_code in self.auth_error_codes:
        exc = PocketAutException

    return exc(
        response.status_code,
        x_error_code,
        headers['X-Error'],
        *limit_headers
    )
In case of exception, construct the exception
object that holds all important values returned by
the response.

:return: The exception instance
:rtype: PocketException
https://github.com/rakanalh/pocket-api/blob/d8222dd34e3aa5e545f9b8ba407fa277c734ab82/pocket/__init__.py#L329-L360
Alir3z4/python-currencies
currencies/__init__.py
Currency.set_money_currency
def set_money_currency(self, money_currency):
    """
    :type money_currency: str
    """
    if money_currency not in self.money_formats:
        raise CurrencyDoesNotExist

    self.money_currency = money_currency
python
def set_money_currency(self, money_currency):
    """
    :type money_currency: str
    """
    if money_currency not in self.money_formats:
        raise CurrencyDoesNotExist

    self.money_currency = money_currency
:type money_currency: str
https://github.com/Alir3z4/python-currencies/blob/f8790c4da5df405bd23c63c0d2b02a417424d835/currencies/__init__.py#L26-L33
Alir3z4/python-currencies
currencies/__init__.py
Currency.get_money_format
def get_money_format(self, amount):
    """
    :type amount: int or float or str

    Usage:
    >>> currency = Currency('USD')

    >>> currency.get_money_format(13)
    >>> '$13'

    >>> currency.get_money_format(13.99)
    >>> '$13.99'

    >>> currency.get_money_format('13,2313,33')
    >>> '$13,2313,33'

    :rtype: str
    """
    return self.money_formats[
        self.get_money_currency()
    ]['money_format'].format(amount=amount)
python
def get_money_format(self, amount):
    """
    :type amount: int or float or str

    Usage:
    >>> currency = Currency('USD')

    >>> currency.get_money_format(13)
    >>> '$13'

    >>> currency.get_money_format(13.99)
    >>> '$13.99'

    >>> currency.get_money_format('13,2313,33')
    >>> '$13,2313,33'

    :rtype: str
    """
    return self.money_formats[
        self.get_money_currency()
    ]['money_format'].format(amount=amount)
:type amount: int or float or str

Usage:
>>> currency = Currency('USD')

>>> currency.get_money_format(13)
>>> '$13'

>>> currency.get_money_format(13.99)
>>> '$13.99'

>>> currency.get_money_format('13,2313,33')
>>> '$13,2313,33'

:rtype: str
https://github.com/Alir3z4/python-currencies/blob/f8790c4da5df405bd23c63c0d2b02a417424d835/currencies/__init__.py#L48-L65
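A short usage sketch; that 'EUR' is among the bundled money_formats is an assumption about the library's data:

from currencies import Currency

currency = Currency('USD')
print(currency.get_money_format(13.99))   # '$13.99', matching the doctest above

currency.set_money_currency('EUR')        # raises CurrencyDoesNotExist for unknown codes
print(currency.get_money_format(13.99))   # formatted with the EUR pattern from money_formats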