language
stringclasses
6 values
original_string
stringlengths
25
887k
text
stringlengths
25
887k
Python
def to_dict(self):
    """Return the model properties as a plain dict.

    Nested model objects (anything exposing ``to_dict``) are converted
    recursively, including those held inside lists and dicts.
    """
    result = {}
    for attr, _ in six.iteritems(self.swagger_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = [
                item.to_dict() if hasattr(item, "to_dict") else item
                for item in value
            ]
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = {
                k: v.to_dict() if hasattr(v, "to_dict") else v
                for k, v in value.items()
            }
        else:
            result[attr] = value
    # When the model itself subclasses dict, merge its own items in too.
    if issubclass(Monitors, dict):
        for key, value in self.items():
            result[key] = value
    return result

def to_dict(self):
    """Return the model properties as a plain dict.

    Nested model objects (anything exposing ``to_dict``) are converted
    recursively, including those held inside lists and dicts.
    """
    result = {}
    for attr, _ in six.iteritems(self.swagger_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = [
                item.to_dict() if hasattr(item, "to_dict") else item
                for item in value
            ]
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = {
                k: v.to_dict() if hasattr(v, "to_dict") else v
                for k, v in value.items()
            }
        else:
            result[attr] = value
    # When the model itself subclasses dict, merge its own items in too.
    if issubclass(Monitors, dict):
        for key, value in self.items():
            result[key] = value
    return result
Python
def auto_boot(self):
    """Get the auto_boot setting of this Director.

    :return: The auto_boot of this Director.
    :rtype: bool
    """
    return self._auto_boot

def auto_boot(self):
    """Get the auto_boot setting of this Director.

    :return: The auto_boot of this Director.
    :rtype: bool
    """
    return self._auto_boot
Python
def auto_restart(self):
    """Get the auto_restart setting of this Director.

    :return: The auto_restart of this Director.
    :rtype: bool
    """
    return self._auto_restart

def auto_restart(self):
    """Get the auto_restart setting of this Director.

    :return: The auto_restart of this Director.
    :rtype: bool
    """
    return self._auto_restart
Python
def communication_status(self):
    """Get the communication_status of this Director.

    :return: The communication_status of this Director.
    :rtype: CommunicationStatus
    """
    return self._communication_status

def communication_status(self):
    """Get the communication_status of this Director.

    :return: The communication_status of this Director.
    :rtype: CommunicationStatus
    """
    return self._communication_status
Python
def communication_status(self, communication_status):
    """Set the communication_status of this Director.

    :param communication_status: The communication_status of this Director.
    :type: CommunicationStatus
    """
    self._communication_status = communication_status

def communication_status(self, communication_status):
    """Set the communication_status of this Director.

    :param communication_status: The communication_status of this Director.
    :type: CommunicationStatus
    """
    self._communication_status = communication_status
Python
def director_count(self):
    """Get the director_count of this Director.

    :return: The director_count of this Director.
    :rtype: int
    """
    return self._director_count

def director_count(self):
    """Get the director_count of this Director.

    :return: The director_count of this Director.
    :rtype: int
    """
    return self._director_count
Python
def director_count(self, director_count):
    """Set the director_count of this Director.

    :param director_count: The director_count of this Director.
    :type: int
    """
    self._director_count = director_count

def director_count(self, director_count):
    """Set the director_count of this Director.

    :param director_count: The director_count of this Director.
    :type: int
    """
    self._director_count = director_count
Python
def director_family(self):
    """Get the director_family of this Director.

    :return: The director_family of this Director.
    :rtype: str
    """
    return self._director_family

def director_family(self):
    """Get the director_family of this Director.

    :return: The director_family of this Director.
    :rtype: str
    """
    return self._director_family
Python
def director_family(self, director_family):
    """Set the director_family of this Director.

    :param director_family: The director_family of this Director.
    :type: str
    """
    self._director_family = director_family

def director_family(self, director_family):
    """Set the director_family of this Director.

    :param director_family: The director_family of this Director.
    :type: str
    """
    self._director_family = director_family
Python
def director_id(self, director_id):
    """Set the director_id of this Director.

    :param director_id: The director_id of this Director.
    :type: str
    """
    self._director_id = director_id

def director_id(self, director_id):
    """Set the director_id of this Director.

    :param director_id: The director_id of this Director.
    :type: str
    """
    self._director_id = director_id
Python
def director_type(self):
    """Get the director_type of this Director.

    :return: The director_type of this Director.
    :rtype: str
    """
    return self._director_type

def director_type(self):
    """Get the director_type of this Director.

    :return: The director_type of this Director.
    :rtype: str
    """
    return self._director_type
Python
def director_type(self, director_type):
    """Set the director_type of this Director.

    :param director_type: The director_type of this Director.
    :type: str
    """
    self._director_type = director_type

def director_type(self, director_type):
    """Set the director_type of this Director.

    :param director_type: The director_type of this Director.
    :type: str
    """
    self._director_type = director_type
Python
def failover_hostname(self):
    """Get the failover_hostname of this Director.

    :return: The failover_hostname of this Director.
    :rtype: str
    """
    return self._failover_hostname

def failover_hostname(self):
    """Get the failover_hostname of this Director.

    :return: The failover_hostname of this Director.
    :rtype: str
    """
    return self._failover_hostname
Python
def failover_hostname(self, failover_hostname):
    """Set the failover_hostname of this Director.

    :param failover_hostname: The failover_hostname of this Director.
    :type: str
    """
    self._failover_hostname = failover_hostname

def failover_hostname(self, failover_hostname):
    """Set the failover_hostname of this Director.

    :param failover_hostname: The failover_hostname of this Director.
    :type: str
    """
    self._failover_hostname = failover_hostname
Python
def firmware_uptime(self):
    """Get the firmware_uptime of this Director.

    :return: The firmware_uptime of this Director.
    :rtype: int
    """
    return self._firmware_uptime

def firmware_uptime(self):
    """Get the firmware_uptime of this Director.

    :return: The firmware_uptime of this Director.
    :rtype: int
    """
    return self._firmware_uptime
Python
def firmware_uptime(self, firmware_uptime):
    """Set the firmware_uptime of this Director.

    :param firmware_uptime: The firmware_uptime of this Director.
    :type: int
    """
    self._firmware_uptime = firmware_uptime

def firmware_uptime(self, firmware_uptime):
    """Set the firmware_uptime of this Director.

    :param firmware_uptime: The firmware_uptime of this Director.
    :type: int
    """
    self._firmware_uptime = firmware_uptime
Python
def os_uptime(self):
    """Get the os_uptime of this Director.

    :return: The os_uptime of this Director.
    :rtype: int
    """
    return self._os_uptime

def os_uptime(self):
    """Get the os_uptime of this Director.

    :return: The os_uptime of this Director.
    :rtype: int
    """
    return self._os_uptime
Python
def run_level(self):
    """Get the run_level of this Director.

    :return: The run_level of this Director.
    :rtype: int
    """
    return self._run_level

def run_level(self):
    """Get the run_level of this Director.

    :return: The run_level of this Director.
    :rtype: int
    """
    return self._run_level
Python
def system_time(self, system_time):
    """Set the system_time of this Director.

    :param system_time: The system_time of this Director.
    :type: str
    """
    self._system_time = system_time

def system_time(self, system_time):
    """Set the system_time of this Director.

    :param system_time: The system_time of this Director.
    :type: str
    """
    self._system_time = system_time
Python
def temperature_threshold_exceeded(self):
    """Get the temperature_threshold_exceeded flag of this Director.

    :return: The temperature_threshold_exceeded of this Director.
    :rtype: bool
    """
    return self._temperature_threshold_exceeded

def temperature_threshold_exceeded(self):
    """Get the temperature_threshold_exceeded flag of this Director.

    :return: The temperature_threshold_exceeded of this Director.
    :rtype: bool
    """
    return self._temperature_threshold_exceeded
Python
def voltage_threshold_exceeded(self):
    """Get the voltage_threshold_exceeded flag of this Director.

    :return: The voltage_threshold_exceeded of this Director.
    :rtype: bool
    """
    return self._voltage_threshold_exceeded

def voltage_threshold_exceeded(self):
    """Get the voltage_threshold_exceeded flag of this Director.

    :return: The voltage_threshold_exceeded of this Director.
    :rtype: bool
    """
    return self._voltage_threshold_exceeded
Python
def to_dict(self):
    """Return the model properties as a plain dict.

    Nested model objects (anything exposing ``to_dict``) are converted
    recursively, including those held inside lists and dicts.
    """
    result = {}
    for attr, _ in six.iteritems(self.swagger_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = [
                item.to_dict() if hasattr(item, "to_dict") else item
                for item in value
            ]
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = {
                k: v.to_dict() if hasattr(v, "to_dict") else v
                for k, v in value.items()
            }
        else:
            result[attr] = value
    # When the model itself subclasses dict, merge its own items in too.
    if issubclass(Director, dict):
        for key, value in self.items():
            result[key] = value
    return result

def to_dict(self):
    """Return the model properties as a plain dict.

    Nested model objects (anything exposing ``to_dict``) are converted
    recursively, including those held inside lists and dicts.
    """
    result = {}
    for attr, _ in six.iteritems(self.swagger_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = [
                item.to_dict() if hasattr(item, "to_dict") else item
                for item in value
            ]
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = {
                k: v.to_dict() if hasattr(v, "to_dict") else v
                for k, v in value.items()
            }
        else:
            result[attr] = value
    # When the model itself subclasses dict, merge its own items in too.
    if issubclass(Director, dict):
        for key, value in self.items():
            result[key] = value
    return result
Python
def create_device(self, cluster_name, device_payload, **kwargs):  # noqa: E501
    """Create a new Device.

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the result.

    :param async_req bool
    :param str cluster_name: The name of the cluster (required)
    :param DevicePayload device_payload: (required)
    :param str x_include_object: When passed as part of a POST request,
        controls whether the representation of the newly created object is
        included in the response. Defaults to 'true'. Useful because
        refreshing the newly created object is usually the slowest part of
        a POST operation.
    :return: Device
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of the plain wrapper always want just the payload, never
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.create_device_with_http_info(cluster_name, device_payload, **kwargs)
    data = self.create_device_with_http_info(cluster_name, device_payload, **kwargs)
    return data

def create_device(self, cluster_name, device_payload, **kwargs):  # noqa: E501
    """Create a new Device.

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the result.

    :param async_req bool
    :param str cluster_name: The name of the cluster (required)
    :param DevicePayload device_payload: (required)
    :param str x_include_object: When passed as part of a POST request,
        controls whether the representation of the newly created object is
        included in the response. Defaults to 'true'. Useful because
        refreshing the newly created object is usually the slowest part of
        a POST operation.
    :return: Device
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.create_device_with_http_info(cluster_name, device_payload, **kwargs)
    data = self.create_device_with_http_info(cluster_name, device_payload, **kwargs)
    return data
Python
def create_device_with_http_info(self, cluster_name, device_payload, **kwargs):  # noqa: E501
    """Create a new Device.

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the result.

    :param async_req bool
    :param str cluster_name: The name of the cluster (required)
    :param DevicePayload device_payload: (required)
    :param str x_include_object: When passed as part of a POST request,
        controls whether the representation of the newly created object is
        included in the response. Defaults to 'true'.
    :return: Device
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented parameters plus the generic transport options shared by
    # every API call.
    all_params = [
        'cluster_name', 'device_payload', 'x_include_object',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_device" % key
            )
        params[key] = val
    del params['kwargs']

    # Both required parameters must be present and non-None.
    if params.get('cluster_name') is None:
        raise ValueError("Missing the required parameter `cluster_name` when calling `create_device`")
    if params.get('device_payload') is None:
        raise ValueError("Missing the required parameter `device_payload` when calling `create_device`")

    collection_formats = {}

    path_params = {}
    if 'cluster_name' in params:
        path_params['cluster_name'] = params['cluster_name']

    query_params = []

    header_params = {}
    if 'x_include_object' in params:
        header_params['X-Include-Object'] = params['x_include_object']

    form_params = []
    local_var_files = {}

    body_params = None
    if 'device_payload' in params:
        body_params = params['device_payload']

    # Authentication setting
    auth_settings = ['basicAuth', 'jwtAuth']

    return self.api_client.call_api(
        '/clusters/{cluster_name}/devices', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Device',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)

def create_device_with_http_info(self, cluster_name, device_payload, **kwargs):  # noqa: E501
    """Create a new Device.

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the result.

    :param async_req bool
    :param str cluster_name: The name of the cluster (required)
    :param DevicePayload device_payload: (required)
    :param str x_include_object: When passed as part of a POST request,
        controls whether the representation of the newly created object is
        included in the response. Defaults to 'true'.
    :return: Device
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = [
        'cluster_name', 'device_payload', 'x_include_object',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_device" % key
            )
        params[key] = val
    del params['kwargs']

    if params.get('cluster_name') is None:
        raise ValueError("Missing the required parameter `cluster_name` when calling `create_device`")
    if params.get('device_payload') is None:
        raise ValueError("Missing the required parameter `device_payload` when calling `create_device`")

    collection_formats = {}

    path_params = {}
    if 'cluster_name' in params:
        path_params['cluster_name'] = params['cluster_name']

    query_params = []

    header_params = {}
    if 'x_include_object' in params:
        header_params['X-Include-Object'] = params['x_include_object']

    form_params = []
    local_var_files = {}

    body_params = None
    if 'device_payload' in params:
        body_params = params['device_payload']

    # Authentication setting
    auth_settings = ['basicAuth', 'jwtAuth']

    return self.api_client.call_api(
        '/clusters/{cluster_name}/devices', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Device',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
Python
def delete_device(self, cluster_name, name, **kwargs):  # noqa: E501
    """Deletes a single Device.

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the result.

    :param async_req bool
    :param str cluster_name: The name of the cluster (required)
    :param str name: The name of a specific instance of the resource (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of the plain wrapper always want just the payload, never
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.delete_device_with_http_info(cluster_name, name, **kwargs)
    data = self.delete_device_with_http_info(cluster_name, name, **kwargs)
    return data

def delete_device(self, cluster_name, name, **kwargs):  # noqa: E501
    """Deletes a single Device.

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the result.

    :param async_req bool
    :param str cluster_name: The name of the cluster (required)
    :param str name: The name of a specific instance of the resource (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.delete_device_with_http_info(cluster_name, name, **kwargs)
    data = self.delete_device_with_http_info(cluster_name, name, **kwargs)
    return data
Python
def delete_device_with_http_info(self, cluster_name, name, **kwargs):  # noqa: E501
    """Deletes a single Device.

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the result.

    :param async_req bool
    :param str cluster_name: The name of the cluster (required)
    :param str name: The name of a specific instance of the resource (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented parameters plus the generic transport options shared by
    # every API call.
    all_params = [
        'cluster_name', 'name',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_device" % key
            )
        params[key] = val
    del params['kwargs']

    # Both required path parameters must be present and non-None.
    if params.get('cluster_name') is None:
        raise ValueError("Missing the required parameter `cluster_name` when calling `delete_device`")
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `delete_device`")

    collection_formats = {}

    path_params = {}
    if 'cluster_name' in params:
        path_params['cluster_name'] = params['cluster_name']
    if 'name' in params:
        path_params['name'] = params['name']

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['basicAuth', 'jwtAuth']

    return self.api_client.call_api(
        '/clusters/{cluster_name}/devices/{name}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)

def delete_device_with_http_info(self, cluster_name, name, **kwargs):  # noqa: E501
    """Deletes a single Device.

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the result.

    :param async_req bool
    :param str cluster_name: The name of the cluster (required)
    :param str name: The name of a specific instance of the resource (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = [
        'cluster_name', 'name',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_device" % key
            )
        params[key] = val
    del params['kwargs']

    if params.get('cluster_name') is None:
        raise ValueError("Missing the required parameter `cluster_name` when calling `delete_device`")
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `delete_device`")

    collection_formats = {}

    path_params = {}
    if 'cluster_name' in params:
        path_params['cluster_name'] = params['cluster_name']
    if 'name' in params:
        path_params['name'] = params['name']

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['basicAuth', 'jwtAuth']

    return self.api_client.call_api(
        '/clusters/{cluster_name}/devices/{name}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
Python
def rediscover_storage_array(self, cluster_name, name, **kwargs):  # noqa: E501
    """Rediscover LUNs on the array.

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the result.

    :param async_req bool
    :param str cluster_name: The name of the cluster (required)
    :param str name: The name of a specific instance of the resource (required)
    :return: StorageArray
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of the plain wrapper always want just the payload, never
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.rediscover_storage_array_with_http_info(cluster_name, name, **kwargs)
    data = self.rediscover_storage_array_with_http_info(cluster_name, name, **kwargs)
    return data

def rediscover_storage_array(self, cluster_name, name, **kwargs):  # noqa: E501
    """Rediscover LUNs on the array.

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the result.

    :param async_req bool
    :param str cluster_name: The name of the cluster (required)
    :param str name: The name of a specific instance of the resource (required)
    :return: StorageArray
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.rediscover_storage_array_with_http_info(cluster_name, name, **kwargs)
    data = self.rediscover_storage_array_with_http_info(cluster_name, name, **kwargs)
    return data
Python
def rediscover_storage_array_with_http_info(self, cluster_name, name, **kwargs):  # noqa: E501
    """Rediscover LUNs on the array.

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the result.

    :param async_req bool
    :param str cluster_name: The name of the cluster (required)
    :param str name: The name of a specific instance of the resource (required)
    :return: StorageArray
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented parameters plus the generic transport options shared by
    # every API call.
    all_params = [
        'cluster_name', 'name',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method rediscover_storage_array" % key
            )
        params[key] = val
    del params['kwargs']

    # Both required path parameters must be present and non-None.
    if params.get('cluster_name') is None:
        raise ValueError("Missing the required parameter `cluster_name` when calling `rediscover_storage_array`")
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `rediscover_storage_array`")

    collection_formats = {}

    path_params = {}
    if 'cluster_name' in params:
        path_params['cluster_name'] = params['cluster_name']
    if 'name' in params:
        path_params['name'] = params['name']

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['basicAuth', 'jwtAuth']

    return self.api_client.call_api(
        '/clusters/{cluster_name}/storage_arrays/{name}/rediscover', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='StorageArray',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)

def rediscover_storage_array_with_http_info(self, cluster_name, name, **kwargs):  # noqa: E501
    """Rediscover LUNs on the array.

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the result.

    :param async_req bool
    :param str cluster_name: The name of the cluster (required)
    :param str name: The name of a specific instance of the resource (required)
    :return: StorageArray
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = [
        'cluster_name', 'name',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method rediscover_storage_array" % key
            )
        params[key] = val
    del params['kwargs']

    if params.get('cluster_name') is None:
        raise ValueError("Missing the required parameter `cluster_name` when calling `rediscover_storage_array`")
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `rediscover_storage_array`")

    collection_formats = {}

    path_params = {}
    if 'cluster_name' in params:
        path_params['cluster_name'] = params['cluster_name']
    if 'name' in params:
        path_params['name'] = params['name']

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['basicAuth', 'jwtAuth']

    return self.api_client.call_api(
        '/clusters/{cluster_name}/storage_arrays/{name}/rediscover', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='StorageArray',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
Python
def create_distributed_consistency_group(self, distributed_consistency_group_payload, **kwargs):  # noqa: E501
    """Create a new distributed ConsistencyGroup.

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the result.

    :param async_req bool
    :param DistributedConsistencyGroupPayload distributed_consistency_group_payload: (required)
    :param str x_include_object: When passed as part of a POST request,
        controls whether the representation of the newly created object is
        included in the response. Defaults to 'true'. Useful because
        refreshing the newly created object is usually the slowest part of
        a POST operation.
    :return: ConsistencyGroup
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of the plain wrapper always want just the payload, never
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.create_distributed_consistency_group_with_http_info(distributed_consistency_group_payload, **kwargs)
    data = self.create_distributed_consistency_group_with_http_info(distributed_consistency_group_payload, **kwargs)
    return data

def create_distributed_consistency_group(self, distributed_consistency_group_payload, **kwargs):  # noqa: E501
    """Create a new distributed ConsistencyGroup.

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the result.

    :param async_req bool
    :param DistributedConsistencyGroupPayload distributed_consistency_group_payload: (required)
    :param str x_include_object: When passed as part of a POST request,
        controls whether the representation of the newly created object is
        included in the response. Defaults to 'true'.
    :return: ConsistencyGroup
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.create_distributed_consistency_group_with_http_info(distributed_consistency_group_payload, **kwargs)
    data = self.create_distributed_consistency_group_with_http_info(distributed_consistency_group_payload, **kwargs)
    return data
# Python
def create_distributed_consistency_group_with_http_info(self, distributed_consistency_group_payload, **kwargs):  # noqa: E501
    """Create a new distributed ConsistencyGroup  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_distributed_consistency_group_with_http_info(distributed_consistency_group_payload, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param DistributedConsistencyGroupPayload distributed_consistency_group_payload: (required)
    :param str x_include_object: When passed as part of a POST request, controls whether the representation of the newly created object is included in the response. Defaults to 'true' which will include the object in the response. This header is useful because refreshing the newly created object is usually the slowest part of a POST operation.
    :return: ConsistencyGroup
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is passed
    :raises ValueError: if a required parameter is missing or None
    """

    all_params = ['distributed_consistency_group_payload', 'x_include_object']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Merge recognised kwargs into the local parameter dict; reject unknown ones.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_distributed_consistency_group" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'distributed_consistency_group_payload' is set
    if ('distributed_consistency_group_payload' not in params or
            params['distributed_consistency_group_payload'] is None):
        raise ValueError("Missing the required parameter `distributed_consistency_group_payload` when calling `create_distributed_consistency_group`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}
    if 'x_include_object' in params:
        header_params['X-Include-Object'] = params['x_include_object']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    if 'distributed_consistency_group_payload' in params:
        body_params = params['distributed_consistency_group_payload']
    # Authentication setting
    auth_settings = ['basicAuth', 'jwtAuth']  # noqa: E501

    return self.api_client.call_api(
        '/distributed_storage/distributed_consistency_groups', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ConsistencyGroup',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
# Python
def create_distributed_device(self, distributed_device_payload, **kwargs):  # noqa: E501
    """Create a new DistributedDevice  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_distributed_device(distributed_device_payload, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param DistributedDevicePayload distributed_device_payload: (required)
    :param str x_include_object: When passed as part of a POST request, controls whether the representation of the newly created object is included in the response. Defaults to 'true' which will include the object in the response. This header is useful because refreshing the newly created object is usually the slowest part of a POST operation.
    :return: DistributedDevice
             If the method is called asynchronously,
             returns the request thread.
    """
    # Thin convenience wrapper: delegates to the *_with_http_info variant and
    # unwraps the (data, status, headers) tuple down to just the data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.create_distributed_device_with_http_info(distributed_device_payload, **kwargs)  # noqa: E501
    else:
        (data) = self.create_distributed_device_with_http_info(distributed_device_payload, **kwargs)  # noqa: E501
        return data
# Python
def create_distributed_device_with_http_info(self, distributed_device_payload, **kwargs):  # noqa: E501
    """Create a new DistributedDevice  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_distributed_device_with_http_info(distributed_device_payload, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param DistributedDevicePayload distributed_device_payload: (required)
    :param str x_include_object: When passed as part of a POST request, controls whether the representation of the newly created object is included in the response. Defaults to 'true' which will include the object in the response. This header is useful because refreshing the newly created object is usually the slowest part of a POST operation.
    :return: DistributedDevice
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is passed
    :raises ValueError: if a required parameter is missing or None
    """

    all_params = ['distributed_device_payload', 'x_include_object']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Merge recognised kwargs into the local parameter dict; reject unknown ones.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_distributed_device" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'distributed_device_payload' is set
    if ('distributed_device_payload' not in params or
            params['distributed_device_payload'] is None):
        raise ValueError("Missing the required parameter `distributed_device_payload` when calling `create_distributed_device`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}
    if 'x_include_object' in params:
        header_params['X-Include-Object'] = params['x_include_object']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    if 'distributed_device_payload' in params:
        body_params = params['distributed_device_payload']
    # Authentication setting
    auth_settings = ['basicAuth', 'jwtAuth']  # noqa: E501

    return self.api_client.call_api(
        '/distributed_storage/distributed_devices', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DistributedDevice',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
# Python
def create_distributed_virtual_volume(self, distributed_virtual_volume_payload, **kwargs):  # noqa: E501
    """Create a new distributed VirtualVolume  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_distributed_virtual_volume(distributed_virtual_volume_payload, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param DistributedVirtualVolumePayload distributed_virtual_volume_payload: (required)
    :param str x_include_object: When passed as part of a POST request, controls whether the representation of the newly created object is included in the response. Defaults to 'true' which will include the object in the response. This header is useful because refreshing the newly created object is usually the slowest part of a POST operation.
    :return: VirtualVolume
             If the method is called asynchronously,
             returns the request thread.
    """
    # Thin convenience wrapper: delegates to the *_with_http_info variant and
    # unwraps the (data, status, headers) tuple down to just the data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.create_distributed_virtual_volume_with_http_info(distributed_virtual_volume_payload, **kwargs)  # noqa: E501
    else:
        (data) = self.create_distributed_virtual_volume_with_http_info(distributed_virtual_volume_payload, **kwargs)  # noqa: E501
        return data
# Python
def create_distributed_virtual_volume_with_http_info(self, distributed_virtual_volume_payload, **kwargs):  # noqa: E501
    """Create a new distributed VirtualVolume  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_distributed_virtual_volume_with_http_info(distributed_virtual_volume_payload, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param DistributedVirtualVolumePayload distributed_virtual_volume_payload: (required)
    :param str x_include_object: When passed as part of a POST request, controls whether the representation of the newly created object is included in the response. Defaults to 'true' which will include the object in the response. This header is useful because refreshing the newly created object is usually the slowest part of a POST operation.
    :return: VirtualVolume
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is passed
    :raises ValueError: if a required parameter is missing or None
    """

    all_params = ['distributed_virtual_volume_payload', 'x_include_object']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Merge recognised kwargs into the local parameter dict; reject unknown ones.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_distributed_virtual_volume" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'distributed_virtual_volume_payload' is set
    if ('distributed_virtual_volume_payload' not in params or
            params['distributed_virtual_volume_payload'] is None):
        raise ValueError("Missing the required parameter `distributed_virtual_volume_payload` when calling `create_distributed_virtual_volume`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}
    if 'x_include_object' in params:
        header_params['X-Include-Object'] = params['x_include_object']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    if 'distributed_virtual_volume_payload' in params:
        body_params = params['distributed_virtual_volume_payload']
    # Authentication setting
    auth_settings = ['basicAuth', 'jwtAuth']  # noqa: E501

    return self.api_client.call_api(
        '/distributed_storage/distributed_virtual_volumes', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VirtualVolume',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
# Python
def delete_distributed_consistency_group(self, name, **kwargs):  # noqa: E501
    """Deletes a single distributed ConsistencyGroup  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_distributed_consistency_group(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of a specific instance of the resource (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Thin convenience wrapper: delegates to the *_with_http_info variant and
    # unwraps the (data, status, headers) tuple down to just the data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.delete_distributed_consistency_group_with_http_info(name, **kwargs)  # noqa: E501
    else:
        (data) = self.delete_distributed_consistency_group_with_http_info(name, **kwargs)  # noqa: E501
        return data
# Python
def delete_distributed_consistency_group_with_http_info(self, name, **kwargs):  # noqa: E501
    """Deletes a single distributed ConsistencyGroup  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_distributed_consistency_group_with_http_info(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of a specific instance of the resource (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is passed
    :raises ValueError: if a required parameter is missing or None
    """

    all_params = ['name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Merge recognised kwargs into the local parameter dict; reject unknown ones.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_distributed_consistency_group" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params or
            params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `delete_distributed_consistency_group`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # Authentication setting
    auth_settings = ['basicAuth', 'jwtAuth']  # noqa: E501

    return self.api_client.call_api(
        '/distributed_storage/distributed_consistency_groups/{name}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
# Python
def delete_distributed_device(self, name, **kwargs):  # noqa: E501
    """Deletes a single DistributedDevice  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_distributed_device(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of a specific instance of the resource (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Thin convenience wrapper: delegates to the *_with_http_info variant and
    # unwraps the (data, status, headers) tuple down to just the data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.delete_distributed_device_with_http_info(name, **kwargs)  # noqa: E501
    else:
        (data) = self.delete_distributed_device_with_http_info(name, **kwargs)  # noqa: E501
        return data
# Python
def delete_distributed_device_with_http_info(self, name, **kwargs):  # noqa: E501
    """Deletes a single DistributedDevice  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_distributed_device_with_http_info(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of a specific instance of the resource (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is passed
    :raises ValueError: if a required parameter is missing or None
    """

    all_params = ['name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Merge recognised kwargs into the local parameter dict; reject unknown ones.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_distributed_device" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params or
            params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `delete_distributed_device`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # Authentication setting
    auth_settings = ['basicAuth', 'jwtAuth']  # noqa: E501

    return self.api_client.call_api(
        '/distributed_storage/distributed_devices/{name}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
# Python
def delete_distributed_virtual_volume(self, name, **kwargs):  # noqa: E501
    """Deletes a single distributed VirtualVolume  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_distributed_virtual_volume(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of a specific instance of the resource (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: return only the deserialized payload, not the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.delete_distributed_virtual_volume_with_http_info(name, **kwargs)  # noqa: E501
    else:
        (data) = self.delete_distributed_virtual_volume_with_http_info(name, **kwargs)  # noqa: E501
        return data
# Python
def delete_distributed_virtual_volume_with_http_info(self, name, **kwargs):  # noqa: E501
    """Deletes a single distributed VirtualVolume  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_distributed_virtual_volume_with_http_info(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of a specific instance of the resource (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() gives a dict of the declared arguments; explicit kwargs are
    # validated against all_params and folded in before the call is built.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_distributed_virtual_volume" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params or
            params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `delete_distributed_virtual_volume`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # Authentication setting
    auth_settings = ['basicAuth', 'jwtAuth']  # noqa: E501

    return self.api_client.call_api(
        '/distributed_storage/distributed_virtual_volumes/{name}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
# Python
def expand_distributed_virtual_volume(self, name, distributed_virtual_volume_expand_payload, **kwargs):  # noqa: E501
    """Expand the capacity of a distributed VirtualVolume  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.expand_distributed_virtual_volume(name, distributed_virtual_volume_expand_payload, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of a specific instance of the resource (required)
    :param DistributedVirtualVolumeExpandPayload distributed_virtual_volume_expand_payload: (required)
    :return: VirtualVolume
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: return only the deserialized payload, not the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.expand_distributed_virtual_volume_with_http_info(name, distributed_virtual_volume_expand_payload, **kwargs)  # noqa: E501
    else:
        (data) = self.expand_distributed_virtual_volume_with_http_info(name, distributed_virtual_volume_expand_payload, **kwargs)  # noqa: E501
        return data
# Python
def expand_distributed_virtual_volume_with_http_info(self, name, distributed_virtual_volume_expand_payload, **kwargs):  # noqa: E501
    """Expand the capacity of a distributed VirtualVolume  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.expand_distributed_virtual_volume_with_http_info(name, distributed_virtual_volume_expand_payload, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of a specific instance of the resource (required)
    :param DistributedVirtualVolumeExpandPayload distributed_virtual_volume_expand_payload: (required)
    :return: VirtualVolume
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['name', 'distributed_virtual_volume_expand_payload']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() gives a dict of the declared arguments; explicit kwargs are
    # validated against all_params and folded in before the call is built.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method expand_distributed_virtual_volume" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params or
            params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `expand_distributed_virtual_volume`")  # noqa: E501
    # verify the required parameter 'distributed_virtual_volume_expand_payload' is set
    if ('distributed_virtual_volume_expand_payload' not in params or
            params['distributed_virtual_volume_expand_payload'] is None):
        raise ValueError("Missing the required parameter `distributed_virtual_volume_expand_payload` when calling `expand_distributed_virtual_volume`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'distributed_virtual_volume_expand_payload' in params:
        body_params = params['distributed_virtual_volume_expand_payload']
    # Authentication setting
    auth_settings = ['basicAuth', 'jwtAuth']  # noqa: E501

    return self.api_client.call_api(
        '/distributed_storage/distributed_virtual_volumes/{name}/expand', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VirtualVolume',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
# Python
def patch_distributed_virtual_volume(self, name, distributed_virtual_volume_patch_payload, **kwargs):  # noqa: E501
    """Update attributes on a distributed VirtualVolume  # noqa: E501

    Settable attributes: 'name'   # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.patch_distributed_virtual_volume(name, distributed_virtual_volume_patch_payload, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of a specific instance of the resource (required)
    :param list[JsonPatchOp] distributed_virtual_volume_patch_payload: (required)
    :return: VirtualVolume
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: return only the deserialized payload, not the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.patch_distributed_virtual_volume_with_http_info(name, distributed_virtual_volume_patch_payload, **kwargs)  # noqa: E501
    else:
        (data) = self.patch_distributed_virtual_volume_with_http_info(name, distributed_virtual_volume_patch_payload, **kwargs)  # noqa: E501
        return data
# Python
def patch_distributed_virtual_volume_with_http_info(self, name, distributed_virtual_volume_patch_payload, **kwargs):  # noqa: E501
    """Update attributes on a distributed VirtualVolume  # noqa: E501

    Settable attributes: 'name'   # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.patch_distributed_virtual_volume_with_http_info(name, distributed_virtual_volume_patch_payload, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of a specific instance of the resource (required)
    :param list[JsonPatchOp] distributed_virtual_volume_patch_payload: (required)
    :return: VirtualVolume
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['name', 'distributed_virtual_volume_patch_payload']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() gives a dict of the declared arguments; explicit kwargs are
    # validated against all_params and folded in before the call is built.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_distributed_virtual_volume" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params or
            params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `patch_distributed_virtual_volume`")  # noqa: E501
    # verify the required parameter 'distributed_virtual_volume_patch_payload' is set
    if ('distributed_virtual_volume_patch_payload' not in params or
            params['distributed_virtual_volume_patch_payload'] is None):
        raise ValueError("Missing the required parameter `distributed_virtual_volume_patch_payload` when calling `patch_distributed_virtual_volume`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'distributed_virtual_volume_patch_payload' in params:
        body_params = params['distributed_virtual_volume_patch_payload']
    # Authentication setting
    auth_settings = ['basicAuth', 'jwtAuth']  # noqa: E501

    return self.api_client.call_api(
        '/distributed_storage/distributed_virtual_volumes/{name}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VirtualVolume',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
# Python
def resume(self, name, cg_resume_payload, **kwargs):  # noqa: E501
    """Manually resume I/O to the virtual-volumes in a distributed consistency group.  # noqa: E501

    In a cluster-partition scenario where the ruleset does not indicate an
    automatic winner, this endpoint will select the winning cluster where I/O
    should continue. After a cluster-partition is resolved, in the case that
    auto-resume-at-loser is set to false, this endpoint will resume I/O on the
    losing cluster.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.resume(name, cg_resume_payload, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of a specific instance of the resource (required)
    :param CgResumePayload cg_resume_payload: (required)
    :return: ConsistencyGroup
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: return only the deserialized payload, not the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.resume_with_http_info(name, cg_resume_payload, **kwargs)  # noqa: E501
    else:
        (data) = self.resume_with_http_info(name, cg_resume_payload, **kwargs)  # noqa: E501
        return data
# Python
def resume_with_http_info(self, name, cg_resume_payload, **kwargs):  # noqa: E501
    """Manually resume I/O to the virtual-volumes in a distributed consistency group.  # noqa: E501

    In a cluster-partition scenario where the ruleset does not indicate an
    automatic winner, this endpoint will select the winning cluster where I/O
    should continue. After a cluster-partition is resolved, in the case that
    auto-resume-at-loser is set to false, this endpoint will resume I/O on the
    losing cluster.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.resume_with_http_info(name, cg_resume_payload, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of a specific instance of the resource (required)
    :param CgResumePayload cg_resume_payload: (required)
    :return: ConsistencyGroup
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['name', 'cg_resume_payload']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() gives a dict of the declared arguments; explicit kwargs are
    # validated against all_params and folded in before the call is built.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method resume" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params or
            params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `resume`")  # noqa: E501
    # verify the required parameter 'cg_resume_payload' is set
    if ('cg_resume_payload' not in params or
            params['cg_resume_payload'] is None):
        raise ValueError("Missing the required parameter `cg_resume_payload` when calling `resume`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'cg_resume_payload' in params:
        body_params = params['cg_resume_payload']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth', 'jwtAuth']  # noqa: E501

    return self.api_client.call_api(
        '/distributed_storage/distributed_consistency_groups/{name}/resume', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ConsistencyGroup',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
# Python
def resume_link_up(self, name, **kwargs): # noqa: E501 """Manually resume I/O to a distributed device that supports a distributed virtual-volume. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.resume_link_up(name, async_req=True) >>> result = thread.get() :param async_req bool :param str name: The name of a specific instance of the resource (required) :return: DistributedDevice If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.resume_link_up_with_http_info(name, **kwargs) # noqa: E501 else: (data) = self.resume_link_up_with_http_info(name, **kwargs) # noqa: E501 return data
def resume_link_up(self, name, **kwargs):  # noqa: E501
    """Manually resume I/O to a distributed device that supports a distributed virtual-volume.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.resume_link_up(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of a specific instance of the resource (required)
    :return: DistributedDevice
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # With async_req the delegate returns the request thread, otherwise it
    # returns the response data; either way we hand the result straight back.
    return self.resume_link_up_with_http_info(name, **kwargs)  # noqa: E501
Python
def resume_link_up_with_http_info(self, name, **kwargs):  # noqa: E501
    """Manually resume I/O to a distributed device that supports a distributed virtual-volume.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.resume_link_up_with_http_info(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of a specific instance of the resource (required)
    :return: DistributedDevice
             If the method is called asynchronously,
             returns the request thread.
    """

    all_params = ['name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots the names defined above; do not rename locals
    # in this generated method or the params dict keys will change.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method resume_link_up" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params or
            params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `resume_link_up`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth', 'jwtAuth']  # noqa: E501

    return self.api_client.call_api(
        '/distributed_storage/distributed_devices/{name}/resume', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DistributedDevice',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def resume_link_up_with_http_info(self, name, **kwargs):  # noqa: E501
    """Manually resume I/O to a distributed device that supports a distributed virtual-volume.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.resume_link_up_with_http_info(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of a specific instance of the resource (required)
    :return: DistributedDevice
             If the method is called asynchronously,
             returns the request thread.
    """

    all_params = ['name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots the names defined above; do not rename locals
    # in this generated method or the params dict keys will change.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method resume_link_up" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params or
            params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `resume_link_up`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth', 'jwtAuth']  # noqa: E501

    return self.api_client.call_api(
        '/distributed_storage/distributed_devices/{name}/resume', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DistributedDevice',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
Python
async def close_postgres_connection(state: State) -> None:
    """Dispose of the postgres engine stored in the application state.

    Called during shutdown; a no-op when no connection was ever stored.
    """
    if not hasattr(state, "postgres_connection"):
        return
    engine = cast(AsyncEngine, state.postgres_connection)
    await engine.dispose()
async def close_postgres_connection(state: State) -> None:
    """Dispose of the postgres engine stored in the application state.

    Called during shutdown; a no-op when no connection was ever stored.
    """
    if not hasattr(state, "postgres_connection"):
        return
    engine = cast(AsyncEngine, state.postgres_connection)
    await engine.dispose()
Python
def create_async_session(state: State) -> AsyncSession:
    """
    Creates a sessionmaker from the given connection and returns a new
    AsyncSession bound to the shared engine.

    Raises:
        RuntimeError: if no ``postgres_connection`` has been stored in state.
    """
    if hasattr(state, "postgres_connection"):
        postgres_connection = cast(AsyncEngine, state.postgres_connection)
        # The sessionmaker is built and immediately invoked, so every call
        # hands back a fresh session.
        return sessionmaker(postgres_connection, class_=AsyncSession, expire_on_commit=False)()
    raise RuntimeError("postgres_connection has not been set in state")
def create_async_session(state: State) -> AsyncSession:
    """
    Creates a sessionmaker from the given connection and returns a new
    AsyncSession bound to the shared engine.

    Raises:
        RuntimeError: if no ``postgres_connection`` has been stored in state.
    """
    if hasattr(state, "postgres_connection"):
        postgres_connection = cast(AsyncEngine, state.postgres_connection)
        # The sessionmaker is built and immediately invoked, so every call
        # hands back a fresh session.
        return sessionmaker(postgres_connection, class_=AsyncSession, expire_on_commit=False)()
    raise RuntimeError("postgres_connection has not been set in state")
Python
def predict():
    '''
    For rendering results on HTML GUI
    '''
    # Collect the submitted form fields as a single feature vector.
    features = [float(value) for value in request.form.values()]
    model_input = [np.array(features)]
    prediction = model.predict(model_input)
    output = round(prediction[0], 2)
    result_text = ('Patient is positive' if output == 1
                   else 'Patient is non positive')
    return render_template('index.html', prediction_text=result_text)
def predict():
    '''
    For rendering results on HTML GUI
    '''
    # Collect the submitted form fields as a single feature vector.
    features = [float(value) for value in request.form.values()]
    model_input = [np.array(features)]
    prediction = model.predict(model_input)
    output = round(prediction[0], 2)
    result_text = ('Patient is positive' if output == 1
                   else 'Patient is non positive')
    return render_template('index.html', prediction_text=result_text)
Python
def _preprocess(self, im_crops):
    """
    TODO:
        1. to float with scale from 0 to 1
        2. resize to (64, 128) as Market1501 dataset did
        3. concatenate to a numpy array
        4. to torch Tensor
        5. normalize
    """
    def _resize(im, size):
        # uint8 [0, 255] -> float32 [0, 1], then resize (cv2 takes (w, h)).
        return cv2.resize(im.astype(np.float32) / 255., size)

    # Normalize every resized crop and stack them along a new batch dim.
    im_batch = torch.cat([
        self.norm(_resize(im, self.size)).unsqueeze(0) for im in im_crops
    ], dim=0).float()
    return im_batch
def _preprocess(self, im_crops):
    """
    TODO:
        1. to float with scale from 0 to 1
        2. resize to (64, 128) as Market1501 dataset did
        3. concatenate to a numpy array
        4. to torch Tensor
        5. normalize
    """
    def _resize(im, size):
        # uint8 [0, 255] -> float32 [0, 1], then resize (cv2 takes (w, h)).
        return cv2.resize(im.astype(np.float32) / 255., size)

    # Normalize every resized crop and stack them along a new batch dim.
    im_batch = torch.cat([
        self.norm(_resize(im, self.size)).unsqueeze(0) for im in im_crops
    ], dim=0).float()
    return im_batch
Python
def main():
    """Does general setup and calls exploit."""
    if len(sys.argv) < 2:
        print(f'Usage: {sys.argv[0]} <mode>')
        sys.exit(0)

    # Best effort: keep going even if the binary cannot be parsed.
    try:
        context.binary = ELF(BINARY)
    except IOError:
        print(f'Failed to load binary ({BINARY})')

    mode = sys.argv[1]
    if mode == 'debug':
        target = gdb.debug('./src/chall', gdbscript='\n'.join(GDB_COMMANDS))
    elif mode in ('local', 'remote'):
        target = remote('localhost' if mode == 'local' else HOST, PORT)
    else:
        print('Invalid mode')
        sys.exit(1)

    exploit(target, mode)
def main():
    """Does general setup and calls exploit."""
    if len(sys.argv) < 2:
        print(f'Usage: {sys.argv[0]} <mode>')
        sys.exit(0)

    # Best effort: keep going even if the binary cannot be parsed.
    try:
        context.binary = ELF(BINARY)
    except IOError:
        print(f'Failed to load binary ({BINARY})')

    mode = sys.argv[1]
    if mode == 'debug':
        target = gdb.debug('./src/chall', gdbscript='\n'.join(GDB_COMMANDS))
    elif mode in ('local', 'remote'):
        target = remote('localhost' if mode == 'local' else HOST, PORT)
    else:
        print('Invalid mode')
        sys.exit(1)

    exploit(target, mode)
Python
def GetEmbeddings(self, x):
    """Looks up some embeddings from the embedding table.

    Args:
      x: matrix of word ids to look up

    Returns:
      word embedding vectors for the given ids.
    """
    e = tf.nn.embedding_lookup(self.word_embeddings, x)
    # Explicit None test instead of truthiness: dropout_keep_prob may be a TF
    # placeholder (fed elsewhere in this file), and evaluating a Tensor as a
    # Python bool raises. Also matches the `is not None` check used by GetCell.
    if self.dropout_keep_prob is not None:
        e = tf.nn.dropout(e, self.dropout_keep_prob)
    return e
def GetEmbeddings(self, x):
    """Looks up some embeddings from the embedding table.

    Args:
      x: matrix of word ids to look up

    Returns:
      word embedding vectors for the given ids.
    """
    e = tf.nn.embedding_lookup(self.word_embeddings, x)
    # Explicit None test instead of truthiness: dropout_keep_prob may be a TF
    # placeholder (fed elsewhere in this file), and evaluating a Tensor as a
    # Python bool raises. Also matches the `is not None` check used by GetCell.
    if self.dropout_keep_prob is not None:
        e = tf.nn.dropout(e, self.dropout_keep_prob)
    return e
Python
def MakeMat(self, word_list, pad_len=None):
    """Make a matrix to hold the character sequences in.

    Special start and end tokens are added to the beginning and end of
    each word.

    Args:
      word_list: A list of strings
      pad_len: Pad all character sequences to this length. If a word is
        longer than the pad_len it will be truncated.

    Returns:
      Array containing character sequences and a vector of sequence lengths.
    """
    if not pad_len:
        pad_len = self.max_sequence_len

    start_id = self.char_vocab['<S>']
    end_id = self.char_vocab['</S>']

    the_words = []
    word_lengths = []
    for word in word_list:
        char_ids = [self.char_vocab[c] for c in util.Graphemes(word)]
        # Truncate to leave room for the start/end markers, then pad with
        # the end marker up to pad_len.
        row = [start_id] + char_ids[:pad_len - 2] + [end_id]
        row.extend([end_id] * (pad_len - len(row)))
        the_words.append(row)
        word_lengths.append(min(pad_len, len(word) + 2))

    return np.array(the_words), np.array(word_lengths)
def MakeMat(self, word_list, pad_len=None):
    """Make a matrix to hold the character sequences in.

    Special start and end tokens are added to the beginning and end of
    each word.

    Args:
      word_list: A list of strings
      pad_len: Pad all character sequences to this length. If a word is
        longer than the pad_len it will be truncated.

    Returns:
      Array containing character sequences and a vector of sequence lengths.
    """
    if not pad_len:
        pad_len = self.max_sequence_len

    start_id = self.char_vocab['<S>']
    end_id = self.char_vocab['</S>']

    the_words = []
    word_lengths = []
    for word in word_list:
        char_ids = [self.char_vocab[c] for c in util.Graphemes(word)]
        # Truncate to leave room for the start/end markers, then pad with
        # the end marker up to pad_len.
        row = [start_id] + char_ids[:pad_len - 2] + [end_id]
        row.extend([end_id] * (pad_len - len(row)))
        the_words.append(row)
        word_lengths.append(min(pad_len, len(word) + 2))

    return np.array(the_words), np.array(word_lengths)
Python
def GetCell(hidden_size, num_proj=None, use_peepholes=False):
    """Helper function to make LSTM cells.

    Args:
      hidden_size: number of units in the LSTM cell
      num_proj: optional size of the output projection
      use_peepholes: whether to enable peephole connections

    Returns:
      An LSTM cell wrapped with input and output dropout.
    """
    layer = LSTMCell(hidden_size, num_proj=num_proj,
                     use_peepholes=use_peepholes)
    # Bug fix: tf.nn.rnn.rnn_cell is not a valid module path; the rest of
    # this file addresses the wrapper as tf.nn.rnn_cell.DropoutWrapper.
    layer = tf.nn.rnn_cell.DropoutWrapper(
        layer,
        output_keep_prob=dropout_keep_prob,
        input_keep_prob=dropout_keep_prob)
    return layer
def GetCell(hidden_size, num_proj=None, use_peepholes=False):
    """Helper function to make LSTM cells.

    Args:
      hidden_size: number of units in the LSTM cell
      num_proj: optional size of the output projection
      use_peepholes: whether to enable peephole connections

    Returns:
      An LSTM cell wrapped with input and output dropout.
    """
    layer = LSTMCell(hidden_size, num_proj=num_proj,
                     use_peepholes=use_peepholes)
    # Bug fix: tf.nn.rnn.rnn_cell is not a valid module path; the rest of
    # this file addresses the wrapper as tf.nn.rnn_cell.DropoutWrapper.
    layer = tf.nn.rnn_cell.DropoutWrapper(
        layer,
        output_keep_prob=dropout_keep_prob,
        input_keep_prob=dropout_keep_prob)
    return layer
Python
def reverse_seq(input_seq, lengths):
    """Reverse a list of Tensors up to specified lengths.

    Args:
      input_seq: Sequence of seq_len tensors of dimension (batch_size, depth)
      lengths: A tensor of dimension batch_size, containing lengths for each
        sequence in the batch. If "None" is specified, simply reverses the
        list.

    Returns:
      time-reversed sequence
    """
    for step_tensor in input_seq:
        step_tensor.set_shape(step_tensor.get_shape().with_rank(2))

    # Stack to (time, batch_size, depth), reverse each sequence along the
    # time axis up to its length, then split back into a per-step list.
    stacked = tf.pack(input_seq)
    time_reversed = tf.reverse_sequence(stacked, lengths, 0, 1)
    return tf.unpack(time_reversed)
def reverse_seq(input_seq, lengths):
    """Reverse a list of Tensors up to specified lengths.

    Args:
      input_seq: Sequence of seq_len tensors of dimension (batch_size, depth)
      lengths: A tensor of dimension batch_size, containing lengths for each
        sequence in the batch. If "None" is specified, simply reverses the
        list.

    Returns:
      time-reversed sequence
    """
    for step_tensor in input_seq:
        step_tensor.set_shape(step_tensor.get_shape().with_rank(2))

    # Stack to (time, batch_size, depth), reverse each sequence along the
    # time axis up to its length, then split back into a per-step list.
    stacked = tf.pack(input_seq)
    time_reversed = tf.reverse_sequence(stacked, lengths, 0, 1)
    return tf.unpack(time_reversed)
Python
def ByteVocab(cls):
    """Creates a vocab that has a token for each possible byte.

    It's useful to have a fixed byte vocab so that the subset of bytes that
    form the vocab is not dependent on the dataset being used. Thus, the
    learned byte embeddings can be reused on different datasets.
    """
    hex_digits = '0123456789abcdef'
    # Start/end markers followed by the 256 two-digit hex strings '00'..'ff'.
    tokens = ['<S>', '</S>'] + [hi + lo for hi in hex_digits
                                for lo in hex_digits]
    return cls(tokens)
def ByteVocab(cls):
    """Creates a vocab that has a token for each possible byte.

    It's useful to have a fixed byte vocab so that the subset of bytes that
    form the vocab is not dependent on the dataset being used. Thus, the
    learned byte embeddings can be reused on different datasets.
    """
    hex_digits = '0123456789abcdef'
    # Start/end markers followed by the 256 two-digit hex strings '00'..'ff'.
    tokens = ['<S>', '</S>'] + [hi + lo for hi in hex_digits
                                for lo in hex_digits]
    return cls(tokens)
Python
def _DoPredictions(self, in_size, mats, class_weights=None):
    """Takes in an array of states and calculates predictions.

    Get the cross-entropy for each example in the vector self._xent.

    Args:
      in_size: size of the hidden state vectors
      mats: list of hidden state vectors
    """
    pred_mat = tf.get_variable('pred_mat',
                               [in_size, self._out_vocab_size])
    pred_bias = tf.get_variable('pred_bias', [self._out_vocab_size])

    # Make a prediction on every word.
    def GetWordPred(o_):
        logits = tf.nn.xw_plus_b(o_, pred_mat, pred_bias)
        return tf.nn.softmax(logits)

    # preds_by_word is time-major: one softmax distribution per word position.
    self.preds_by_word = tf.pack([GetWordPred(o_) for o_ in mats])
    # Normalize the mask so the per-word weights sum to one per example.
    self.cs = self._mask / tf.reduce_sum(self._mask, 1, keep_dims=True)

    # The final prediction is the average of the predictions for each word
    # weighted by the individual confidence/utility scores.
    preds_weighted = tf.mul(tf.reshape(tf.transpose(self.cs), [-1, 1]),
                            tf.reshape(self.preds_by_word,
                                       [-1, self._out_vocab_size]))
    preds_weighted_reshaped = tf.reshape(preds_weighted,
                                         self.preds_by_word.get_shape())
    self.probs = tf.reduce_sum(preds_weighted_reshaped, 0)
    self._xent = _SafeXEnt(self.y, self.probs, class_weights=class_weights)
def _DoPredictions(self, in_size, mats, class_weights=None):
    """Takes in an array of states and calculates predictions.

    Get the cross-entropy for each example in the vector self._xent.

    Args:
      in_size: size of the hidden state vectors
      mats: list of hidden state vectors
    """
    pred_mat = tf.get_variable('pred_mat',
                               [in_size, self._out_vocab_size])
    pred_bias = tf.get_variable('pred_bias', [self._out_vocab_size])

    # Make a prediction on every word.
    def GetWordPred(o_):
        logits = tf.nn.xw_plus_b(o_, pred_mat, pred_bias)
        return tf.nn.softmax(logits)

    # preds_by_word is time-major: one softmax distribution per word position.
    self.preds_by_word = tf.pack([GetWordPred(o_) for o_ in mats])
    # Normalize the mask so the per-word weights sum to one per example.
    self.cs = self._mask / tf.reduce_sum(self._mask, 1, keep_dims=True)

    # The final prediction is the average of the predictions for each word
    # weighted by the individual confidence/utility scores.
    preds_weighted = tf.mul(tf.reshape(tf.transpose(self.cs), [-1, 1]),
                            tf.reshape(self.preds_by_word,
                                       [-1, self._out_vocab_size]))
    preds_weighted_reshaped = tf.reshape(preds_weighted,
                                         self.preds_by_word.get_shape())
    self.probs = tf.reduce_sum(preds_weighted_reshaped, 0)
    self._xent = _SafeXEnt(self.y, self.probs, class_weights=class_weights)
Python
def GetCell():
    """Creates an LSTM cell with dropout."""
    cell = tf.nn.rnn_cell.LSTMCell(
        hidden_size,
        use_peepholes=model_params['peepholes'],
        num_proj=proj_size)
    # Only wrap with input dropout when a keep probability was configured.
    if dropout_keep_prob is None:
        return cell
    return tf.nn.rnn_cell.DropoutWrapper(
        cell, input_keep_prob=dropout_keep_prob)
def GetCell():
    """Creates an LSTM cell with dropout."""
    cell = tf.nn.rnn_cell.LSTMCell(
        hidden_size,
        use_peepholes=model_params['peepholes'],
        num_proj=proj_size)
    # Only wrap with input dropout when a keep probability was configured.
    if dropout_keep_prob is None:
        return cell
    return tf.nn.rnn_cell.DropoutWrapper(
        cell, input_keep_prob=dropout_keep_prob)
Python
def _DoPredictions(self, in_size, mats, class_weights=None):
    """Takes in an array of states and calculates predictions.

    Get the cross-entropy for each example in the vector self._xent.

    Args:
      in_size: size of the hidden state vectors
      mats: list of hidden state vectors
    """
    pred_mat = tf.get_variable('pred_mat',
                               [in_size, self._out_vocab_size])
    pred_bias = tf.get_variable('pred_bias', [self._out_vocab_size])

    # Make a prediction on every word.
    def GetWordPred(o_):
        logits = tf.nn.xw_plus_b(o_, pred_mat, pred_bias)
        return tf.nn.softmax(logits)

    # preds_by_word is time-major: one softmax distribution per word position.
    self.preds_by_word = tf.pack([GetWordPred(o_) for o_ in mats])
    # Re-stack so the batch dimension comes first (time-major -> batch-major).
    self.preds_by_instance = tf.pack(
        [self.preds_by_word[:,i,:] for i in range(self.preds_by_word.get_shape()[1])])
    # Zero out predictions at padded positions before computing the loss.
    self.probs = tf.mul(tf.expand_dims(self._mask,2), self.preds_by_instance)

    # Sum the cross-entropy over both the word and class dimensions.
    self._xent = _SafeXEnt(self.y, self.probs, class_weights=class_weights, sumd=[1,2])
def _DoPredictions(self, in_size, mats, class_weights=None):
    """Takes in an array of states and calculates predictions.

    Get the cross-entropy for each example in the vector self._xent.

    Args:
      in_size: size of the hidden state vectors
      mats: list of hidden state vectors
    """
    pred_mat = tf.get_variable('pred_mat',
                               [in_size, self._out_vocab_size])
    pred_bias = tf.get_variable('pred_bias', [self._out_vocab_size])

    # Make a prediction on every word.
    def GetWordPred(o_):
        logits = tf.nn.xw_plus_b(o_, pred_mat, pred_bias)
        return tf.nn.softmax(logits)

    # preds_by_word is time-major: one softmax distribution per word position.
    self.preds_by_word = tf.pack([GetWordPred(o_) for o_ in mats])
    # Re-stack so the batch dimension comes first (time-major -> batch-major).
    self.preds_by_instance = tf.pack(
        [self.preds_by_word[:,i,:] for i in range(self.preds_by_word.get_shape()[1])])
    # Zero out predictions at padded positions before computing the loss.
    self.probs = tf.mul(tf.expand_dims(self._mask,2), self.preds_by_instance)

    # Sum the cross-entropy over both the word and class dimensions.
    self._xent = _SafeXEnt(self.y, self.probs, class_weights=class_weights, sumd=[1,2])
Python
def _SafeXEnt(y, probs, eps=0.0001, class_weights=None, sumd=None):
    """Version of cross entropy loss that should not produce NaNs.

    If the predicted probability for the true class is near zero then when
    taking the log it can produce a NaN, which ruins everything. This
    function ensures each probability is at least eps and no more than one
    before taking the log.

    Args:
      y: matrix of true probabilities same size as probs
      probs: matrix of probabilities for the minibatch
      eps: value to clip the probabilities at
      class_weights: vector of relative weights to be assigned to each class
      sumd: dimensions along which to sum the x-ent matrix; defaults to [1]

    Returns:
      cross entropy loss for each example in the minibatch
    """
    # None sentinel instead of a mutable default list; [1] (the class
    # dimension) remains the effective default.
    if sumd is None:
        sumd = [1]
    adjusted_probs = tf.clip_by_value(probs, eps, 1.0 - eps)
    xent_mat = -y * tf.log(adjusted_probs)
    if class_weights is not None:
        xent_mat *= class_weights
    return tf.reduce_sum(xent_mat, sumd)
def _SafeXEnt(y, probs, eps=0.0001, class_weights=None, sumd=None):
    """Version of cross entropy loss that should not produce NaNs.

    If the predicted probability for the true class is near zero then when
    taking the log it can produce a NaN, which ruins everything. This
    function ensures each probability is at least eps and no more than one
    before taking the log.

    Args:
      y: matrix of true probabilities same size as probs
      probs: matrix of probabilities for the minibatch
      eps: value to clip the probabilities at
      class_weights: vector of relative weights to be assigned to each class
      sumd: dimensions along which to sum the x-ent matrix; defaults to [1]

    Returns:
      cross entropy loss for each example in the minibatch
    """
    # None sentinel instead of a mutable default list; [1] (the class
    # dimension) remains the effective default.
    if sumd is None:
        sumd = [1]
    adjusted_probs = tf.clip_by_value(probs, eps, 1.0 - eps)
    xent_mat = -y * tf.log(adjusted_probs)
    if class_weights is not None:
        xent_mat *= class_weights
    return tf.reduce_sum(xent_mat, sumd)
Python
def _SafeNegEntropy(probs, batch_size, eps=0.0001):
    """Computes negative entropy in a way that will not overflow."""
    # Clip before the log so near-zero probabilities cannot produce NaNs.
    clipped = tf.clip_by_value(probs, eps, 1.0 - eps)
    entropy_terms = tf.mul(probs, tf.log(clipped))
    return tf.reduce_sum(entropy_terms) / batch_size
def _SafeNegEntropy(probs, batch_size, eps=0.0001):
    """Computes negative entropy in a way that will not overflow."""
    # Clip before the log so near-zero probabilities cannot produce NaNs.
    clipped = tf.clip_by_value(probs, eps, 1.0 - eps)
    entropy_terms = tf.mul(probs, tf.log(clipped))
    return tf.reduce_sum(entropy_terms) / batch_size
Python
def Eval(expdir):
    """Evaluates on dev data.

    Writes results to a results.tsv file in the expdir for use
    in the scoring script.

    NOTE: relies on module-level globals (args, saver, session, dataset,
    model, output_vocab, util) set up by the surrounding script.

    Args:
      expdir: path to experiment directory
    """
    if args.data == 'codeswitch':
        return EvalPerWord(expdir)

    saver.restore(session, os.path.join(expdir, 'model.bin'))

    all_preds, all_labs = [], []
    for _ in xrange(dataset.GetNumBatches()):
        words, seqlen, labs, weights = dataset.GetNextBatch()
        batch_data = MakeFeedDict(words, seqlen, labs)
        if args.model in ["word", "tweet"]:
            model_vars = [model.probs, model.preds_by_word]
            probs, pp = session.run(model_vars, batch_data)
        elif args.model in ["char"]:
            probs, pp = session.run([model.probs, model.preds], batch_data)

        # Examples with zero weight are padding; drop them before scoring.
        idx = weights != 0
        all_preds += [output_vocab[p] for p in np.argmax(probs[idx, :], axis=1)]
        all_labs += [output_vocab[p] for p in np.argmax(labs[idx, :], axis=1)]

    util.Metrics(all_preds, all_labs)

    # This output file is in the format needed to score for TweetLID
    ids = dataset.GetIds()
    with open(os.path.join(expdir, 'results.tsv'), 'w') as f:
        for idnum, p in zip(ids, all_preds):
            f.write('{0}\t{1}\n'.format(idnum, p))
def Eval(expdir):
    """Evaluates on dev data.

    Writes results to a results.tsv file in the expdir for use
    in the scoring script.

    NOTE: relies on module-level globals (args, saver, session, dataset,
    model, output_vocab, util) set up by the surrounding script.

    Args:
      expdir: path to experiment directory
    """
    if args.data == 'codeswitch':
        return EvalPerWord(expdir)

    saver.restore(session, os.path.join(expdir, 'model.bin'))

    all_preds, all_labs = [], []
    for _ in xrange(dataset.GetNumBatches()):
        words, seqlen, labs, weights = dataset.GetNextBatch()
        batch_data = MakeFeedDict(words, seqlen, labs)
        if args.model in ["word", "tweet"]:
            model_vars = [model.probs, model.preds_by_word]
            probs, pp = session.run(model_vars, batch_data)
        elif args.model in ["char"]:
            probs, pp = session.run([model.probs, model.preds], batch_data)

        # Examples with zero weight are padding; drop them before scoring.
        idx = weights != 0
        all_preds += [output_vocab[p] for p in np.argmax(probs[idx, :], axis=1)]
        all_labs += [output_vocab[p] for p in np.argmax(labs[idx, :], axis=1)]

    util.Metrics(all_preds, all_labs)

    # This output file is in the format needed to score for TweetLID
    ids = dataset.GetIds()
    with open(os.path.join(expdir, 'results.tsv'), 'w') as f:
        for idnum, p in zip(ids, all_preds):
            f.write('{0}\t{1}\n'.format(idnum, p))
Python
def Debug(expdir):
    """Plots language and word embeddings from saved model.

    NOTE: relies on module-level globals (saver, session, output_vocab,
    input_vocab, c2v, the_words, util) set up by the surrounding script.
    """
    saver.restore(session, os.path.join(expdir, 'model.bin'))

    # Plot the language embeddings: project the columns of the prediction
    # matrix (one per output language) to 2-D and label them.
    z = [x for x in tf.trainable_variables() if 'pred_mat' in x.name][0]
    zz = z.eval(session)
    c = util.GetProj(zz.T)
    lang_names = [util.GetLangName(output_vocab[i])
                  for i in xrange(len(output_vocab))]
    util.PlotText(c, lang_names)

    # plot some word embeddings
    batch_data = {c2v.words_as_chars: the_words}
    word_embeddings = session.run([c2v.word_embeddings], batch_data)[0]
    c = util.GetProj(word_embeddings)
    util.PlotText(c, input_vocab)
def Debug(expdir):
    """Plots language and word embeddings from saved model.

    NOTE: relies on module-level globals (saver, session, output_vocab,
    input_vocab, c2v, the_words, util) set up by the surrounding script.
    """
    saver.restore(session, os.path.join(expdir, 'model.bin'))

    # Plot the language embeddings: project the columns of the prediction
    # matrix (one per output language) to 2-D and label them.
    z = [x for x in tf.trainable_variables() if 'pred_mat' in x.name][0]
    zz = z.eval(session)
    c = util.GetProj(zz.T)
    lang_names = [util.GetLangName(output_vocab[i])
                  for i in xrange(len(output_vocab))]
    util.PlotText(c, lang_names)

    # plot some word embeddings
    batch_data = {c2v.words_as_chars: the_words}
    word_embeddings = session.run([c2v.word_embeddings], batch_data)[0]
    c = util.GetProj(word_embeddings)
    util.PlotText(c, input_vocab)
Python
def MakeFeedDict(words, seqlen, labs, ws=None):
    """Create the feed dict to process each batch.

    All the inputs should be from the GetNextBatch command.

    Args:
      words: matrix of word ids
      seqlen: vector of sequence lengths
      labs: target matrix
      ws: per-example weights

    Returns:
      dictionary to be used as feed dict.
    """
    batch_data = {
        model.seq_lens: seqlen,
        model.x: words,
    }
    if mode == 'train':
        # Targets, weights and dropout are only fed during training.
        batch_data[model.y] = labs
        batch_data[model.example_weights] = ws
        batch_data[dropout_keep_prob] = model_params['dropout_keep_prob']
    if not baseline:
        # Remap the batch's word ids onto a batch-local vocabulary so only
        # characters for words actually present are fed to the char model.
        # NOTE(review): this calls Char2Vec.GetBatchVocab (the class) rather
        # than the c2v instance used below -- presumably a class/static
        # helper; confirm against the Char2Vec definition.
        batch_vocab, words_remapped = Char2Vec.GetBatchVocab(words)
        batch_data.update({
            c2v.words_as_chars: the_words[batch_vocab, :],
            model.x: words_remapped
        })
        if hasattr(c2v, 'seq_lens'):
            batch_data.update({
                c2v.seq_lens: word_lengths[batch_vocab],
                c2v.batch_dim: len(batch_vocab)
            })
    return batch_data
def MakeFeedDict(words, seqlen, labs, ws=None):
    """Create the feed dict to process each batch.

    All the inputs should be from the GetNextBatch command.

    Args:
      words: matrix of word ids
      seqlen: vector of sequence lengths
      labs: target matrix
      ws: per-example weights

    Returns:
      dictionary to be used as feed dict.
    """
    batch_data = {
        model.seq_lens: seqlen,
        model.x: words,
    }
    if mode == 'train':
        # Targets, weights and dropout are only fed during training.
        batch_data[model.y] = labs
        batch_data[model.example_weights] = ws
        batch_data[dropout_keep_prob] = model_params['dropout_keep_prob']
    if not baseline:
        # Remap the batch's word ids onto a batch-local vocabulary so only
        # characters for words actually present are fed to the char model.
        # NOTE(review): this calls Char2Vec.GetBatchVocab (the class) rather
        # than the c2v instance used below -- presumably a class/static
        # helper; confirm against the Char2Vec definition.
        batch_vocab, words_remapped = Char2Vec.GetBatchVocab(words)
        batch_data.update({
            c2v.words_as_chars: the_words[batch_vocab, :],
            model.x: words_remapped
        })
        if hasattr(c2v, 'seq_lens'):
            batch_data.update({
                c2v.seq_lens: word_lengths[batch_vocab],
                c2v.batch_dim: len(batch_vocab)
            })
    return batch_data
Python
def LoadData(filename, mode='train', model='tweet'):
    """Load data stored in tweetlid format.
    (i.e. tab-separated tweetid, language, tweet)

    Partitioning between train/dev/eval is done by the last digit of
    the id number for each training example. Digits 2 through 9 are used for
    training and 1 is used as a dev set. Currently, the eval set is never
    loaded and confusingly to load the dev set you have to use 'eval' for
    the mode argument.

    This function splits the tweet into a list of units. The level of
    splitting is controlled by the model argument.

    Args:
      filename: where to get the data (gzipped tsv)
      mode: train or eval or eval+final or final
      model: word, tweet or char -- controls tokenization

    Returns:
      tuple of sentences, labels and ids
    """
    ids, labels, sentences = [], [], []
    with gzip.open(filename, 'r') as f:
        for line in f:
            tweetid, lang, tweet = line.split('\t')

            idx = int(tweetid) % 10  # use this to partition data
            # train: last digit 2-9; eval: digit 1; final: digit 0;
            # eval+final: digits 0-2.
            if mode == 'train' and idx < 2:
                continue
            if mode == 'eval+final' and idx > 2:
                continue
            if mode == 'eval' and idx != 1:
                continue
            if mode == 'final' and idx != 0:
                continue

            ids.append(tweetid)
            # split used to handle code switching tweets
            labels.append(re.split(r'\+|/', lang))

            # The { and } brackets are used for start/end symbols
            if model in ['word', 'tweet']:
                # split on whitespace to get words
                sentences.append(['{'] + [unicode(x_.decode('utf8'))
                                          for x_ in tweet.split()] + ['}'])
            elif model=='char':
                # include full tweet as single unicode string (list of length 3)
                sentences.append([u'{'] + [unicode(tweet.decode('utf8'))] + [u'}'])
            else:
                msg = 'Invalid unit type <{0}> for tokenizing tweet'.format(model)
                raise ValueError(msg)
    print '{0} examples loaded'.format(len(sentences))
    return sentences, labels, ids
def LoadData(filename, mode='train', model='tweet'):
    """Load data stored in tweetlid format.
    (i.e. tab-separated tweetid, language, tweet)

    Partitioning between train/dev/eval is done by the last digit of
    the id number for each training example. Digits 2 through 9 are used for
    training and 1 is used as a dev set. Currently, the eval set is never
    loaded and confusingly to load the dev set you have to use 'eval' for
    the mode argument.

    This function splits the tweet into a list of units. The level of
    splitting is controlled by the model argument.

    Args:
      filename: where to get the data (gzipped tsv)
      mode: train or eval or eval+final or final
      model: word, tweet or char -- controls tokenization

    Returns:
      tuple of sentences, labels and ids
    """
    ids, labels, sentences = [], [], []
    with gzip.open(filename, 'r') as f:
        for line in f:
            tweetid, lang, tweet = line.split('\t')

            idx = int(tweetid) % 10  # use this to partition data
            # train: last digit 2-9; eval: digit 1; final: digit 0;
            # eval+final: digits 0-2.
            if mode == 'train' and idx < 2:
                continue
            if mode == 'eval+final' and idx > 2:
                continue
            if mode == 'eval' and idx != 1:
                continue
            if mode == 'final' and idx != 0:
                continue

            ids.append(tweetid)
            # split used to handle code switching tweets
            labels.append(re.split(r'\+|/', lang))

            # The { and } brackets are used for start/end symbols
            if model in ['word', 'tweet']:
                # split on whitespace to get words
                sentences.append(['{'] + [unicode(x_.decode('utf8'))
                                          for x_ in tweet.split()] + ['}'])
            elif model=='char':
                # include full tweet as single unicode string (list of length 3)
                sentences.append([u'{'] + [unicode(tweet.decode('utf8'))] + [u'}'])
            else:
                msg = 'Invalid unit type <{0}> for tokenizing tweet'.format(model)
                raise ValueError(msg)
    print '{0} examples loaded'.format(len(sentences))
    return sentences, labels, ids
Python
def GetNumberLines(lines, vocab, pad_length):
    """Convert list of words to matrix of word ids.

    Args:
      lines: list of token lists
      vocab: mapping from token to integer id; must contain '}' (the pad token)
      pad_length: pad every line to this length (longer lines are left as-is)

    Returns:
      numpy array of word ids, one row per line.
    """
    out = []
    for line in lines:
        # Build a padded copy instead of `line += ...`, which mutated the
        # caller's lists in place.
        padded = line + ['}'] * (pad_length - len(line))
        out.append([vocab[w] for w in padded])
    return np.array(out)
def GetNumberLines(lines, vocab, pad_length):
    """Convert list of words to matrix of word ids.

    Args:
      lines: list of token lists
      vocab: mapping from token to integer id; must contain '}' (the pad token)
      pad_length: pad every line to this length (longer lines are left as-is)

    Returns:
      numpy array of word ids, one row per line.
    """
    out = []
    for line in lines:
        # Build a padded copy instead of `line += ...`, which mutated the
        # caller's lists in place.
        padded = line + ['}'] * (pad_length - len(line))
        out.append([vocab[w] for w in padded])
    return np.array(out)
Python
def cruse_pred_fn(self, audio_data: torch.tensor, training_process):
    """
    Must return the predicted audio ready to be plotted
    """
    # calculate complex spectrum and log pow spec (lps)
    # TODO: avoid hardcoding window
    window = torch.hann_window(self.window_size).to(audio_data.device)

    # TODO: fixed transformation for now, allow flexibility
    # or allow just external callables
    spec = torch.stft(audio_data,
                      onesided=True,
                      n_fft=self.window_size,
                      center=True,
                      hop_length=self.hop_size,
                      normalized=False,
                      window=window,
                      return_complex=True)
    lps = torch.log10(torch.abs(spec) ** 2 + 1e-7)

    # Apply the model's mask to the complex spectrum and invert back to audio.
    mask = training_process.model(lps)
    masked_spec = mask.squeeze(1).permute(0, 2, 1) * spec

    predicted = torch.istft(masked_spec,
                            onesided=True,
                            n_fft=self.window_size,
                            center=True,
                            hop_length=self.hop_size,
                            normalized=False,
                            window=window)
    return predicted.cpu().numpy().reshape(-1)
def cruse_pred_fn(self, audio_data: torch.tensor, training_process): """ Must return the predicted audio ready to be plotted """ # calculate complex spectrum and log pow spec (lps) # TODO: avoid hardcoding window hann_window = torch.hann_window(self.window_size).to(audio_data.device) # TODO: fixed transformation for now, allow flexibility # or allow just external callables audio_complex = torch.stft(audio_data, onesided=True, n_fft=self.window_size, center=True, hop_length=self.hop_size, normalized=False, window=hann_window, return_complex=True) audio_lps = torch.log10(torch.abs(audio_complex) ** 2 + 1e-7) pred_audio_mask = training_process.model(audio_lps) pred_audio_complex = ( pred_audio_mask.squeeze(1).permute(0, 2, 1) * audio_complex ) pred_audio = torch.istft(pred_audio_complex, onesided=True, n_fft=self.window_size, center=True, hop_length=self.hop_size, normalized=False, window=hann_window) pred_audio = pred_audio.cpu().numpy().reshape(-1) return pred_audio
Python
def stft(self, x: torch.tensor): """ Calculates the Short-Time Fourier Transform over a given tensor. The expected tensor is a 2D tensor in the following shape: (batch_size, audio_samples) e.g. a (4, 16000) would correspond to a batch of 4 files of 1 second each at 16kHz of sample rate. Args: x (torch.tensor): Input tensor containing a single audio chunk or a batch of audio chunks. The last dimension is assumed to contain the raw audio samples. Returns: real, imag (torch.tensor, torch.tensor): A tuple with two torch tensors corresponding to the real and imaginary part of the STFT performed over the input tensor. Only the first half of the resulting bins are returned. """ # hard coded hann window for now! hann_window = torch.hann_window(self.window_size).to(x.device) stft = torch.stft(x, onesided=True, center=False, n_fft=self.window_size, hop_length=self.hop_size, normalized=False, window=hann_window, return_complex=True) return torch.abs(stft), torch.angle(stft)
def stft(self, x: torch.tensor): """ Calculates the Short-Time Fourier Transform over a given tensor. The expected tensor is a 2D tensor in the following shape: (batch_size, audio_samples) e.g. a (4, 16000) would correspond to a batch of 4 files of 1 second each at 16kHz of sample rate. Args: x (torch.tensor): Input tensor containing a single audio chunk or a batch of audio chunks. The last dimension is assumed to contain the raw audio samples. Returns: real, imag (torch.tensor, torch.tensor): A tuple with two torch tensors corresponding to the real and imaginary part of the STFT performed over the input tensor. Only the first half of the resulting bins are returned. """ # hard coded hann window for now! hann_window = torch.hann_window(self.window_size).to(x.device) stft = torch.stft(x, onesided=True, center=False, n_fft=self.window_size, hop_length=self.hop_size, normalized=False, window=hann_window, return_complex=True) return torch.abs(stft), torch.angle(stft)
Python
def ifft(self, x_mag: torch.tensor, x_phase: torch.tensor):
    """
    Calculates the Inverse Fast Fourier Transform of the input
    magnitude and phase tensors.

    The complex spectrum is rebuilt as
    ``mag * (cos(phase) + i * sin(phase))`` and inverted with a
    one-sided inverse real FFT, so the inputs are assumed to contain
    only the non-redundant half of the frequency bins.

    Args:
        x_mag (torch.tensor): Magnitude input tensor.
        x_phase (torch.tensor): Phase input tensor.

    Returns:
        torch.tensor: Time-domain frames (inverse FFT along the last
        dimension; no windowing or overlap-add is performed here).
    """
    # Polar -> rectangular conversion.
    x_real = x_mag * torch.cos(x_phase)
    x_imag = x_mag * torch.sin(x_phase)
    x_complex = torch.complex(x_real, x_imag)
    # One-sided inverse real FFT over the last (frequency) dimension.
    ifft = torch.fft.irfft(x_complex, dim=-1)
    return ifft
Python
def as_dict(self):
    """Convert to an ordered dictionary.

    Returns
    -------
    out : OrderedDict
        An OrderedDict mapping each entry of ``self._names`` to the
        corresponding value of this sequence, in order.
    """
    out = collections.OrderedDict()
    for name, value in zip(self._names, self):
        out[name] = value
    return out
Python
def conv_val(inval, og_type, final_type, temp, out_unit, conv):
    """
    Helper function to convert value units for testing purposes.

    Passes `temperature` to utils.convert_value only when `temp` is
    given, and converts the result to the `conv` unit when requested.
    Returns the bare magnitude in all cases.
    """
    kwargs = {'out_unit': out_unit}
    if temp is not None:
        kwargs['temperature'] = temp
    retval = utils.convert_value(inval, og_type, final_type, **kwargs)
    if conv is None:
        return retval.magnitude
    return retval.to(utils.unit(conv)).magnitude
Python
def conv_err(inerr, inval, og_type, final_type, temp, out_unit, conv):
    """
    Helper function to convert error units for testing purposes.

    Passes `temperature` to utils.convert_error only when `temp` is
    given, and converts the result to the `conv` unit when requested.
    Returns the bare magnitude in all cases.
    """
    kwargs = {'out_unit': out_unit}
    if temp is not None:
        kwargs['temperature'] = temp
    retval = utils.convert_error(inerr, inval, og_type, final_type, **kwargs)
    if conv is None:
        return retval.magnitude
    return retval.to(utils.unit(conv)).magnitude
Python
def pointcloud_feature_spec(with_annotations=True):
    """Feature specification of pointcloud data.

    Args:
      with_annotations: If true semantic labels for points are also present.
        This is the default (True) for training data.
        NOTE(review): currently unused -- the returned spec always includes
        the 'labels' tensor; confirm whether it should be conditional.

    Returns:
      Feature specification (tfds) for a single pointcloud data.
    """
    point_fields = {
        # Pointcloud positions (Nx3).
        'position': tfds.features.Tensor(shape=(None, 3), dtype=tf.int64),
        # Pointcloud intensity (Nx1).
        'intensity': tfds.features.Tensor(shape=(None, 1), dtype=tf.int64),
        # Pointcloud labels (Nx1).
        'labels': tfds.features.Tensor(shape=(None, 1), dtype=tf.int64),
    }
    # 3D pointcloud data.
    return tfds.features.FeaturesDict({'pointcloud': point_fields})
Python
def run(file_name, args, debug):
    """
    Run a specified Pikachu file in a virtual environment.

    Arguments:
    file_name -- the name and path of a file containing a pikachu program.
    args -- the command line arguments specified when the pikachu
        interpreter was run.
    debug -- when truthy, dump the reader position and both stacks
        before every command.
    """
    import math  # local import: needed for the NaN check after DIV

    pi_stack = PikaStack()
    pika_stack = PikaStack()

    stacks_dict = {
        "pi pikachu": pi_stack,
        "pika pikachu": pika_stack
    }

    for a in args:
        pi_stack.PUSH(a)

    reader = PikaReader(file_name)
    while True:
        try:
            if debug:
                try:
                    print("\nline {}: {}\npi {}\npika {}".format(
                        reader.line_num,
                        reader.lines[reader.line_num],
                        pi_stack.elements,
                        pika_stack.elements))
                except KeyError:
                    pass
            command = next(reader)
        except StopIteration:
            print('')
            break
        # Strip the trailing ' chu...' suffix before tokenizing.
        command = command.split(' chu')[0]
        terms = command.split()
        if len(terms) == 0:
            continue
        if len(terms) == 1:
            pika_error(reader.line_num, 'unknown command "{}"'.format(terms[0]))
        elif len(terms) < 3:
            # Two-term commands: stack pops, copies, and conditional jumps.
            command = " ".join(terms)
            if command == "pi pikachu":
                pi_stack.POP()
            elif command == "pika pikachu":
                pika_stack.POP()
            elif command == "pi pika":
                if not pi_stack.EMPTY():
                    pika_stack.PUSH(pi_stack.PEEK())
            elif command == "pika pi":
                if not pika_stack.EMPTY():
                    pi_stack.PUSH(pika_stack.PEEK())
            elif command == "pi pi":
                if not pika_stack.EMPTY():
                    pika_stack.RAND()
            elif command == "pikachu pikachu":
                # Jump target is encoded as the word count of the next line.
                try:
                    line_num = len(next(reader).split())
                except StopIteration:
                    pika_error(reader.line_num - 1, "unexpected EoF, expected new line")
                if pi_stack.PEEK() != pika_stack.PEEK():
                    continue
                reader.goto(line_num)
            elif command == "pika pika":
                try:
                    line_num = len(next(reader).split())
                except StopIteration:
                    pika_error(reader.line_num - 1, "unexpected EoF, expected new line")
                if pi_stack.PEEK() == pika_stack.PEEK():
                    continue
                reader.goto(line_num)
            else:
                pika_error(reader.line_num, 'unknown command "{}"'.format(reader.lines[reader.line_num]))
        elif len(terms) < 4:
            # Three-term commands: DIV or push literal 1.
            try:
                current_stack = stacks_dict[" ".join(terms[-2:])]
            except KeyError:
                pika_error(reader.line_num, 'unknown pikachu "{}"'.format(" ".join(terms[-2:])))
            command = terms[0]
            if command == "pikachu":
                current_stack.DIV()
                top = current_stack.PEEK()
                # NaN never compares equal to itself, so the original
                # `PEEK() == float('NaN')` test could never detect the
                # division-by-zero marker; use math.isnan instead.
                if isinstance(top, float) and math.isnan(top):
                    pika_error(reader.line_num, 'cannot divide by 0')
            else:
                current_stack.PUSH(1)
        elif len(terms) < 5:
            # Four-term commands: arithmetic and output.
            try:
                current_stack = stacks_dict[" ".join(terms[-2:])]
            except KeyError:
                pika_error(reader.line_num, 'unknown pikachu "{}"'.format(" ".join(terms[-2:])))
            command = " ".join(terms[:-2])
            if command == "pi pika":
                current_stack.ADD()
            elif command == "pika pi":
                current_stack.SUB()
            elif command == "pi pikachu":
                current_stack.MULT()
            elif command == "pika pikachu":
                if not current_stack.EMPTY():
                    pika_print(current_stack.POP())
                else:
                    pika_print("undefined")
            elif command == "pikachu pikachu":
                n = current_stack.POP()
                if n and type(n) == int:
                    pika_print(chr(n))
                else:
                    pika_print("undefined")
            else:
                current_stack.PUSH(2)
        else:
            # Longer lines push their own length minus the stack selector.
            try:
                current_stack = stacks_dict[" ".join(terms[-2:])]
            except KeyError:
                pika_error(reader.line_num, 'unknown pikachu "{}"'.format(" ".join(terms[-2:])))
            current_stack.PUSH(len(terms) - 2)
Python
def pika_error(line_num, msg):
    """
    Report a syntax error in the pikachu program, then terminate.

    Arguments:
    line_num -- the line where the syntax error was found.
    msg -- human-readable description of the error.
    """
    message = 'SyntaxError in line {}: {}'.format(line_num, msg)
    pika_print(message)
    exit()
Python
def next(self):
    """
    Provide support for the next() function.

    next(this) is used to iterate through the pikachu code a line at a time.
    Blank lines (and comments, which are stored blank) are skipped.

    Exceptions:
    StopIteration -- when the end of the file has been reached.
    """
    # Advance past blank lines (iterative form of the old tail recursion).
    while True:
        self.line_num += 1
        if self.line_num > len(self.lines):  # EOF
            raise StopIteration
        line = self.lines[self.line_num]
        if line:
            break
    # Reject invalid repetition of pi, pika, pikachu.
    previous = None
    count = 0
    for term in line.split():
        if term == previous:
            count += 1
            if count >= 3:
                pika_error(self.line_num, 'too many repetitions')
        else:
            previous = term
            count = 1
    return line
Python
def goto(self, line_num):
    """
    Directs the reader to a specific line of code.

    Arguments:
    line_num -- the line of code to set the reader to. Out-of-range
        targets are clamped toward the end of the code.

    NOTE(review): the clamp lands on len(lines) - 2 before the -1
    adjustment below, i.e. three lines before the end -- confirm this
    matches the "last line" semantics the original docstring described.
    """
    target = line_num
    if target > len(self.lines):
        target = len(self.lines) - 2
    # next() pre-increments, so store the predecessor line number.
    self.line_num = target - 1
Python
def ADD(self):
    """
    Add the top two elements on the stack.

    Both operands are left in place and the sum is pushed on top of
    them.  POP treats a missing operand as 0, so an empty or one-element
    stack still produces a result.
    """
    top = self.POP()
    second = self.POP()
    total = top + second
    # Restore the operands, then push the result on top.
    self.PUSH(second)
    self.PUSH(top)
    self.PUSH(total)
Python
def SUB(self):
    """
    Subtract the top element from the second element.

    Both operands are left in place and the difference (second - top)
    is pushed on top of them.  POP treats a missing operand as 0.
    """
    top = self.POP()
    second = self.POP()
    difference = second - top
    # Restore the operands, then push the result on top.
    self.PUSH(second)
    self.PUSH(top)
    self.PUSH(difference)
Python
def MULT(self):
    """
    Multiply the top two elements on the stack.

    Both operands are left in place and the product is pushed on top of
    them.  POP treats a missing operand as 0, so an empty or one-element
    stack yields 0.
    """
    top = self.POP()
    second = self.POP()
    product = top * second
    # Restore the operands, then push the result on top.
    self.PUSH(second)
    self.PUSH(top)
    self.PUSH(product)
Python
def DIV(self):
    """
    Floor-divide the second element on the stack by the top element.

    On success both operands are restored and the quotient is pushed on
    top of them.  When the divisor is 0, only float('NaN') is pushed.
    NOTE(review): the zero branch does not restore the popped operands,
    unlike the success branch -- confirm whether that asymmetry is
    intentional.
    """
    divisor = self.POP()
    dividend = self.POP()
    if divisor == 0:
        # Division-by-zero marker; the caller checks for NaN.
        self.PUSH(float('NaN'))
    else:
        quotient = dividend // divisor
        self.PUSH(dividend)
        self.PUSH(divisor)
        self.PUSH(quotient)
Python
def RAND(self):
    """
    Push a random number between 1 and the top element (inclusive).

    Error handling:
    If the stack is empty, or the top element is not positive, 0 is
    pushed instead.
    """
    top = self.PEEK()
    if top and top > 0:
        self.PUSH(randrange(top) + 1)
    else:
        self.PUSH(0)
Python
def POP(self):
    """
    Pop and return the top element from the stack.

    Error handling:
    If the stack is empty, 0 is returned instead of raising.
    """
    if not self.elements:
        return 0
    return self.elements.pop()
Python
def PUSH(self, element):
    """
    Push an element onto the top of the stack.

    Arguments:
    element -- the value to place on top of the stack.
    """
    self.elements.append(element)
Python
def PEEK(self):
    """
    Return the top element of the stack without removing it.

    Error handling:
    If the stack is empty, 0 is returned instead of raising.
    """
    return self.elements[-1] if self.elements else 0
Python
def EMPTY(self):
    """
    Return True if the stack holds no elements, False otherwise.
    """
    return not self.elements
Python
def read(section: str = "DEFAULT"):
    """
    reads the ~/.datadog.ini `section` with the following allowed properties

    :param section identifying a specific datadog account

    api_key: Datadog API key (string)
    app_key: Datadog application key (string)
    proxies: dictionary mapping protocol to the URL of the proxy.
    api_host: Datadog API endpoint (url)
    cacert: Path to local certificate file used to verify SSL \
        certificates. Can also be set to True (default) to use the systems \
        certificate store, or False to skip SSL verification (path or boolean)
    mute: Mute any ApiError or ClientError before they escape \
        from datadog.api.HTTPClient (default: True) (boolean)

    Missing api_key/app_key values fall back to the DATADOG_*/DD_*
    environment variables.
    """
    parser = ConfigParser()
    parser.read(path.expanduser("~/.datadog.ini"))
    result = {
        key: value
        for key, value in parser.items(section)
        if key in allowed_properties
    }
    # Environment variables fill in missing credentials.
    for prop, (primary, fallback) in (
        ("api_key", ("DATADOG_API_KEY", "DD_API_KEY")),
        ("app_key", ("DATADOG_APP_KEY", "DD_APP_KEY")),
    ):
        if not result.get(prop):
            result[prop] = os.environ.get(primary, os.environ.get(fallback))
    return result
Python
def restar(x: variable, y: variable) -> variable:
    """Subtracts two numbers and propagates the uncertainty.

    Args:
        x (variable): Variable with value and uncertainty.
        y (variable): Variable with value and uncertainty.

    Returns:
        variable: The subtraction of the two values, x - y.
    """
    difference = x.value - y.value
    uncertainty = add_sub(x.d_value, y.d_value)
    return variable(difference, uncertainty)
Python
def multiplicar(x: variable, y: variable) -> variable:
    """Multiplies two numbers and propagates the uncertainty.

    Args:
        x (variable): Variable with value and uncertainty.
        y (variable): Variable with value and uncertainty.

    Returns:
        variable: The product of the two numbers.
    """
    product = x.value * y.value
    uncertainty = mul_div(x.value, y.value, x.d_value, y.d_value)
    return variable(product, uncertainty)
Python
def dividir(x: variable, y: variable) -> variable:
    """Divides two numbers and propagates the uncertainty.

    Args:
        x (variable): Dividend, a variable with value and uncertainty.
        y (variable): Divisor, a variable with value and uncertainty;
            its value must not be 0.

    Returns:
        variable: The division of the two values, x / y.
    """
    quotient = x.value / y.value
    uncertainty = mul_div(x.value, y.value, x.d_value, y.d_value)
    return variable(quotient, uncertainty)
Python
def userinput(message):
    """Prompt repeatedly until the user enters a valid float.

    Args:
        message (string): Message displayed when prompting the user.

    Returns:
        float: Number chosen by the user.
    """
    while True:
        try:
            return float(input(message))
        except ValueError:
            print("Not a number")
Python
def managestring(string):
    """Scan the user's formula, recording characters and new variables.

    Every character is appended to the global `lista`; each previously
    unseen alphabetic character is registered in the global `variables`
    dict after prompting the user for its value and uncertainty.

    Args:
        string (string): Formula entered by the user.
    """
    for ch in string:
        lista.append(ch)
        if ch.isalpha() and ch not in variables:
            val = userinput("Value of {}\n>".format(ch))
            unc = userinput("Enter uncertainty of {}\n>".format(ch))
            variables[ch] = variable(val, unc)
Python
def next_value(lista):
    """Return the next value from the expression, resolving parentheses.

    Consumes (pops) the leading token of `lista`.  An opening parenthesis
    triggers recursive evaluation of the enclosed sub-expression; a known
    variable name is replaced by its `variable` object from the global
    `variables` dict.

    Args:
        lista (list): Remaining tokens of the expression.

    Returns:
        The next token, the matching `variable` object, or the evaluated
        result of a parenthesized sub-expression.
    """
    try:
        x = lista.pop(0)
    except IndexError:
        # Previously execution fell through after printing and crashed
        # with an unrelated NameError on the unbound `x`; exit cleanly.
        print("Incomplete expresion, check and try again")
        raise SystemExit
    if x == "(":
        # Isolate the parenthesized sub-expression and evaluate it.
        substack = []
        x = next_value(lista)
        while x != ")":
            substack.append(x)
            x = next_value(lista)
        return evaluate(substack)
    if x in variables:
        # Known variable names yield their variable object instead.
        return variables[x]
    else:
        return x
Python
def evaluate(lista):
    """Evaluate the tokenized expression with uncertainty propagation.

    First pass resolves * and / immediately (honoring precedence),
    pushing results onto an intermediate stack; parentheses are handled
    inside next_value().  Second pass folds the remaining + and -
    left to right.

    Args:
        lista (list): The expression turned into a list of tokens.

    Returns:
        variable: The result of the whole expression in variable form.
    """
    stack = []
    # First pass: apply multiplicative operators as they appear.
    while lista:
        token = next_value(lista)
        if token == "*" or token == "/":
            left = stack.pop()
            right = next_value(lista)
            if token == "*":
                stack.append(multiplicar(left, right))
            else:
                stack.append(dividir(left, right))
        else:
            stack.append(token)
    # Second pass: fold additive operators left to right.
    now = next_value(stack)
    while stack:
        op = next_value(stack)
        if op == "+":
            now = sumar(now, next_value(stack))
        elif op == "-":
            now = restar(now, next_value(stack))
        else:
            print("Unacceptable character found")
    return now
Python
def new_expresion(): """Makes sure the formula entered by the user is in a correct format and wont explode the code Raises: ValueError: If the string is empty NameError: If there variables with more than 1 character KeyError: If there is an unaccounted value Returns: string: The formula in a correct format """ symbols = "+-*/()" while True: try: string = input("Type in the formula: ") if not string: raise ValueError listedstring = [] for x in string: listedstring.append(x) if not x.isalpha(): if x not in symbols: raise NameError for x in range(len(listedstring)-1): if listedstring[x].isalpha() and listedstring[x+1].isalpha(): raise KeyError except ValueError: print(">ERROR> Empty strings are not allowed\n") except KeyError: print(">ERROR> Variables should only be 1 character, for multiplication use '*'\n") except NameError: print(">ERROR> Don't use numbers. The only operators allowed are: {}".format(symbols)) else: return string
def new_expresion():
    """Prompt the user until a well-formed formula is entered.

    A formula is accepted when it is non-empty, every character is either
    a letter or one of '+-*/()', and no two letters are adjacent
    (variables are limited to a single character).

    Raises (caught internally, reprompting each time):
        ValueError: the entered string is empty.
        NameError: a character is neither a letter nor an allowed symbol.
        KeyError: two letters appear side by side (multi-char variable).

    Returns:
        str: The validated formula exactly as typed.
    """
    symbols = "+-*/()"
    while True:
        try:
            string = input("Type in the formula: ")
            if not string:
                raise ValueError
            chars = list(string)
            # Reject anything that is not a letter or an operator/paren.
            for ch in chars:
                if not ch.isalpha() and ch not in symbols:
                    raise NameError
            # Reject adjacent letters: variables are one character long.
            for left, right in zip(chars, chars[1:]):
                if left.isalpha() and right.isalpha():
                    raise KeyError
        except ValueError:
            print(">ERROR> Empty strings are not allowed\n")
        except KeyError:
            print(">ERROR> Variables should only be 1 character, for multiplication use '*'\n")
        except NameError:
            print(">ERROR> Don't use numbers. The only operators allowed are: {}".format(symbols))
        else:
            return string
Python
def ruleset(self, path, user):
    """Build the redis rule mapping for one (path, user) pair.

    Keys are formatted as "<path>:<user>:<rulename>"; values are
    (limit, window_size) tuples.  Rules whose limit attribute on
    self is None are skipped entirely.
    """
    rules = {}
    for rule in RULENAMES:
        threshold = getattr(self, rule)
        if threshold is None:
            continue  # rule not configured for this object
        rules[f"{path}:{user}:{rule}"] = (threshold, WINDOW_SIZE[rule])
    return rules
def ruleset(self, path, user):
    """Build the redis rule mapping for one (path, user) pair.

    Keys are formatted as "<path>:<user>:<rulename>"; values are
    (limit, window_size) tuples.  Rules whose limit attribute on
    self is None are skipped entirely.
    """
    rules = {}
    for rule in RULENAMES:
        threshold = getattr(self, rule)
        if threshold is None:
            continue  # rule not configured for this object
        rules[f"{path}:{user}:{rule}"] = (threshold, WINDOW_SIZE[rule])
    return rules
Python
def lst(ctx: click.Context, depth: int) -> None:
    """
    list directories with the most dupes until DEPTH.
    """
    # The database path is stashed on the group context by the CLI entry
    # point; let the Dupes table print its top-DEPTH directory listing.
    dbname = ctx.obj['dbname']
    dupes = dupetable.Dupes(dbname)
    dupes.print_most_common(depth)
def lst(ctx: click.Context, depth: int) -> None:
    """
    list directories with the most dupes until DEPTH.
    """
    # The database path is stashed on the group context by the CLI entry
    # point; let the Dupes table print its top-DEPTH directory listing.
    dbname = ctx.obj['dbname']
    dupes = dupetable.Dupes(dbname)
    dupes.print_most_common(depth)
Python
def dd(ctx: click.Context, match_deletions: bool, autoselect: bool,
       dupes_global: bool, do_move: bool, trash: str, outfile: str,
       deldir: str, pattern: str) -> None:
    """
    Dedupe Directory. Type dd --help for details.

    \b
    1) Dupes are selected by regex search PATTERN within DELDIR.
       PATTERN is optional and defaults to '.' (any character).
       Note the regex search (not match).
    2) Hits within DELDIR are marked as dupes to delete
       (if match_deletions, default) or dupes to keep (if match_keeps).
    3) If dupes_global is True (default), all dupes outside DELDIR are
       taken into account:
       - if match_deletions, they are added to the list of dupes to keep,
       - if match_keeps, they are marked for deletion.
    4) if autoselect ist True and all dupes of a files are marked for
       deletion, one of them is deselected; if autoselect is False
       (the default), no deletion is performed if for a hash dupes are
       not contained in keeptable.

    if do_move is False, a dry run is done and a pretty printed table is
    shown and saved as html for further inspection.

    if do_move is True, dupes marked for deletion will be moved to TRASH
    and deleted from the database. if TRASH equals "DELETE", dupes are
    deleted instead of being moved to the Trash.
    """
    # Assemble the Dupetable keyword arguments in a single literal;
    # 'dedupe' is always forced on for this command.
    config: typing.Dict[str, typing.Any] = {
        'dbname': ctx.obj['dbname'],
        'deldir': str(pathlib.Path(deldir).resolve()),
        'pattern': pattern,
        'match_deletions': match_deletions,
        'dupes_global': dupes_global,
        'autoselect': autoselect,
        'dedupe': True,
    }
    table = dupetable.Dupetable(**config)
    # Dry-run output: pretty tree on stdout plus an html file for review.
    table.print_tree(outfile=pathlib.Path(outfile))
    if do_move:
        # Real run: move (or hard-delete, when trash == "DELETE") the
        # selected dupes and purge them from the database.
        table.delete(trash)
def dd(ctx: click.Context, match_deletions: bool, autoselect: bool,
       dupes_global: bool, do_move: bool, trash: str, outfile: str,
       deldir: str, pattern: str) -> None:
    """
    Dedupe Directory. Type dd --help for details.

    \b
    1) Dupes are selected by regex search PATTERN within DELDIR.
       PATTERN is optional and defaults to '.' (any character).
       Note the regex search (not match).
    2) Hits within DELDIR are marked as dupes to delete
       (if match_deletions, default) or dupes to keep (if match_keeps).
    3) If dupes_global is True (default), all dupes outside DELDIR are
       taken into account:
       - if match_deletions, they are added to the list of dupes to keep,
       - if match_keeps, they are marked for deletion.
    4) if autoselect ist True and all dupes of a files are marked for
       deletion, one of them is deselected; if autoselect is False
       (the default), no deletion is performed if for a hash dupes are
       not contained in keeptable.

    if do_move is False, a dry run is done and a pretty printed table is
    shown and saved as html for further inspection.

    if do_move is True, dupes marked for deletion will be moved to TRASH
    and deleted from the database. if TRASH equals "DELETE", dupes are
    deleted instead of being moved to the Trash.
    """
    # Assemble the Dupetable keyword arguments in a single literal;
    # 'dedupe' is always forced on for this command.
    config: typing.Dict[str, typing.Any] = {
        'dbname': ctx.obj['dbname'],
        'deldir': str(pathlib.Path(deldir).resolve()),
        'pattern': pattern,
        'match_deletions': match_deletions,
        'dupes_global': dupes_global,
        'autoselect': autoselect,
        'dedupe': True,
    }
    table = dupetable.Dupetable(**config)
    # Dry-run output: pretty tree on stdout plus an html file for review.
    table.print_tree(outfile=pathlib.Path(outfile))
    if do_move:
        # Real run: move (or hard-delete, when trash == "DELETE") the
        # selected dupes and purge them from the database.
        table.delete(trash)