Dataset columns:
- repo: string, length 7 to 55
- path: string, length 4 to 127
- func_name: string, length 1 to 88
- original_string: string, length 75 to 19.8k
- language: string, 1 distinct value
- code: string, length 75 to 19.8k
- code_tokens: sequence
- docstring: string, length 3 to 17.3k
- docstring_tokens: sequence
- sha: string, length 40
- url: string, length 87 to 242
- partition: string, 1 distinct value
optimizely/python-sdk
optimizely/bucketer.py
Bucketer.bucket
def bucket(self, experiment, user_id, bucketing_id): """ For a given experiment and bucketing ID determines variation to be shown to user. Args: experiment: Object representing the experiment for which user is to be bucketed. user_id: ID for user. bucketing_id: ID to be used for bucketing the user. Returns: Variation in which user with ID user_id will be put in. None if no variation. """ if not experiment: return None # Determine if experiment is in a mutually exclusive group if experiment.groupPolicy in GROUP_POLICIES: group = self.config.get_group(experiment.groupId) if not group: return None user_experiment_id = self.find_bucket(bucketing_id, experiment.groupId, group.trafficAllocation) if not user_experiment_id: self.config.logger.info('User "%s" is in no experiment.' % user_id) return None if user_experiment_id != experiment.id: self.config.logger.info('User "%s" is not in experiment "%s" of group %s.' % ( user_id, experiment.key, experiment.groupId )) return None self.config.logger.info('User "%s" is in experiment %s of group %s.' % ( user_id, experiment.key, experiment.groupId )) # Bucket user if not in white-list and in group (if any) variation_id = self.find_bucket(bucketing_id, experiment.id, experiment.trafficAllocation) if variation_id: variation = self.config.get_variation_from_id(experiment.key, variation_id) self.config.logger.info('User "%s" is in variation "%s" of experiment %s.' % ( user_id, variation.key, experiment.key )) return variation self.config.logger.info('User "%s" is in no variation.' % user_id) return None
python
def bucket(self, experiment, user_id, bucketing_id): """ For a given experiment and bucketing ID determines variation to be shown to user. Args: experiment: Object representing the experiment for which user is to be bucketed. user_id: ID for user. bucketing_id: ID to be used for bucketing the user. Returns: Variation in which user with ID user_id will be put in. None if no variation. """ if not experiment: return None # Determine if experiment is in a mutually exclusive group if experiment.groupPolicy in GROUP_POLICIES: group = self.config.get_group(experiment.groupId) if not group: return None user_experiment_id = self.find_bucket(bucketing_id, experiment.groupId, group.trafficAllocation) if not user_experiment_id: self.config.logger.info('User "%s" is in no experiment.' % user_id) return None if user_experiment_id != experiment.id: self.config.logger.info('User "%s" is not in experiment "%s" of group %s.' % ( user_id, experiment.key, experiment.groupId )) return None self.config.logger.info('User "%s" is in experiment %s of group %s.' % ( user_id, experiment.key, experiment.groupId )) # Bucket user if not in white-list and in group (if any) variation_id = self.find_bucket(bucketing_id, experiment.id, experiment.trafficAllocation) if variation_id: variation = self.config.get_variation_from_id(experiment.key, variation_id) self.config.logger.info('User "%s" is in variation "%s" of experiment %s.' % ( user_id, variation.key, experiment.key )) return variation self.config.logger.info('User "%s" is in no variation.' % user_id) return None
[ "def", "bucket", "(", "self", ",", "experiment", ",", "user_id", ",", "bucketing_id", ")", ":", "if", "not", "experiment", ":", "return", "None", "# Determine if experiment is in a mutually exclusive group", "if", "experiment", ".", "groupPolicy", "in", "GROUP_POLICIES", ":", "group", "=", "self", ".", "config", ".", "get_group", "(", "experiment", ".", "groupId", ")", "if", "not", "group", ":", "return", "None", "user_experiment_id", "=", "self", ".", "find_bucket", "(", "bucketing_id", ",", "experiment", ".", "groupId", ",", "group", ".", "trafficAllocation", ")", "if", "not", "user_experiment_id", ":", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is in no experiment.'", "%", "user_id", ")", "return", "None", "if", "user_experiment_id", "!=", "experiment", ".", "id", ":", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is not in experiment \"%s\" of group %s.'", "%", "(", "user_id", ",", "experiment", ".", "key", ",", "experiment", ".", "groupId", ")", ")", "return", "None", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is in experiment %s of group %s.'", "%", "(", "user_id", ",", "experiment", ".", "key", ",", "experiment", ".", "groupId", ")", ")", "# Bucket user if not in white-list and in group (if any)", "variation_id", "=", "self", ".", "find_bucket", "(", "bucketing_id", ",", "experiment", ".", "id", ",", "experiment", ".", "trafficAllocation", ")", "if", "variation_id", ":", "variation", "=", "self", ".", "config", ".", "get_variation_from_id", "(", "experiment", ".", "key", ",", "variation_id", ")", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is in variation \"%s\" of experiment %s.'", "%", "(", "user_id", ",", "variation", ".", "key", ",", "experiment", ".", "key", ")", ")", "return", "variation", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is in no variation.'", "%", "user_id", ")", "return", "None" ]
For a given experiment and bucketing ID determines variation to be shown to user. Args: experiment: Object representing the experiment for which user is to be bucketed. user_id: ID for user. bucketing_id: ID to be used for bucketing the user. Returns: Variation in which user with ID user_id will be put in. None if no variation.
[ "For", "a", "given", "experiment", "and", "bucketing", "ID", "determines", "variation", "to", "be", "shown", "to", "user", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/bucketer.py#L94-L147
train
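The bucket() record above delegates the actual hashing to find_bucket() and to the experiment's trafficAllocation ranges, neither of which appears in this dump. The sketch below is a minimal, self-contained stand-in for that traffic-allocation step: the CRC32 hash, the 10,000-slot bucket space, and the sample allocation are assumptions for illustration, not the SDK's real implementation.

import zlib

MAX_BUCKETS = 10000  # assumed bucket space for this sketch

def toy_bucket_value(bucketing_id, parent_id):
    # Deterministic toy hash of bucketing_id + parent_id into [0, MAX_BUCKETS).
    return zlib.crc32(('%s%s' % (bucketing_id, parent_id)).encode('utf-8')) % MAX_BUCKETS

def toy_find_bucket(bucketing_id, parent_id, traffic_allocation):
    # traffic_allocation: list of {'entityId': ..., 'endOfRange': ...} dicts,
    # mirroring the trafficAllocation shape referenced by bucket() above.
    bucket_value = toy_bucket_value(bucketing_id, parent_id)
    for allocation in traffic_allocation:
        if bucket_value < allocation['endOfRange']:
            return allocation['entityId']
    return None

# Example: a 50/50 split between two variations of one experiment.
allocation = [
    {'entityId': 'variation_a', 'endOfRange': 5000},
    {'entityId': 'variation_b', 'endOfRange': 10000},
]
print(toy_find_bucket('user-123', 'exp-1', allocation))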
optimizely/python-sdk
optimizely/project_config.py
ProjectConfig._generate_key_map
def _generate_key_map(entity_list, key, entity_class): """ Helper method to generate map from key to entity object for given list of dicts. Args: entity_list: List consisting of dict. key: Key in each dict which will be key in the map. entity_class: Class representing the entity. Returns: Map mapping key to entity object. """ key_map = {} for obj in entity_list: key_map[obj[key]] = entity_class(**obj) return key_map
python
def _generate_key_map(entity_list, key, entity_class): """ Helper method to generate map from key to entity object for given list of dicts. Args: entity_list: List consisting of dict. key: Key in each dict which will be key in the map. entity_class: Class representing the entity. Returns: Map mapping key to entity object. """ key_map = {} for obj in entity_list: key_map[obj[key]] = entity_class(**obj) return key_map
[ "def", "_generate_key_map", "(", "entity_list", ",", "key", ",", "entity_class", ")", ":", "key_map", "=", "{", "}", "for", "obj", "in", "entity_list", ":", "key_map", "[", "obj", "[", "key", "]", "]", "=", "entity_class", "(", "*", "*", "obj", ")", "return", "key_map" ]
Helper method to generate map from key to entity object for given list of dicts. Args: entity_list: List consisting of dict. key: Key in each dict which will be key in the map. entity_class: Class representing the entity. Returns: Map mapping key to entity object.
[ "Helper", "method", "to", "generate", "map", "from", "key", "to", "entity", "object", "for", "given", "list", "of", "dicts", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/project_config.py#L134-L150
train
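Since _generate_key_map is self-contained, a runnable illustration is straightforward. The Experiment namedtuple and the sample dicts below are hypothetical stand-ins for the SDK's entity classes and datafile entries.

from collections import namedtuple

# Hypothetical entity class; the SDK uses its own entities module.
Experiment = namedtuple('Experiment', ['id', 'key', 'status'])

def generate_key_map(entity_list, key, entity_class):
    # Same logic as the record above: {obj[key]: entity_class(**obj)}.
    key_map = {}
    for obj in entity_list:
        key_map[obj[key]] = entity_class(**obj)
    return key_map

experiments = [
    {'id': '111', 'key': 'checkout_test', 'status': 'Running'},
    {'id': '222', 'key': 'search_test', 'status': 'Paused'},
]
key_map = generate_key_map(experiments, 'key', Experiment)
print(key_map['checkout_test'].id)  # -> 111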
optimizely/python-sdk
optimizely/project_config.py
ProjectConfig._deserialize_audience
def _deserialize_audience(audience_map): """ Helper method to de-serialize and populate audience map with the condition list and structure. Args: audience_map: Dict mapping audience ID to audience object. Returns: Dict additionally consisting of condition list and structure on every audience object. """ for audience in audience_map.values(): condition_structure, condition_list = condition_helper.loads(audience.conditions) audience.__dict__.update({ 'conditionStructure': condition_structure, 'conditionList': condition_list }) return audience_map
python
def _deserialize_audience(audience_map): """ Helper method to de-serialize and populate audience map with the condition list and structure. Args: audience_map: Dict mapping audience ID to audience object. Returns: Dict additionally consisting of condition list and structure on every audience object. """ for audience in audience_map.values(): condition_structure, condition_list = condition_helper.loads(audience.conditions) audience.__dict__.update({ 'conditionStructure': condition_structure, 'conditionList': condition_list }) return audience_map
[ "def", "_deserialize_audience", "(", "audience_map", ")", ":", "for", "audience", "in", "audience_map", ".", "values", "(", ")", ":", "condition_structure", ",", "condition_list", "=", "condition_helper", ".", "loads", "(", "audience", ".", "conditions", ")", "audience", ".", "__dict__", ".", "update", "(", "{", "'conditionStructure'", ":", "condition_structure", ",", "'conditionList'", ":", "condition_list", "}", ")", "return", "audience_map" ]
Helper method to de-serialize and populate audience map with the condition list and structure. Args: audience_map: Dict mapping audience ID to audience object. Returns: Dict additionally consisting of condition list and structure on every audience object.
[ "Helper", "method", "to", "de", "-", "serialize", "and", "populate", "audience", "map", "with", "the", "condition", "list", "and", "structure", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/project_config.py#L153-L170
train
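The record above relies on condition_helper.loads, which is not included in this dump and which returns both a condition structure and a flat condition list. The sketch below only approximates that behaviour with json.loads, to show where the parsed data is attached on each audience object; the Audience class and sample conditions are made up for the example.

import json

class Audience(object):
    # Simplified stand-in for the SDK's Audience entity.
    def __init__(self, id, name, conditions):
        self.id, self.name, self.conditions = id, name, conditions

audience_map = {
    '10': Audience('10', 'ios_users',
                   '["and", {"name": "device", "type": "custom_attribute", "value": "ios"}]'),
}

for audience in audience_map.values():
    # The real helper produces a separate structure and leaf-condition list;
    # json.loads stands in here only to show how the result is attached.
    parsed = json.loads(audience.conditions)
    audience.__dict__.update({'conditionStructure': parsed, 'conditionList': parsed})

print(audience_map['10'].conditionStructure[0])  # -> and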
optimizely/python-sdk
optimizely/project_config.py
ProjectConfig.get_typecast_value
def get_typecast_value(self, value, type): """ Helper method to determine actual value based on type of feature variable. Args: value: Value in string form as it was parsed from datafile. type: Type denoting the feature flag type. Return: Value type-casted based on type of feature variable. """ if type == entities.Variable.Type.BOOLEAN: return value == 'true' elif type == entities.Variable.Type.INTEGER: return int(value) elif type == entities.Variable.Type.DOUBLE: return float(value) else: return value
python
def get_typecast_value(self, value, type): """ Helper method to determine actual value based on type of feature variable. Args: value: Value in string form as it was parsed from datafile. type: Type denoting the feature flag type. Return: Value type-casted based on type of feature variable. """ if type == entities.Variable.Type.BOOLEAN: return value == 'true' elif type == entities.Variable.Type.INTEGER: return int(value) elif type == entities.Variable.Type.DOUBLE: return float(value) else: return value
[ "def", "get_typecast_value", "(", "self", ",", "value", ",", "type", ")", ":", "if", "type", "==", "entities", ".", "Variable", ".", "Type", ".", "BOOLEAN", ":", "return", "value", "==", "'true'", "elif", "type", "==", "entities", ".", "Variable", ".", "Type", ".", "INTEGER", ":", "return", "int", "(", "value", ")", "elif", "type", "==", "entities", ".", "Variable", ".", "Type", ".", "DOUBLE", ":", "return", "float", "(", "value", ")", "else", ":", "return", "value" ]
Helper method to determine actual value based on type of feature variable. Args: value: Value in string form as it was parsed from datafile. type: Type denoting the feature flag type. Return: Value type-casted based on type of feature variable.
[ "Helper", "method", "to", "determine", "actual", "value", "based", "on", "type", "of", "feature", "variable", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/project_config.py#L172-L190
train
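The typecasting rules are easy to exercise in isolation. The sketch below restates them with plain string constants standing in for entities.Variable.Type, which is not part of this dump.

# Assumed string constants; the SDK reads these from entities.Variable.Type.
BOOLEAN, INTEGER, DOUBLE = 'boolean', 'integer', 'double'

def typecast_value(value, type_):
    if type_ == BOOLEAN:
        return value == 'true'   # anything other than the string 'true' becomes False
    elif type_ == INTEGER:
        return int(value)
    elif type_ == DOUBLE:
        return float(value)
    return value                 # strings and unknown types pass through unchanged

print(typecast_value('true', BOOLEAN), typecast_value('42', INTEGER), typecast_value('3.14', DOUBLE))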
optimizely/python-sdk
optimizely/project_config.py
ProjectConfig.get_experiment_from_key
def get_experiment_from_key(self, experiment_key): """ Get experiment for the provided experiment key. Args: experiment_key: Experiment key for which experiment is to be determined. Returns: Experiment corresponding to the provided experiment key. """ experiment = self.experiment_key_map.get(experiment_key) if experiment: return experiment self.logger.error('Experiment key "%s" is not in datafile.' % experiment_key) self.error_handler.handle_error(exceptions.InvalidExperimentException(enums.Errors.INVALID_EXPERIMENT_KEY_ERROR)) return None
python
def get_experiment_from_key(self, experiment_key): """ Get experiment for the provided experiment key. Args: experiment_key: Experiment key for which experiment is to be determined. Returns: Experiment corresponding to the provided experiment key. """ experiment = self.experiment_key_map.get(experiment_key) if experiment: return experiment self.logger.error('Experiment key "%s" is not in datafile.' % experiment_key) self.error_handler.handle_error(exceptions.InvalidExperimentException(enums.Errors.INVALID_EXPERIMENT_KEY_ERROR)) return None
[ "def", "get_experiment_from_key", "(", "self", ",", "experiment_key", ")", ":", "experiment", "=", "self", ".", "experiment_key_map", ".", "get", "(", "experiment_key", ")", "if", "experiment", ":", "return", "experiment", "self", ".", "logger", ".", "error", "(", "'Experiment key \"%s\" is not in datafile.'", "%", "experiment_key", ")", "self", ".", "error_handler", ".", "handle_error", "(", "exceptions", ".", "InvalidExperimentException", "(", "enums", ".", "Errors", ".", "INVALID_EXPERIMENT_KEY_ERROR", ")", ")", "return", "None" ]
Get experiment for the provided experiment key. Args: experiment_key: Experiment key for which experiment is to be determined. Returns: Experiment corresponding to the provided experiment key.
[ "Get", "experiment", "for", "the", "provided", "experiment", "key", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/project_config.py#L228-L245
train
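get_experiment_from_key and the getters in the following records (get_experiment_from_id, get_group, get_audience, get_event, get_feature_from_key, get_rollout_from_id) all follow the same shape: look the key up in a prebuilt map, and on a miss log an error, hand an exception to the error handler, and return None. A generic sketch of that pattern, with a stand-in error handler and exception type rather than the SDK's own classes:

import logging

class NoOpErrorHandler(object):
    # Stand-in for the SDK's error handler; a raising handler would re-raise.
    def handle_error(self, error):
        pass

def lookup(key_map, key, kind, logger, error_handler):
    entity = key_map.get(key)
    if entity:
        return entity
    logger.error('%s "%s" is not in datafile.' % (kind, key))
    error_handler.handle_error(KeyError(key))
    return None

logger = logging.getLogger(__name__)
experiment_key_map = {'checkout_test': {'id': '111'}}
print(lookup(experiment_key_map, 'checkout_test', 'Experiment key', logger, NoOpErrorHandler()))
print(lookup(experiment_key_map, 'missing_key', 'Experiment key', logger, NoOpErrorHandler()))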
optimizely/python-sdk
optimizely/project_config.py
ProjectConfig.get_experiment_from_id
def get_experiment_from_id(self, experiment_id): """ Get experiment for the provided experiment ID. Args: experiment_id: Experiment ID for which experiment is to be determined. Returns: Experiment corresponding to the provided experiment ID. """ experiment = self.experiment_id_map.get(experiment_id) if experiment: return experiment self.logger.error('Experiment ID "%s" is not in datafile.' % experiment_id) self.error_handler.handle_error(exceptions.InvalidExperimentException(enums.Errors.INVALID_EXPERIMENT_KEY_ERROR)) return None
python
def get_experiment_from_id(self, experiment_id): """ Get experiment for the provided experiment ID. Args: experiment_id: Experiment ID for which experiment is to be determined. Returns: Experiment corresponding to the provided experiment ID. """ experiment = self.experiment_id_map.get(experiment_id) if experiment: return experiment self.logger.error('Experiment ID "%s" is not in datafile.' % experiment_id) self.error_handler.handle_error(exceptions.InvalidExperimentException(enums.Errors.INVALID_EXPERIMENT_KEY_ERROR)) return None
[ "def", "get_experiment_from_id", "(", "self", ",", "experiment_id", ")", ":", "experiment", "=", "self", ".", "experiment_id_map", ".", "get", "(", "experiment_id", ")", "if", "experiment", ":", "return", "experiment", "self", ".", "logger", ".", "error", "(", "'Experiment ID \"%s\" is not in datafile.'", "%", "experiment_id", ")", "self", ".", "error_handler", ".", "handle_error", "(", "exceptions", ".", "InvalidExperimentException", "(", "enums", ".", "Errors", ".", "INVALID_EXPERIMENT_KEY_ERROR", ")", ")", "return", "None" ]
Get experiment for the provided experiment ID. Args: experiment_id: Experiment ID for which experiment is to be determined. Returns: Experiment corresponding to the provided experiment ID.
[ "Get", "experiment", "for", "the", "provided", "experiment", "ID", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/project_config.py#L247-L264
train
optimizely/python-sdk
optimizely/project_config.py
ProjectConfig.get_group
def get_group(self, group_id): """ Get group for the provided group ID. Args: group_id: Group ID for which group is to be determined. Returns: Group corresponding to the provided group ID. """ group = self.group_id_map.get(group_id) if group: return group self.logger.error('Group ID "%s" is not in datafile.' % group_id) self.error_handler.handle_error(exceptions.InvalidGroupException(enums.Errors.INVALID_GROUP_ID_ERROR)) return None
python
def get_group(self, group_id): """ Get group for the provided group ID. Args: group_id: Group ID for which group is to be determined. Returns: Group corresponding to the provided group ID. """ group = self.group_id_map.get(group_id) if group: return group self.logger.error('Group ID "%s" is not in datafile.' % group_id) self.error_handler.handle_error(exceptions.InvalidGroupException(enums.Errors.INVALID_GROUP_ID_ERROR)) return None
[ "def", "get_group", "(", "self", ",", "group_id", ")", ":", "group", "=", "self", ".", "group_id_map", ".", "get", "(", "group_id", ")", "if", "group", ":", "return", "group", "self", ".", "logger", ".", "error", "(", "'Group ID \"%s\" is not in datafile.'", "%", "group_id", ")", "self", ".", "error_handler", ".", "handle_error", "(", "exceptions", ".", "InvalidGroupException", "(", "enums", ".", "Errors", ".", "INVALID_GROUP_ID_ERROR", ")", ")", "return", "None" ]
Get group for the provided group ID. Args: group_id: Group ID for which group is to be determined. Returns: Group corresponding to the provided group ID.
[ "Get", "group", "for", "the", "provided", "group", "ID", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/project_config.py#L266-L283
train
optimizely/python-sdk
optimizely/project_config.py
ProjectConfig.get_audience
def get_audience(self, audience_id): """ Get audience object for the provided audience ID. Args: audience_id: ID of the audience. Returns: Dict representing the audience. """ audience = self.audience_id_map.get(audience_id) if audience: return audience self.logger.error('Audience ID "%s" is not in datafile.' % audience_id) self.error_handler.handle_error(exceptions.InvalidAudienceException((enums.Errors.INVALID_AUDIENCE_ERROR)))
python
def get_audience(self, audience_id): """ Get audience object for the provided audience ID. Args: audience_id: ID of the audience. Returns: Dict representing the audience. """ audience = self.audience_id_map.get(audience_id) if audience: return audience self.logger.error('Audience ID "%s" is not in datafile.' % audience_id) self.error_handler.handle_error(exceptions.InvalidAudienceException((enums.Errors.INVALID_AUDIENCE_ERROR)))
[ "def", "get_audience", "(", "self", ",", "audience_id", ")", ":", "audience", "=", "self", ".", "audience_id_map", ".", "get", "(", "audience_id", ")", "if", "audience", ":", "return", "audience", "self", ".", "logger", ".", "error", "(", "'Audience ID \"%s\" is not in datafile.'", "%", "audience_id", ")", "self", ".", "error_handler", ".", "handle_error", "(", "exceptions", ".", "InvalidAudienceException", "(", "(", "enums", ".", "Errors", ".", "INVALID_AUDIENCE_ERROR", ")", ")", ")" ]
Get audience object for the provided audience ID. Args: audience_id: ID of the audience. Returns: Dict representing the audience.
[ "Get", "audience", "object", "for", "the", "provided", "audience", "ID", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/project_config.py#L285-L300
train
optimizely/python-sdk
optimizely/project_config.py
ProjectConfig.get_variation_from_key
def get_variation_from_key(self, experiment_key, variation_key): """ Get variation given experiment and variation key. Args: experiment: Key representing parent experiment of variation. variation_key: Key representing the variation. Returns Object representing the variation. """ variation_map = self.variation_key_map.get(experiment_key) if variation_map: variation = variation_map.get(variation_key) if variation: return variation else: self.logger.error('Variation key "%s" is not in datafile.' % variation_key) self.error_handler.handle_error(exceptions.InvalidVariationException(enums.Errors.INVALID_VARIATION_ERROR)) return None self.logger.error('Experiment key "%s" is not in datafile.' % experiment_key) self.error_handler.handle_error(exceptions.InvalidExperimentException(enums.Errors.INVALID_EXPERIMENT_KEY_ERROR)) return None
python
def get_variation_from_key(self, experiment_key, variation_key): """ Get variation given experiment and variation key. Args: experiment: Key representing parent experiment of variation. variation_key: Key representing the variation. Returns Object representing the variation. """ variation_map = self.variation_key_map.get(experiment_key) if variation_map: variation = variation_map.get(variation_key) if variation: return variation else: self.logger.error('Variation key "%s" is not in datafile.' % variation_key) self.error_handler.handle_error(exceptions.InvalidVariationException(enums.Errors.INVALID_VARIATION_ERROR)) return None self.logger.error('Experiment key "%s" is not in datafile.' % experiment_key) self.error_handler.handle_error(exceptions.InvalidExperimentException(enums.Errors.INVALID_EXPERIMENT_KEY_ERROR)) return None
[ "def", "get_variation_from_key", "(", "self", ",", "experiment_key", ",", "variation_key", ")", ":", "variation_map", "=", "self", ".", "variation_key_map", ".", "get", "(", "experiment_key", ")", "if", "variation_map", ":", "variation", "=", "variation_map", ".", "get", "(", "variation_key", ")", "if", "variation", ":", "return", "variation", "else", ":", "self", ".", "logger", ".", "error", "(", "'Variation key \"%s\" is not in datafile.'", "%", "variation_key", ")", "self", ".", "error_handler", ".", "handle_error", "(", "exceptions", ".", "InvalidVariationException", "(", "enums", ".", "Errors", ".", "INVALID_VARIATION_ERROR", ")", ")", "return", "None", "self", ".", "logger", ".", "error", "(", "'Experiment key \"%s\" is not in datafile.'", "%", "experiment_key", ")", "self", ".", "error_handler", ".", "handle_error", "(", "exceptions", ".", "InvalidExperimentException", "(", "enums", ".", "Errors", ".", "INVALID_EXPERIMENT_KEY_ERROR", ")", ")", "return", "None" ]
Get variation given experiment and variation key. Args: experiment: Key representing parent experiment of variation. variation_key: Key representing the variation. Returns Object representing the variation.
[ "Get", "variation", "given", "experiment", "and", "variation", "key", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/project_config.py#L302-L326
train
optimizely/python-sdk
optimizely/project_config.py
ProjectConfig.get_variation_from_id
def get_variation_from_id(self, experiment_key, variation_id): """ Get variation given experiment and variation ID. Args: experiment: Key representing parent experiment of variation. variation_id: ID representing the variation. Returns Object representing the variation. """ variation_map = self.variation_id_map.get(experiment_key) if variation_map: variation = variation_map.get(variation_id) if variation: return variation else: self.logger.error('Variation ID "%s" is not in datafile.' % variation_id) self.error_handler.handle_error(exceptions.InvalidVariationException(enums.Errors.INVALID_VARIATION_ERROR)) return None self.logger.error('Experiment key "%s" is not in datafile.' % experiment_key) self.error_handler.handle_error(exceptions.InvalidExperimentException(enums.Errors.INVALID_EXPERIMENT_KEY_ERROR)) return None
python
def get_variation_from_id(self, experiment_key, variation_id): """ Get variation given experiment and variation ID. Args: experiment: Key representing parent experiment of variation. variation_id: ID representing the variation. Returns Object representing the variation. """ variation_map = self.variation_id_map.get(experiment_key) if variation_map: variation = variation_map.get(variation_id) if variation: return variation else: self.logger.error('Variation ID "%s" is not in datafile.' % variation_id) self.error_handler.handle_error(exceptions.InvalidVariationException(enums.Errors.INVALID_VARIATION_ERROR)) return None self.logger.error('Experiment key "%s" is not in datafile.' % experiment_key) self.error_handler.handle_error(exceptions.InvalidExperimentException(enums.Errors.INVALID_EXPERIMENT_KEY_ERROR)) return None
[ "def", "get_variation_from_id", "(", "self", ",", "experiment_key", ",", "variation_id", ")", ":", "variation_map", "=", "self", ".", "variation_id_map", ".", "get", "(", "experiment_key", ")", "if", "variation_map", ":", "variation", "=", "variation_map", ".", "get", "(", "variation_id", ")", "if", "variation", ":", "return", "variation", "else", ":", "self", ".", "logger", ".", "error", "(", "'Variation ID \"%s\" is not in datafile.'", "%", "variation_id", ")", "self", ".", "error_handler", ".", "handle_error", "(", "exceptions", ".", "InvalidVariationException", "(", "enums", ".", "Errors", ".", "INVALID_VARIATION_ERROR", ")", ")", "return", "None", "self", ".", "logger", ".", "error", "(", "'Experiment key \"%s\" is not in datafile.'", "%", "experiment_key", ")", "self", ".", "error_handler", ".", "handle_error", "(", "exceptions", ".", "InvalidExperimentException", "(", "enums", ".", "Errors", ".", "INVALID_EXPERIMENT_KEY_ERROR", ")", ")", "return", "None" ]
Get variation given experiment and variation ID. Args: experiment: Key representing parent experiment of variation. variation_id: ID representing the variation. Returns Object representing the variation.
[ "Get", "variation", "given", "experiment", "and", "variation", "ID", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/project_config.py#L328-L352
train
optimizely/python-sdk
optimizely/project_config.py
ProjectConfig.get_event
def get_event(self, event_key): """ Get event for the provided event key. Args: event_key: Event key for which event is to be determined. Returns: Event corresponding to the provided event key. """ event = self.event_key_map.get(event_key) if event: return event self.logger.error('Event "%s" is not in datafile.' % event_key) self.error_handler.handle_error(exceptions.InvalidEventException(enums.Errors.INVALID_EVENT_KEY_ERROR)) return None
python
def get_event(self, event_key): """ Get event for the provided event key. Args: event_key: Event key for which event is to be determined. Returns: Event corresponding to the provided event key. """ event = self.event_key_map.get(event_key) if event: return event self.logger.error('Event "%s" is not in datafile.' % event_key) self.error_handler.handle_error(exceptions.InvalidEventException(enums.Errors.INVALID_EVENT_KEY_ERROR)) return None
[ "def", "get_event", "(", "self", ",", "event_key", ")", ":", "event", "=", "self", ".", "event_key_map", ".", "get", "(", "event_key", ")", "if", "event", ":", "return", "event", "self", ".", "logger", ".", "error", "(", "'Event \"%s\" is not in datafile.'", "%", "event_key", ")", "self", ".", "error_handler", ".", "handle_error", "(", "exceptions", ".", "InvalidEventException", "(", "enums", ".", "Errors", ".", "INVALID_EVENT_KEY_ERROR", ")", ")", "return", "None" ]
Get event for the provided event key. Args: event_key: Event key for which event is to be determined. Returns: Event corresponding to the provided event key.
[ "Get", "event", "for", "the", "provided", "event", "key", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/project_config.py#L354-L371
train
optimizely/python-sdk
optimizely/project_config.py
ProjectConfig.get_attribute_id
def get_attribute_id(self, attribute_key): """ Get attribute ID for the provided attribute key. Args: attribute_key: Attribute key for which attribute is to be fetched. Returns: Attribute ID corresponding to the provided attribute key. """ attribute = self.attribute_key_map.get(attribute_key) has_reserved_prefix = attribute_key.startswith(RESERVED_ATTRIBUTE_PREFIX) if attribute: if has_reserved_prefix: self.logger.warning(('Attribute %s unexpectedly has reserved prefix %s; using attribute ID ' 'instead of reserved attribute name.' % (attribute_key, RESERVED_ATTRIBUTE_PREFIX))) return attribute.id if has_reserved_prefix: return attribute_key self.logger.error('Attribute "%s" is not in datafile.' % attribute_key) self.error_handler.handle_error(exceptions.InvalidAttributeException(enums.Errors.INVALID_ATTRIBUTE_ERROR)) return None
python
def get_attribute_id(self, attribute_key): """ Get attribute ID for the provided attribute key. Args: attribute_key: Attribute key for which attribute is to be fetched. Returns: Attribute ID corresponding to the provided attribute key. """ attribute = self.attribute_key_map.get(attribute_key) has_reserved_prefix = attribute_key.startswith(RESERVED_ATTRIBUTE_PREFIX) if attribute: if has_reserved_prefix: self.logger.warning(('Attribute %s unexpectedly has reserved prefix %s; using attribute ID ' 'instead of reserved attribute name.' % (attribute_key, RESERVED_ATTRIBUTE_PREFIX))) return attribute.id if has_reserved_prefix: return attribute_key self.logger.error('Attribute "%s" is not in datafile.' % attribute_key) self.error_handler.handle_error(exceptions.InvalidAttributeException(enums.Errors.INVALID_ATTRIBUTE_ERROR)) return None
[ "def", "get_attribute_id", "(", "self", ",", "attribute_key", ")", ":", "attribute", "=", "self", ".", "attribute_key_map", ".", "get", "(", "attribute_key", ")", "has_reserved_prefix", "=", "attribute_key", ".", "startswith", "(", "RESERVED_ATTRIBUTE_PREFIX", ")", "if", "attribute", ":", "if", "has_reserved_prefix", ":", "self", ".", "logger", ".", "warning", "(", "(", "'Attribute %s unexpectedly has reserved prefix %s; using attribute ID '", "'instead of reserved attribute name.'", "%", "(", "attribute_key", ",", "RESERVED_ATTRIBUTE_PREFIX", ")", ")", ")", "return", "attribute", ".", "id", "if", "has_reserved_prefix", ":", "return", "attribute_key", "self", ".", "logger", ".", "error", "(", "'Attribute \"%s\" is not in datafile.'", "%", "attribute_key", ")", "self", ".", "error_handler", ".", "handle_error", "(", "exceptions", ".", "InvalidAttributeException", "(", "enums", ".", "Errors", ".", "INVALID_ATTRIBUTE_ERROR", ")", ")", "return", "None" ]
Get attribute ID for the provided attribute key. Args: attribute_key: Attribute key for which attribute is to be fetched. Returns: Attribute ID corresponding to the provided attribute key.
[ "Get", "attribute", "ID", "for", "the", "provided", "attribute", "key", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/project_config.py#L373-L398
train
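get_attribute_id adds one wrinkle to the lookup pattern: keys that carry the reserved prefix are passed through as the attribute ID when they are not defined in the datafile. A compact sketch, assuming '$opt_' as the reserved prefix (the SDK's RESERVED_ATTRIBUTE_PREFIX constant is not shown in this dump):

RESERVED_PREFIX = '$opt_'  # assumed value for this sketch

def resolve_attribute_id(attribute_key_map, attribute_key):
    attribute = attribute_key_map.get(attribute_key)
    if attribute:
        # A datafile attribute wins even if the key happens to look reserved.
        return attribute['id']
    if attribute_key.startswith(RESERVED_PREFIX):
        # Reserved-looking keys that are not in the datafile pass through as-is.
        return attribute_key
    return None

attrs = {'device_type': {'id': 'a1'}}
print(resolve_attribute_id(attrs, 'device_type'))      # -> a1
print(resolve_attribute_id(attrs, '$opt_user_agent'))  # -> $opt_user_agent
print(resolve_attribute_id(attrs, 'unknown'))          # -> None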
optimizely/python-sdk
optimizely/project_config.py
ProjectConfig.get_feature_from_key
def get_feature_from_key(self, feature_key): """ Get feature for the provided feature key. Args: feature_key: Feature key for which feature is to be fetched. Returns: Feature corresponding to the provided feature key. """ feature = self.feature_key_map.get(feature_key) if feature: return feature self.logger.error('Feature "%s" is not in datafile.' % feature_key) return None
python
def get_feature_from_key(self, feature_key): """ Get feature for the provided feature key. Args: feature_key: Feature key for which feature is to be fetched. Returns: Feature corresponding to the provided feature key. """ feature = self.feature_key_map.get(feature_key) if feature: return feature self.logger.error('Feature "%s" is not in datafile.' % feature_key) return None
[ "def", "get_feature_from_key", "(", "self", ",", "feature_key", ")", ":", "feature", "=", "self", ".", "feature_key_map", ".", "get", "(", "feature_key", ")", "if", "feature", ":", "return", "feature", "self", ".", "logger", ".", "error", "(", "'Feature \"%s\" is not in datafile.'", "%", "feature_key", ")", "return", "None" ]
Get feature for the provided feature key. Args: feature_key: Feature key for which feature is to be fetched. Returns: Feature corresponding to the provided feature key.
[ "Get", "feature", "for", "the", "provided", "feature", "key", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/project_config.py#L400-L415
train
optimizely/python-sdk
optimizely/project_config.py
ProjectConfig.get_rollout_from_id
def get_rollout_from_id(self, rollout_id): """ Get rollout for the provided ID. Args: rollout_id: ID of the rollout to be fetched. Returns: Rollout corresponding to the provided ID. """ layer = self.rollout_id_map.get(rollout_id) if layer: return layer self.logger.error('Rollout with ID "%s" is not in datafile.' % rollout_id) return None
python
def get_rollout_from_id(self, rollout_id): """ Get rollout for the provided ID. Args: rollout_id: ID of the rollout to be fetched. Returns: Rollout corresponding to the provided ID. """ layer = self.rollout_id_map.get(rollout_id) if layer: return layer self.logger.error('Rollout with ID "%s" is not in datafile.' % rollout_id) return None
[ "def", "get_rollout_from_id", "(", "self", ",", "rollout_id", ")", ":", "layer", "=", "self", ".", "rollout_id_map", ".", "get", "(", "rollout_id", ")", "if", "layer", ":", "return", "layer", "self", ".", "logger", ".", "error", "(", "'Rollout with ID \"%s\" is not in datafile.'", "%", "rollout_id", ")", "return", "None" ]
Get rollout for the provided ID. Args: rollout_id: ID of the rollout to be fetched. Returns: Rollout corresponding to the provided ID.
[ "Get", "rollout", "for", "the", "provided", "ID", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/project_config.py#L417-L432
train
optimizely/python-sdk
optimizely/project_config.py
ProjectConfig.get_variable_value_for_variation
def get_variable_value_for_variation(self, variable, variation): """ Get the variable value for the given variation. Args: variable: The Variable for which we are getting the value. variation: The Variation for which we are getting the variable value. Returns: The variable value or None if any of the inputs are invalid. """ if not variable or not variation: return None if variation.id not in self.variation_variable_usage_map: self.logger.error('Variation with ID "%s" is not in the datafile.' % variation.id) return None # Get all variable usages for the given variation variable_usages = self.variation_variable_usage_map[variation.id] # Find usage in given variation variable_usage = None if variable_usages: variable_usage = variable_usages.get(variable.id) if variable_usage: variable_value = variable_usage.value self.logger.info('Value for variable "%s" for variation "%s" is "%s".' % ( variable.key, variation.key, variable_value )) else: variable_value = variable.defaultValue self.logger.info('Variable "%s" is not used in variation "%s". Assigning default value "%s".' % ( variable.key, variation.key, variable_value )) return variable_value
python
def get_variable_value_for_variation(self, variable, variation): """ Get the variable value for the given variation. Args: variable: The Variable for which we are getting the value. variation: The Variation for which we are getting the variable value. Returns: The variable value or None if any of the inputs are invalid. """ if not variable or not variation: return None if variation.id not in self.variation_variable_usage_map: self.logger.error('Variation with ID "%s" is not in the datafile.' % variation.id) return None # Get all variable usages for the given variation variable_usages = self.variation_variable_usage_map[variation.id] # Find usage in given variation variable_usage = None if variable_usages: variable_usage = variable_usages.get(variable.id) if variable_usage: variable_value = variable_usage.value self.logger.info('Value for variable "%s" for variation "%s" is "%s".' % ( variable.key, variation.key, variable_value )) else: variable_value = variable.defaultValue self.logger.info('Variable "%s" is not used in variation "%s". Assigning default value "%s".' % ( variable.key, variation.key, variable_value )) return variable_value
[ "def", "get_variable_value_for_variation", "(", "self", ",", "variable", ",", "variation", ")", ":", "if", "not", "variable", "or", "not", "variation", ":", "return", "None", "if", "variation", ".", "id", "not", "in", "self", ".", "variation_variable_usage_map", ":", "self", ".", "logger", ".", "error", "(", "'Variation with ID \"%s\" is not in the datafile.'", "%", "variation", ".", "id", ")", "return", "None", "# Get all variable usages for the given variation", "variable_usages", "=", "self", ".", "variation_variable_usage_map", "[", "variation", ".", "id", "]", "# Find usage in given variation", "variable_usage", "=", "None", "if", "variable_usages", ":", "variable_usage", "=", "variable_usages", ".", "get", "(", "variable", ".", "id", ")", "if", "variable_usage", ":", "variable_value", "=", "variable_usage", ".", "value", "self", ".", "logger", ".", "info", "(", "'Value for variable \"%s\" for variation \"%s\" is \"%s\".'", "%", "(", "variable", ".", "key", ",", "variation", ".", "key", ",", "variable_value", ")", ")", "else", ":", "variable_value", "=", "variable", ".", "defaultValue", "self", ".", "logger", ".", "info", "(", "'Variable \"%s\" is not used in variation \"%s\". Assigning default value \"%s\".'", "%", "(", "variable", ".", "key", ",", "variation", ".", "key", ",", "variable_value", ")", ")", "return", "variable_value" ]
Get the variable value for the given variation. Args: variable: The Variable for which we are getting the value. variation: The Variation for which we are getting the variable value. Returns: The variable value or None if any of the inputs are invalid.
[ "Get", "the", "variable", "value", "for", "the", "given", "variation", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/project_config.py#L434-L476
train
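The core of get_variable_value_for_variation is a fallback: use the variation's recorded value if there is a usage for the variable, otherwise fall back to the variable's defaultValue. A sketch with plain dicts standing in for the SDK's Variable and VariableUsage objects:

def variable_value_for_variation(variable, variable_usages):
    # variable_usages: {variable_id: {'value': ...}} for one variation, or None.
    usage = (variable_usages or {}).get(variable['id'])
    if usage:
        return usage['value']
    return variable['defaultValue']

variable = {'id': 'v1', 'key': 'button_color', 'defaultValue': 'blue'}
print(variable_value_for_variation(variable, {'v1': {'value': 'red'}}))  # -> red
print(variable_value_for_variation(variable, {}))                        # -> blue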
optimizely/python-sdk
optimizely/project_config.py
ProjectConfig.get_variable_for_feature
def get_variable_for_feature(self, feature_key, variable_key): """ Get the variable with the given variable key for the given feature. Args: feature_key: The key of the feature for which we are getting the variable. variable_key: The key of the variable we are getting. Returns: Variable with the given key in the given variation. """ feature = self.feature_key_map.get(feature_key) if not feature: self.logger.error('Feature with key "%s" not found in the datafile.' % feature_key) return None if variable_key not in feature.variables: self.logger.error('Variable with key "%s" not found in the datafile.' % variable_key) return None return feature.variables.get(variable_key)
python
def get_variable_for_feature(self, feature_key, variable_key): """ Get the variable with the given variable key for the given feature. Args: feature_key: The key of the feature for which we are getting the variable. variable_key: The key of the variable we are getting. Returns: Variable with the given key in the given variation. """ feature = self.feature_key_map.get(feature_key) if not feature: self.logger.error('Feature with key "%s" not found in the datafile.' % feature_key) return None if variable_key not in feature.variables: self.logger.error('Variable with key "%s" not found in the datafile.' % variable_key) return None return feature.variables.get(variable_key)
[ "def", "get_variable_for_feature", "(", "self", ",", "feature_key", ",", "variable_key", ")", ":", "feature", "=", "self", ".", "feature_key_map", ".", "get", "(", "feature_key", ")", "if", "not", "feature", ":", "self", ".", "logger", ".", "error", "(", "'Feature with key \"%s\" not found in the datafile.'", "%", "feature_key", ")", "return", "None", "if", "variable_key", "not", "in", "feature", ".", "variables", ":", "self", ".", "logger", ".", "error", "(", "'Variable with key \"%s\" not found in the datafile.'", "%", "variable_key", ")", "return", "None", "return", "feature", ".", "variables", ".", "get", "(", "variable_key", ")" ]
Get the variable with the given variable key for the given feature. Args: feature_key: The key of the feature for which we are getting the variable. variable_key: The key of the variable we are getting. Returns: Variable with the given key in the given variation.
[ "Get", "the", "variable", "with", "the", "given", "variable", "key", "for", "the", "given", "feature", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/project_config.py#L478-L497
train
optimizely/python-sdk
optimizely/project_config.py
ProjectConfig.set_forced_variation
def set_forced_variation(self, experiment_key, user_id, variation_key): """ Sets users to a map of experiments to forced variations. Args: experiment_key: Key for experiment. user_id: The user ID. variation_key: Key for variation. If None, then clear the existing experiment-to-variation mapping. Returns: A boolean value that indicates if the set completed successfully. """ experiment = self.get_experiment_from_key(experiment_key) if not experiment: # The invalid experiment key will be logged inside this call. return False experiment_id = experiment.id if variation_key is None: if user_id in self.forced_variation_map: experiment_to_variation_map = self.forced_variation_map.get(user_id) if experiment_id in experiment_to_variation_map: del(self.forced_variation_map[user_id][experiment_id]) self.logger.debug('Variation mapped to experiment "%s" has been removed for user "%s".' % ( experiment_key, user_id )) else: self.logger.debug('Nothing to remove. Variation mapped to experiment "%s" for user "%s" does not exist.' % ( experiment_key, user_id )) else: self.logger.debug('Nothing to remove. User "%s" does not exist in the forced variation map.' % user_id) return True if not validator.is_non_empty_string(variation_key): self.logger.debug('Variation key is invalid.') return False forced_variation = self.get_variation_from_key(experiment_key, variation_key) if not forced_variation: # The invalid variation key will be logged inside this call. return False variation_id = forced_variation.id if user_id not in self.forced_variation_map: self.forced_variation_map[user_id] = {experiment_id: variation_id} else: self.forced_variation_map[user_id][experiment_id] = variation_id self.logger.debug('Set variation "%s" for experiment "%s" and user "%s" in the forced variation map.' % ( variation_id, experiment_id, user_id )) return True
python
def set_forced_variation(self, experiment_key, user_id, variation_key): """ Sets users to a map of experiments to forced variations. Args: experiment_key: Key for experiment. user_id: The user ID. variation_key: Key for variation. If None, then clear the existing experiment-to-variation mapping. Returns: A boolean value that indicates if the set completed successfully. """ experiment = self.get_experiment_from_key(experiment_key) if not experiment: # The invalid experiment key will be logged inside this call. return False experiment_id = experiment.id if variation_key is None: if user_id in self.forced_variation_map: experiment_to_variation_map = self.forced_variation_map.get(user_id) if experiment_id in experiment_to_variation_map: del(self.forced_variation_map[user_id][experiment_id]) self.logger.debug('Variation mapped to experiment "%s" has been removed for user "%s".' % ( experiment_key, user_id )) else: self.logger.debug('Nothing to remove. Variation mapped to experiment "%s" for user "%s" does not exist.' % ( experiment_key, user_id )) else: self.logger.debug('Nothing to remove. User "%s" does not exist in the forced variation map.' % user_id) return True if not validator.is_non_empty_string(variation_key): self.logger.debug('Variation key is invalid.') return False forced_variation = self.get_variation_from_key(experiment_key, variation_key) if not forced_variation: # The invalid variation key will be logged inside this call. return False variation_id = forced_variation.id if user_id not in self.forced_variation_map: self.forced_variation_map[user_id] = {experiment_id: variation_id} else: self.forced_variation_map[user_id][experiment_id] = variation_id self.logger.debug('Set variation "%s" for experiment "%s" and user "%s" in the forced variation map.' % ( variation_id, experiment_id, user_id )) return True
[ "def", "set_forced_variation", "(", "self", ",", "experiment_key", ",", "user_id", ",", "variation_key", ")", ":", "experiment", "=", "self", ".", "get_experiment_from_key", "(", "experiment_key", ")", "if", "not", "experiment", ":", "# The invalid experiment key will be logged inside this call.", "return", "False", "experiment_id", "=", "experiment", ".", "id", "if", "variation_key", "is", "None", ":", "if", "user_id", "in", "self", ".", "forced_variation_map", ":", "experiment_to_variation_map", "=", "self", ".", "forced_variation_map", ".", "get", "(", "user_id", ")", "if", "experiment_id", "in", "experiment_to_variation_map", ":", "del", "(", "self", ".", "forced_variation_map", "[", "user_id", "]", "[", "experiment_id", "]", ")", "self", ".", "logger", ".", "debug", "(", "'Variation mapped to experiment \"%s\" has been removed for user \"%s\".'", "%", "(", "experiment_key", ",", "user_id", ")", ")", "else", ":", "self", ".", "logger", ".", "debug", "(", "'Nothing to remove. Variation mapped to experiment \"%s\" for user \"%s\" does not exist.'", "%", "(", "experiment_key", ",", "user_id", ")", ")", "else", ":", "self", ".", "logger", ".", "debug", "(", "'Nothing to remove. User \"%s\" does not exist in the forced variation map.'", "%", "user_id", ")", "return", "True", "if", "not", "validator", ".", "is_non_empty_string", "(", "variation_key", ")", ":", "self", ".", "logger", ".", "debug", "(", "'Variation key is invalid.'", ")", "return", "False", "forced_variation", "=", "self", ".", "get_variation_from_key", "(", "experiment_key", ",", "variation_key", ")", "if", "not", "forced_variation", ":", "# The invalid variation key will be logged inside this call.", "return", "False", "variation_id", "=", "forced_variation", ".", "id", "if", "user_id", "not", "in", "self", ".", "forced_variation_map", ":", "self", ".", "forced_variation_map", "[", "user_id", "]", "=", "{", "experiment_id", ":", "variation_id", "}", "else", ":", "self", ".", "forced_variation_map", "[", "user_id", "]", "[", "experiment_id", "]", "=", "variation_id", "self", ".", "logger", ".", "debug", "(", "'Set variation \"%s\" for experiment \"%s\" and user \"%s\" in the forced variation map.'", "%", "(", "variation_id", ",", "experiment_id", ",", "user_id", ")", ")", "return", "True" ]
Sets users to a map of experiments to forced variations. Args: experiment_key: Key for experiment. user_id: The user ID. variation_key: Key for variation. If None, then clear the existing experiment-to-variation mapping. Returns: A boolean value that indicates if the set completed successfully.
[ "Sets", "users", "to", "a", "map", "of", "experiments", "to", "forced", "variations", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/project_config.py#L499-L555
train
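set_forced_variation and the get_forced_variation record that follows maintain a nested map of user ID to {experiment ID: variation ID}. A stripped-down sketch of that bookkeeping, with made-up IDs and without the key validation and logging:

forced_variation_map = {}  # {user_id: {experiment_id: variation_id}}

def set_forced(user_id, experiment_id, variation_id):
    if variation_id is None:
        # Passing None clears any existing mapping, as in the record above.
        forced_variation_map.get(user_id, {}).pop(experiment_id, None)
        return True
    forced_variation_map.setdefault(user_id, {})[experiment_id] = variation_id
    return True

def get_forced(user_id, experiment_id):
    return forced_variation_map.get(user_id, {}).get(experiment_id)

set_forced('user-1', 'exp-111', 'var-222')
print(get_forced('user-1', 'exp-111'))  # -> var-222
set_forced('user-1', 'exp-111', None)
print(get_forced('user-1', 'exp-111'))  # -> None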
optimizely/python-sdk
optimizely/project_config.py
ProjectConfig.get_forced_variation
def get_forced_variation(self, experiment_key, user_id): """ Gets the forced variation key for the given user and experiment. Args: experiment_key: Key for experiment. user_id: The user ID. Returns: The variation which the given user and experiment should be forced into. """ if user_id not in self.forced_variation_map: self.logger.debug('User "%s" is not in the forced variation map.' % user_id) return None experiment = self.get_experiment_from_key(experiment_key) if not experiment: # The invalid experiment key will be logged inside this call. return None experiment_to_variation_map = self.forced_variation_map.get(user_id) if not experiment_to_variation_map: self.logger.debug('No experiment "%s" mapped to user "%s" in the forced variation map.' % ( experiment_key, user_id )) return None variation_id = experiment_to_variation_map.get(experiment.id) if variation_id is None: self.logger.debug( 'No variation mapped to experiment "%s" in the forced variation map.' % experiment_key ) return None variation = self.get_variation_from_id(experiment_key, variation_id) self.logger.debug('Variation "%s" is mapped to experiment "%s" and user "%s" in the forced variation map' % ( variation.key, experiment_key, user_id )) return variation
python
def get_forced_variation(self, experiment_key, user_id): """ Gets the forced variation key for the given user and experiment. Args: experiment_key: Key for experiment. user_id: The user ID. Returns: The variation which the given user and experiment should be forced into. """ if user_id not in self.forced_variation_map: self.logger.debug('User "%s" is not in the forced variation map.' % user_id) return None experiment = self.get_experiment_from_key(experiment_key) if not experiment: # The invalid experiment key will be logged inside this call. return None experiment_to_variation_map = self.forced_variation_map.get(user_id) if not experiment_to_variation_map: self.logger.debug('No experiment "%s" mapped to user "%s" in the forced variation map.' % ( experiment_key, user_id )) return None variation_id = experiment_to_variation_map.get(experiment.id) if variation_id is None: self.logger.debug( 'No variation mapped to experiment "%s" in the forced variation map.' % experiment_key ) return None variation = self.get_variation_from_id(experiment_key, variation_id) self.logger.debug('Variation "%s" is mapped to experiment "%s" and user "%s" in the forced variation map' % ( variation.key, experiment_key, user_id )) return variation
[ "def", "get_forced_variation", "(", "self", ",", "experiment_key", ",", "user_id", ")", ":", "if", "user_id", "not", "in", "self", ".", "forced_variation_map", ":", "self", ".", "logger", ".", "debug", "(", "'User \"%s\" is not in the forced variation map.'", "%", "user_id", ")", "return", "None", "experiment", "=", "self", ".", "get_experiment_from_key", "(", "experiment_key", ")", "if", "not", "experiment", ":", "# The invalid experiment key will be logged inside this call.", "return", "None", "experiment_to_variation_map", "=", "self", ".", "forced_variation_map", ".", "get", "(", "user_id", ")", "if", "not", "experiment_to_variation_map", ":", "self", ".", "logger", ".", "debug", "(", "'No experiment \"%s\" mapped to user \"%s\" in the forced variation map.'", "%", "(", "experiment_key", ",", "user_id", ")", ")", "return", "None", "variation_id", "=", "experiment_to_variation_map", ".", "get", "(", "experiment", ".", "id", ")", "if", "variation_id", "is", "None", ":", "self", ".", "logger", ".", "debug", "(", "'No variation mapped to experiment \"%s\" in the forced variation map.'", "%", "experiment_key", ")", "return", "None", "variation", "=", "self", ".", "get_variation_from_id", "(", "experiment_key", ",", "variation_id", ")", "self", ".", "logger", ".", "debug", "(", "'Variation \"%s\" is mapped to experiment \"%s\" and user \"%s\" in the forced variation map'", "%", "(", "variation", ".", "key", ",", "experiment_key", ",", "user_id", ")", ")", "return", "variation" ]
Gets the forced variation key for the given user and experiment. Args: experiment_key: Key for experiment. user_id: The user ID. Returns: The variation which the given user and experiment should be forced into.
[ "Gets", "the", "forced", "variation", "key", "for", "the", "given", "user", "and", "experiment", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/project_config.py#L557-L600
train
optimizely/python-sdk
optimizely/event_dispatcher.py
EventDispatcher.dispatch_event
def dispatch_event(event): """ Dispatch the event being represented by the Event object. Args: event: Object holding information about the request to be dispatched to the Optimizely backend. """ try: if event.http_verb == enums.HTTPVerbs.GET: requests.get(event.url, params=event.params, timeout=REQUEST_TIMEOUT).raise_for_status() elif event.http_verb == enums.HTTPVerbs.POST: requests.post( event.url, data=json.dumps(event.params), headers=event.headers, timeout=REQUEST_TIMEOUT ).raise_for_status() except request_exception.RequestException as error: logging.error('Dispatch event failed. Error: %s' % str(error))
python
def dispatch_event(event): """ Dispatch the event being represented by the Event object. Args: event: Object holding information about the request to be dispatched to the Optimizely backend. """ try: if event.http_verb == enums.HTTPVerbs.GET: requests.get(event.url, params=event.params, timeout=REQUEST_TIMEOUT).raise_for_status() elif event.http_verb == enums.HTTPVerbs.POST: requests.post( event.url, data=json.dumps(event.params), headers=event.headers, timeout=REQUEST_TIMEOUT ).raise_for_status() except request_exception.RequestException as error: logging.error('Dispatch event failed. Error: %s' % str(error))
[ "def", "dispatch_event", "(", "event", ")", ":", "try", ":", "if", "event", ".", "http_verb", "==", "enums", ".", "HTTPVerbs", ".", "GET", ":", "requests", ".", "get", "(", "event", ".", "url", ",", "params", "=", "event", ".", "params", ",", "timeout", "=", "REQUEST_TIMEOUT", ")", ".", "raise_for_status", "(", ")", "elif", "event", ".", "http_verb", "==", "enums", ".", "HTTPVerbs", ".", "POST", ":", "requests", ".", "post", "(", "event", ".", "url", ",", "data", "=", "json", ".", "dumps", "(", "event", ".", "params", ")", ",", "headers", "=", "event", ".", "headers", ",", "timeout", "=", "REQUEST_TIMEOUT", ")", ".", "raise_for_status", "(", ")", "except", "request_exception", ".", "RequestException", "as", "error", ":", "logging", ".", "error", "(", "'Dispatch event failed. Error: %s'", "%", "str", "(", "error", ")", ")" ]
Dispatch the event being represented by the Event object. Args: event: Object holding information about the request to be dispatched to the Optimizely backend.
[ "Dispatch", "the", "event", "being", "represented", "by", "the", "Event", "object", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/event_dispatcher.py#L28-L44
train
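dispatch_event is a thin wrapper over the requests library: GET with query params or POST with a JSON body, a timeout, raise_for_status(), and a logged RequestException. A self-contained sketch of the same pattern; the URL, payload and 10-second timeout are placeholders, not values taken from the SDK:

import json
import logging
import requests

REQUEST_TIMEOUT = 10  # seconds; assumed for this sketch

def dispatch(url, params, http_verb='POST', headers=None):
    try:
        if http_verb == 'GET':
            requests.get(url, params=params, timeout=REQUEST_TIMEOUT).raise_for_status()
        else:
            requests.post(url, data=json.dumps(params),
                          headers=headers or {'Content-Type': 'application/json'},
                          timeout=REQUEST_TIMEOUT).raise_for_status()
    except requests.exceptions.RequestException as error:
        logging.error('Dispatch event failed. Error: %s' % str(error))

# dispatch('https://example.com/events', {'visitor_id': 'user-1'})  # placeholder endpoint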
optimizely/python-sdk
optimizely/optimizely.py
Optimizely._validate_instantiation_options
def _validate_instantiation_options(self, datafile, skip_json_validation): """ Helper method to validate all instantiation parameters. Args: datafile: JSON string representing the project. skip_json_validation: Boolean representing whether JSON schema validation needs to be skipped or not. Raises: Exception if provided instantiation options are not valid. """ if not skip_json_validation and not validator.is_datafile_valid(datafile): raise exceptions.InvalidInputException(enums.Errors.INVALID_INPUT_ERROR.format('datafile')) if not validator.is_event_dispatcher_valid(self.event_dispatcher): raise exceptions.InvalidInputException(enums.Errors.INVALID_INPUT_ERROR.format('event_dispatcher')) if not validator.is_logger_valid(self.logger): raise exceptions.InvalidInputException(enums.Errors.INVALID_INPUT_ERROR.format('logger')) if not validator.is_error_handler_valid(self.error_handler): raise exceptions.InvalidInputException(enums.Errors.INVALID_INPUT_ERROR.format('error_handler'))
python
def _validate_instantiation_options(self, datafile, skip_json_validation): """ Helper method to validate all instantiation parameters. Args: datafile: JSON string representing the project. skip_json_validation: Boolean representing whether JSON schema validation needs to be skipped or not. Raises: Exception if provided instantiation options are not valid. """ if not skip_json_validation and not validator.is_datafile_valid(datafile): raise exceptions.InvalidInputException(enums.Errors.INVALID_INPUT_ERROR.format('datafile')) if not validator.is_event_dispatcher_valid(self.event_dispatcher): raise exceptions.InvalidInputException(enums.Errors.INVALID_INPUT_ERROR.format('event_dispatcher')) if not validator.is_logger_valid(self.logger): raise exceptions.InvalidInputException(enums.Errors.INVALID_INPUT_ERROR.format('logger')) if not validator.is_error_handler_valid(self.error_handler): raise exceptions.InvalidInputException(enums.Errors.INVALID_INPUT_ERROR.format('error_handler'))
[ "def", "_validate_instantiation_options", "(", "self", ",", "datafile", ",", "skip_json_validation", ")", ":", "if", "not", "skip_json_validation", "and", "not", "validator", ".", "is_datafile_valid", "(", "datafile", ")", ":", "raise", "exceptions", ".", "InvalidInputException", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'datafile'", ")", ")", "if", "not", "validator", ".", "is_event_dispatcher_valid", "(", "self", ".", "event_dispatcher", ")", ":", "raise", "exceptions", ".", "InvalidInputException", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'event_dispatcher'", ")", ")", "if", "not", "validator", ".", "is_logger_valid", "(", "self", ".", "logger", ")", ":", "raise", "exceptions", ".", "InvalidInputException", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'logger'", ")", ")", "if", "not", "validator", ".", "is_error_handler_valid", "(", "self", ".", "error_handler", ")", ":", "raise", "exceptions", ".", "InvalidInputException", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'error_handler'", ")", ")" ]
Helper method to validate all instantiation parameters. Args: datafile: JSON string representing the project. skip_json_validation: Boolean representing whether JSON schema validation needs to be skipped or not. Raises: Exception if provided instantiation options are not valid.
[ "Helper", "method", "to", "validate", "all", "instantiation", "parameters", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/optimizely.py#L89-L110
train
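A hedged instantiation sketch showing the options this helper validates. datafile_json is a placeholder for the project datafile (a JSON string, typically fetched from the Optimizely CDN), LoggingEventDispatcher is the illustrative class from the earlier sketch, and SimpleLogger is assumed to be the logger bundled at optimizely.logger.

from optimizely import logger as optimizely_logger
from optimizely import optimizely

# datafile_json is a placeholder for the datafile JSON string (not defined here).
client = optimizely.Optimizely(
    datafile_json,
    event_dispatcher=LoggingEventDispatcher(),  # any object with dispatch_event(event)
    logger=optimizely_logger.SimpleLogger(),
    skip_json_validation=False)

The later usage sketches in this section assume this client instance.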
optimizely/python-sdk
optimizely/optimizely.py
Optimizely._validate_user_inputs
def _validate_user_inputs(self, attributes=None, event_tags=None): """ Helper method to validate user inputs. Args: attributes: Dict representing user attributes. event_tags: Dict representing metadata associated with an event. Returns: Boolean True if inputs are valid. False otherwise. """ if attributes and not validator.are_attributes_valid(attributes): self.logger.error('Provided attributes are in an invalid format.') self.error_handler.handle_error(exceptions.InvalidAttributeException(enums.Errors.INVALID_ATTRIBUTE_FORMAT)) return False if event_tags and not validator.are_event_tags_valid(event_tags): self.logger.error('Provided event tags are in an invalid format.') self.error_handler.handle_error(exceptions.InvalidEventTagException(enums.Errors.INVALID_EVENT_TAG_FORMAT)) return False return True
python
def _validate_user_inputs(self, attributes=None, event_tags=None): """ Helper method to validate user inputs. Args: attributes: Dict representing user attributes. event_tags: Dict representing metadata associated with an event. Returns: Boolean True if inputs are valid. False otherwise. """ if attributes and not validator.are_attributes_valid(attributes): self.logger.error('Provided attributes are in an invalid format.') self.error_handler.handle_error(exceptions.InvalidAttributeException(enums.Errors.INVALID_ATTRIBUTE_FORMAT)) return False if event_tags and not validator.are_event_tags_valid(event_tags): self.logger.error('Provided event tags are in an invalid format.') self.error_handler.handle_error(exceptions.InvalidEventTagException(enums.Errors.INVALID_EVENT_TAG_FORMAT)) return False return True
[ "def", "_validate_user_inputs", "(", "self", ",", "attributes", "=", "None", ",", "event_tags", "=", "None", ")", ":", "if", "attributes", "and", "not", "validator", ".", "are_attributes_valid", "(", "attributes", ")", ":", "self", ".", "logger", ".", "error", "(", "'Provided attributes are in an invalid format.'", ")", "self", ".", "error_handler", ".", "handle_error", "(", "exceptions", ".", "InvalidAttributeException", "(", "enums", ".", "Errors", ".", "INVALID_ATTRIBUTE_FORMAT", ")", ")", "return", "False", "if", "event_tags", "and", "not", "validator", ".", "are_event_tags_valid", "(", "event_tags", ")", ":", "self", ".", "logger", ".", "error", "(", "'Provided event tags are in an invalid format.'", ")", "self", ".", "error_handler", ".", "handle_error", "(", "exceptions", ".", "InvalidEventTagException", "(", "enums", ".", "Errors", ".", "INVALID_EVENT_TAG_FORMAT", ")", ")", "return", "False", "return", "True" ]
Helper method to validate user inputs. Args: attributes: Dict representing user attributes. event_tags: Dict representing metadata associated with an event. Returns: Boolean True if inputs are valid. False otherwise.
[ "Helper", "method", "to", "validate", "user", "inputs", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/optimizely.py#L112-L134
train
optimizely/python-sdk
optimizely/optimizely.py
Optimizely._send_impression_event
def _send_impression_event(self, experiment, variation, user_id, attributes): """ Helper method to send impression event. Args: experiment: Experiment for which impression event is being sent. variation: Variation picked for user for the given experiment. user_id: ID for user. attributes: Dict representing user attributes and values which need to be recorded. """ impression_event = self.event_builder.create_impression_event(experiment, variation.id, user_id, attributes) self.logger.debug('Dispatching impression event to URL %s with params %s.' % ( impression_event.url, impression_event.params )) try: self.event_dispatcher.dispatch_event(impression_event) except: self.logger.exception('Unable to dispatch impression event!') self.notification_center.send_notifications(enums.NotificationTypes.ACTIVATE, experiment, user_id, attributes, variation, impression_event)
python
def _send_impression_event(self, experiment, variation, user_id, attributes): """ Helper method to send impression event. Args: experiment: Experiment for which impression event is being sent. variation: Variation picked for user for the given experiment. user_id: ID for user. attributes: Dict representing user attributes and values which need to be recorded. """ impression_event = self.event_builder.create_impression_event(experiment, variation.id, user_id, attributes) self.logger.debug('Dispatching impression event to URL %s with params %s.' % ( impression_event.url, impression_event.params )) try: self.event_dispatcher.dispatch_event(impression_event) except: self.logger.exception('Unable to dispatch impression event!') self.notification_center.send_notifications(enums.NotificationTypes.ACTIVATE, experiment, user_id, attributes, variation, impression_event)
[ "def", "_send_impression_event", "(", "self", ",", "experiment", ",", "variation", ",", "user_id", ",", "attributes", ")", ":", "impression_event", "=", "self", ".", "event_builder", ".", "create_impression_event", "(", "experiment", ",", "variation", ".", "id", ",", "user_id", ",", "attributes", ")", "self", ".", "logger", ".", "debug", "(", "'Dispatching impression event to URL %s with params %s.'", "%", "(", "impression_event", ".", "url", ",", "impression_event", ".", "params", ")", ")", "try", ":", "self", ".", "event_dispatcher", ".", "dispatch_event", "(", "impression_event", ")", "except", ":", "self", ".", "logger", ".", "exception", "(", "'Unable to dispatch impression event!'", ")", "self", ".", "notification_center", ".", "send_notifications", "(", "enums", ".", "NotificationTypes", ".", "ACTIVATE", ",", "experiment", ",", "user_id", ",", "attributes", ",", "variation", ",", "impression_event", ")" ]
Helper method to send impression event. Args: experiment: Experiment for which impression event is being sent. variation: Variation picked for user for the given experiment. user_id: ID for user. attributes: Dict representing user attributes and values which need to be recorded.
[ "Helper", "method", "to", "send", "impression", "event", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/optimizely.py#L136-L162
train
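Judging from the send_notifications call above, an ACTIVATE listener receives the experiment, user id, attributes, variation, and the dispatched impression event. A sketch of registering one; add_notification_listener is assumed to be the registration method on the notification center, and client is the instance from the earlier sketch.

from optimizely.helpers import enums


def on_activate(experiment, user_id, attributes, variation, event):
  # Invoked after the impression event for this activation has been dispatched.
  print('Activated %s -> %s for user %s' % (experiment.key, variation.key, user_id))


client.notification_center.add_notification_listener(
    enums.NotificationTypes.ACTIVATE, on_activate)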
optimizely/python-sdk
optimizely/optimizely.py
Optimizely._get_feature_variable_for_type
def _get_feature_variable_for_type(self, feature_key, variable_key, variable_type, user_id, attributes): """ Helper method to determine value for a certain variable attached to a feature flag based on type of variable. Args: feature_key: Key of the feature whose variable's value is being accessed. variable_key: Key of the variable whose value is to be accessed. variable_type: Type of variable which could be one of boolean/double/integer/string. user_id: ID for user. attributes: Dict representing user attributes. Returns: Value of the variable. None if: - Feature key is invalid. - Variable key is invalid. - Mismatch with type of variable. """ if not validator.is_non_empty_string(feature_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('feature_key')) return None if not validator.is_non_empty_string(variable_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('variable_key')) return None if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return None if not self._validate_user_inputs(attributes): return None feature_flag = self.config.get_feature_from_key(feature_key) if not feature_flag: return None variable = self.config.get_variable_for_feature(feature_key, variable_key) if not variable: return None # Return None if type differs if variable.type != variable_type: self.logger.warning( 'Requested variable type "%s", but variable is of type "%s". ' 'Use correct API to retrieve value. Returning None.' % (variable_type, variable.type) ) return None feature_enabled = False source_info = {} variable_value = variable.defaultValue decision = self.decision_service.get_variation_for_feature(feature_flag, user_id, attributes) if decision.variation: feature_enabled = decision.variation.featureEnabled if feature_enabled: variable_value = self.config.get_variable_value_for_variation(variable, decision.variation) self.logger.info( 'Got variable value "%s" for variable "%s" of feature flag "%s".' % ( variable_value, variable_key, feature_key ) ) else: self.logger.info( 'Feature "%s" for variation "%s" is not enabled. ' 'Returning the default variable value "%s".' % (feature_key, decision.variation.key, variable_value) ) else: self.logger.info( 'User "%s" is not in any variation or rollout rule. ' 'Returning default value for variable "%s" of feature flag "%s".' % (user_id, variable_key, feature_key) ) if decision.source == enums.DecisionSources.FEATURE_TEST: source_info = { 'experiment_key': decision.experiment.key, 'variation_key': decision.variation.key } try: actual_value = self.config.get_typecast_value(variable_value, variable_type) except: self.logger.error('Unable to cast value. Returning None.') actual_value = None self.notification_center.send_notifications( enums.NotificationTypes.DECISION, enums.DecisionNotificationTypes.FEATURE_VARIABLE, user_id, attributes or {}, { 'feature_key': feature_key, 'feature_enabled': feature_enabled, 'source': decision.source, 'variable_key': variable_key, 'variable_value': actual_value, 'variable_type': variable_type, 'source_info': source_info } ) return actual_value
python
def _get_feature_variable_for_type(self, feature_key, variable_key, variable_type, user_id, attributes): """ Helper method to determine value for a certain variable attached to a feature flag based on type of variable. Args: feature_key: Key of the feature whose variable's value is being accessed. variable_key: Key of the variable whose value is to be accessed. variable_type: Type of variable which could be one of boolean/double/integer/string. user_id: ID for user. attributes: Dict representing user attributes. Returns: Value of the variable. None if: - Feature key is invalid. - Variable key is invalid. - Mismatch with type of variable. """ if not validator.is_non_empty_string(feature_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('feature_key')) return None if not validator.is_non_empty_string(variable_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('variable_key')) return None if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return None if not self._validate_user_inputs(attributes): return None feature_flag = self.config.get_feature_from_key(feature_key) if not feature_flag: return None variable = self.config.get_variable_for_feature(feature_key, variable_key) if not variable: return None # Return None if type differs if variable.type != variable_type: self.logger.warning( 'Requested variable type "%s", but variable is of type "%s". ' 'Use correct API to retrieve value. Returning None.' % (variable_type, variable.type) ) return None feature_enabled = False source_info = {} variable_value = variable.defaultValue decision = self.decision_service.get_variation_for_feature(feature_flag, user_id, attributes) if decision.variation: feature_enabled = decision.variation.featureEnabled if feature_enabled: variable_value = self.config.get_variable_value_for_variation(variable, decision.variation) self.logger.info( 'Got variable value "%s" for variable "%s" of feature flag "%s".' % ( variable_value, variable_key, feature_key ) ) else: self.logger.info( 'Feature "%s" for variation "%s" is not enabled. ' 'Returning the default variable value "%s".' % (feature_key, decision.variation.key, variable_value) ) else: self.logger.info( 'User "%s" is not in any variation or rollout rule. ' 'Returning default value for variable "%s" of feature flag "%s".' % (user_id, variable_key, feature_key) ) if decision.source == enums.DecisionSources.FEATURE_TEST: source_info = { 'experiment_key': decision.experiment.key, 'variation_key': decision.variation.key } try: actual_value = self.config.get_typecast_value(variable_value, variable_type) except: self.logger.error('Unable to cast value. Returning None.') actual_value = None self.notification_center.send_notifications( enums.NotificationTypes.DECISION, enums.DecisionNotificationTypes.FEATURE_VARIABLE, user_id, attributes or {}, { 'feature_key': feature_key, 'feature_enabled': feature_enabled, 'source': decision.source, 'variable_key': variable_key, 'variable_value': actual_value, 'variable_type': variable_type, 'source_info': source_info } ) return actual_value
[ "def", "_get_feature_variable_for_type", "(", "self", ",", "feature_key", ",", "variable_key", ",", "variable_type", ",", "user_id", ",", "attributes", ")", ":", "if", "not", "validator", ".", "is_non_empty_string", "(", "feature_key", ")", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'feature_key'", ")", ")", "return", "None", "if", "not", "validator", ".", "is_non_empty_string", "(", "variable_key", ")", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'variable_key'", ")", ")", "return", "None", "if", "not", "isinstance", "(", "user_id", ",", "string_types", ")", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'user_id'", ")", ")", "return", "None", "if", "not", "self", ".", "_validate_user_inputs", "(", "attributes", ")", ":", "return", "None", "feature_flag", "=", "self", ".", "config", ".", "get_feature_from_key", "(", "feature_key", ")", "if", "not", "feature_flag", ":", "return", "None", "variable", "=", "self", ".", "config", ".", "get_variable_for_feature", "(", "feature_key", ",", "variable_key", ")", "if", "not", "variable", ":", "return", "None", "# Return None if type differs", "if", "variable", ".", "type", "!=", "variable_type", ":", "self", ".", "logger", ".", "warning", "(", "'Requested variable type \"%s\", but variable is of type \"%s\". '", "'Use correct API to retrieve value. Returning None.'", "%", "(", "variable_type", ",", "variable", ".", "type", ")", ")", "return", "None", "feature_enabled", "=", "False", "source_info", "=", "{", "}", "variable_value", "=", "variable", ".", "defaultValue", "decision", "=", "self", ".", "decision_service", ".", "get_variation_for_feature", "(", "feature_flag", ",", "user_id", ",", "attributes", ")", "if", "decision", ".", "variation", ":", "feature_enabled", "=", "decision", ".", "variation", ".", "featureEnabled", "if", "feature_enabled", ":", "variable_value", "=", "self", ".", "config", ".", "get_variable_value_for_variation", "(", "variable", ",", "decision", ".", "variation", ")", "self", ".", "logger", ".", "info", "(", "'Got variable value \"%s\" for variable \"%s\" of feature flag \"%s\".'", "%", "(", "variable_value", ",", "variable_key", ",", "feature_key", ")", ")", "else", ":", "self", ".", "logger", ".", "info", "(", "'Feature \"%s\" for variation \"%s\" is not enabled. '", "'Returning the default variable value \"%s\".'", "%", "(", "feature_key", ",", "decision", ".", "variation", ".", "key", ",", "variable_value", ")", ")", "else", ":", "self", ".", "logger", ".", "info", "(", "'User \"%s\" is not in any variation or rollout rule. '", "'Returning default value for variable \"%s\" of feature flag \"%s\".'", "%", "(", "user_id", ",", "variable_key", ",", "feature_key", ")", ")", "if", "decision", ".", "source", "==", "enums", ".", "DecisionSources", ".", "FEATURE_TEST", ":", "source_info", "=", "{", "'experiment_key'", ":", "decision", ".", "experiment", ".", "key", ",", "'variation_key'", ":", "decision", ".", "variation", ".", "key", "}", "try", ":", "actual_value", "=", "self", ".", "config", ".", "get_typecast_value", "(", "variable_value", ",", "variable_type", ")", "except", ":", "self", ".", "logger", ".", "error", "(", "'Unable to cast value. 
Returning None.'", ")", "actual_value", "=", "None", "self", ".", "notification_center", ".", "send_notifications", "(", "enums", ".", "NotificationTypes", ".", "DECISION", ",", "enums", ".", "DecisionNotificationTypes", ".", "FEATURE_VARIABLE", ",", "user_id", ",", "attributes", "or", "{", "}", ",", "{", "'feature_key'", ":", "feature_key", ",", "'feature_enabled'", ":", "feature_enabled", ",", "'source'", ":", "decision", ".", "source", ",", "'variable_key'", ":", "variable_key", ",", "'variable_value'", ":", "actual_value", ",", "'variable_type'", ":", "variable_type", ",", "'source_info'", ":", "source_info", "}", ")", "return", "actual_value" ]
Helper method to determine value for a certain variable attached to a feature flag based on type of variable. Args: feature_key: Key of the feature whose variable's value is being accessed. variable_key: Key of the variable whose value is to be accessed. variable_type: Type of variable which could be one of boolean/double/integer/string. user_id: ID for user. attributes: Dict representing user attributes. Returns: Value of the variable. None if: - Feature key is invalid. - Variable key is invalid. - Mismatch with type of variable.
[ "Helper", "method", "to", "determine", "value", "for", "a", "certain", "variable", "attached", "to", "a", "feature", "flag", "based", "on", "type", "of", "variable", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/optimizely.py#L164-L263
train
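The helper above also emits a DECISION notification of type FEATURE_VARIABLE; based on the send_notifications call shown, a listener receives the decision type, user id, attributes, and the decision info dict. A sketch of a listener filtering for those notifications (client and add_notification_listener as assumed earlier):

from optimizely.helpers import enums


def on_decision(decision_type, user_id, attributes, decision_info):
  # Only react to feature-variable decisions; other DECISION types pass through.
  if decision_type == enums.DecisionNotificationTypes.FEATURE_VARIABLE:
    print('%s.%s = %r (source: %s)' % (decision_info['feature_key'],
                                       decision_info['variable_key'],
                                       decision_info['variable_value'],
                                       decision_info['source']))


client.notification_center.add_notification_listener(
    enums.NotificationTypes.DECISION, on_decision)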
optimizely/python-sdk
optimizely/optimizely.py
Optimizely.activate
def activate(self, experiment_key, user_id, attributes=None): """ Buckets visitor and sends impression event to Optimizely. Args: experiment_key: Experiment which needs to be activated. user_id: ID for user. attributes: Dict representing user attributes and values which need to be recorded. Returns: Variation key representing the variation the user will be bucketed in. None if user is not in experiment or if experiment is not Running. """ if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('activate')) return None if not validator.is_non_empty_string(experiment_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('experiment_key')) return None if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return None variation_key = self.get_variation(experiment_key, user_id, attributes) if not variation_key: self.logger.info('Not activating user "%s".' % user_id) return None experiment = self.config.get_experiment_from_key(experiment_key) variation = self.config.get_variation_from_key(experiment_key, variation_key) # Create and dispatch impression event self.logger.info('Activating user "%s" in experiment "%s".' % (user_id, experiment.key)) self._send_impression_event(experiment, variation, user_id, attributes) return variation.key
python
def activate(self, experiment_key, user_id, attributes=None): """ Buckets visitor and sends impression event to Optimizely. Args: experiment_key: Experiment which needs to be activated. user_id: ID for user. attributes: Dict representing user attributes and values which need to be recorded. Returns: Variation key representing the variation the user will be bucketed in. None if user is not in experiment or if experiment is not Running. """ if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('activate')) return None if not validator.is_non_empty_string(experiment_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('experiment_key')) return None if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return None variation_key = self.get_variation(experiment_key, user_id, attributes) if not variation_key: self.logger.info('Not activating user "%s".' % user_id) return None experiment = self.config.get_experiment_from_key(experiment_key) variation = self.config.get_variation_from_key(experiment_key, variation_key) # Create and dispatch impression event self.logger.info('Activating user "%s" in experiment "%s".' % (user_id, experiment.key)) self._send_impression_event(experiment, variation, user_id, attributes) return variation.key
[ "def", "activate", "(", "self", ",", "experiment_key", ",", "user_id", ",", "attributes", "=", "None", ")", ":", "if", "not", "self", ".", "is_valid", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_DATAFILE", ".", "format", "(", "'activate'", ")", ")", "return", "None", "if", "not", "validator", ".", "is_non_empty_string", "(", "experiment_key", ")", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'experiment_key'", ")", ")", "return", "None", "if", "not", "isinstance", "(", "user_id", ",", "string_types", ")", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'user_id'", ")", ")", "return", "None", "variation_key", "=", "self", ".", "get_variation", "(", "experiment_key", ",", "user_id", ",", "attributes", ")", "if", "not", "variation_key", ":", "self", ".", "logger", ".", "info", "(", "'Not activating user \"%s\".'", "%", "user_id", ")", "return", "None", "experiment", "=", "self", ".", "config", ".", "get_experiment_from_key", "(", "experiment_key", ")", "variation", "=", "self", ".", "config", ".", "get_variation_from_key", "(", "experiment_key", ",", "variation_key", ")", "# Create and dispatch impression event", "self", ".", "logger", ".", "info", "(", "'Activating user \"%s\" in experiment \"%s\".'", "%", "(", "user_id", ",", "experiment", ".", "key", ")", ")", "self", ".", "_send_impression_event", "(", "experiment", ",", "variation", ",", "user_id", ",", "attributes", ")", "return", "variation", ".", "key" ]
Buckets visitor and sends impression event to Optimizely. Args: experiment_key: Experiment which needs to be activated. user_id: ID for user. attributes: Dict representing user attributes and values which need to be recorded. Returns: Variation key representing the variation the user will be bucketed in. None if user is not in experiment or if experiment is not Running.
[ "Buckets", "visitor", "and", "sends", "impression", "event", "to", "Optimizely", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/optimizely.py#L265-L303
train
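Typical activate usage with the client from the earlier sketch. The experiment key, user id, and attributes are illustrative and assume a matching experiment exists in the datafile.

variation_key = client.activate('checkout_flow_test', 'user_123', {'plan': 'premium'})
if variation_key is None:
  # User was not bucketed or the experiment is not running; serve the default experience.
  variation_key = 'control'
print('Serving variation: %s' % variation_key)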
optimizely/python-sdk
optimizely/optimizely.py
Optimizely.track
def track(self, event_key, user_id, attributes=None, event_tags=None): """ Send conversion event to Optimizely. Args: event_key: Event key representing the event which needs to be recorded. user_id: ID for user. attributes: Dict representing visitor attributes and values which need to be recorded. event_tags: Dict representing metadata associated with the event. """ if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('track')) return if not validator.is_non_empty_string(event_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('event_key')) return if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return if not self._validate_user_inputs(attributes, event_tags): return event = self.config.get_event(event_key) if not event: self.logger.info('Not tracking user "%s" for event "%s".' % (user_id, event_key)) return conversion_event = self.event_builder.create_conversion_event(event_key, user_id, attributes, event_tags) self.logger.info('Tracking event "%s" for user "%s".' % (event_key, user_id)) self.logger.debug('Dispatching conversion event to URL %s with params %s.' % ( conversion_event.url, conversion_event.params )) try: self.event_dispatcher.dispatch_event(conversion_event) except: self.logger.exception('Unable to dispatch conversion event!') self.notification_center.send_notifications(enums.NotificationTypes.TRACK, event_key, user_id, attributes, event_tags, conversion_event)
python
def track(self, event_key, user_id, attributes=None, event_tags=None): """ Send conversion event to Optimizely. Args: event_key: Event key representing the event which needs to be recorded. user_id: ID for user. attributes: Dict representing visitor attributes and values which need to be recorded. event_tags: Dict representing metadata associated with the event. """ if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('track')) return if not validator.is_non_empty_string(event_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('event_key')) return if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return if not self._validate_user_inputs(attributes, event_tags): return event = self.config.get_event(event_key) if not event: self.logger.info('Not tracking user "%s" for event "%s".' % (user_id, event_key)) return conversion_event = self.event_builder.create_conversion_event(event_key, user_id, attributes, event_tags) self.logger.info('Tracking event "%s" for user "%s".' % (event_key, user_id)) self.logger.debug('Dispatching conversion event to URL %s with params %s.' % ( conversion_event.url, conversion_event.params )) try: self.event_dispatcher.dispatch_event(conversion_event) except: self.logger.exception('Unable to dispatch conversion event!') self.notification_center.send_notifications(enums.NotificationTypes.TRACK, event_key, user_id, attributes, event_tags, conversion_event)
[ "def", "track", "(", "self", ",", "event_key", ",", "user_id", ",", "attributes", "=", "None", ",", "event_tags", "=", "None", ")", ":", "if", "not", "self", ".", "is_valid", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_DATAFILE", ".", "format", "(", "'track'", ")", ")", "return", "if", "not", "validator", ".", "is_non_empty_string", "(", "event_key", ")", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'event_key'", ")", ")", "return", "if", "not", "isinstance", "(", "user_id", ",", "string_types", ")", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'user_id'", ")", ")", "return", "if", "not", "self", ".", "_validate_user_inputs", "(", "attributes", ",", "event_tags", ")", ":", "return", "event", "=", "self", ".", "config", ".", "get_event", "(", "event_key", ")", "if", "not", "event", ":", "self", ".", "logger", ".", "info", "(", "'Not tracking user \"%s\" for event \"%s\".'", "%", "(", "user_id", ",", "event_key", ")", ")", "return", "conversion_event", "=", "self", ".", "event_builder", ".", "create_conversion_event", "(", "event_key", ",", "user_id", ",", "attributes", ",", "event_tags", ")", "self", ".", "logger", ".", "info", "(", "'Tracking event \"%s\" for user \"%s\".'", "%", "(", "event_key", ",", "user_id", ")", ")", "self", ".", "logger", ".", "debug", "(", "'Dispatching conversion event to URL %s with params %s.'", "%", "(", "conversion_event", ".", "url", ",", "conversion_event", ".", "params", ")", ")", "try", ":", "self", ".", "event_dispatcher", ".", "dispatch_event", "(", "conversion_event", ")", "except", ":", "self", ".", "logger", ".", "exception", "(", "'Unable to dispatch conversion event!'", ")", "self", ".", "notification_center", ".", "send_notifications", "(", "enums", ".", "NotificationTypes", ".", "TRACK", ",", "event_key", ",", "user_id", ",", "attributes", ",", "event_tags", ",", "conversion_event", ")" ]
Send conversion event to Optimizely. Args: event_key: Event key representing the event which needs to be recorded. user_id: ID for user. attributes: Dict representing visitor attributes and values which need to be recorded. event_tags: Dict representing metadata associated with the event.
[ "Send", "conversion", "event", "to", "Optimizely", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/optimizely.py#L305-L346
train
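A conversion call paired with the activation above. The event key and tag values are illustrative; event_tags follows the documented dict-of-metadata shape.

client.track('purchase_completed', 'user_123',
             attributes={'plan': 'premium'},
             event_tags={'revenue': 4200, 'items': 3})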
optimizely/python-sdk
optimizely/optimizely.py
Optimizely.get_variation
def get_variation(self, experiment_key, user_id, attributes=None): """ Gets variation where user will be bucketed. Args: experiment_key: Experiment for which user variation needs to be determined. user_id: ID for user. attributes: Dict representing user attributes. Returns: Variation key representing the variation the user will be bucketed in. None if user is not in experiment or if experiment is not Running. """ if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('get_variation')) return None if not validator.is_non_empty_string(experiment_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('experiment_key')) return None if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return None experiment = self.config.get_experiment_from_key(experiment_key) variation_key = None if not experiment: self.logger.info('Experiment key "%s" is invalid. Not activating user "%s".' % ( experiment_key, user_id )) return None if not self._validate_user_inputs(attributes): return None variation = self.decision_service.get_variation(experiment, user_id, attributes) if variation: variation_key = variation.key if self.config.is_feature_experiment(experiment.id): decision_notification_type = enums.DecisionNotificationTypes.FEATURE_TEST else: decision_notification_type = enums.DecisionNotificationTypes.AB_TEST self.notification_center.send_notifications( enums.NotificationTypes.DECISION, decision_notification_type, user_id, attributes or {}, { 'experiment_key': experiment_key, 'variation_key': variation_key } ) return variation_key
python
def get_variation(self, experiment_key, user_id, attributes=None): """ Gets variation where user will be bucketed. Args: experiment_key: Experiment for which user variation needs to be determined. user_id: ID for user. attributes: Dict representing user attributes. Returns: Variation key representing the variation the user will be bucketed in. None if user is not in experiment or if experiment is not Running. """ if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('get_variation')) return None if not validator.is_non_empty_string(experiment_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('experiment_key')) return None if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return None experiment = self.config.get_experiment_from_key(experiment_key) variation_key = None if not experiment: self.logger.info('Experiment key "%s" is invalid. Not activating user "%s".' % ( experiment_key, user_id )) return None if not self._validate_user_inputs(attributes): return None variation = self.decision_service.get_variation(experiment, user_id, attributes) if variation: variation_key = variation.key if self.config.is_feature_experiment(experiment.id): decision_notification_type = enums.DecisionNotificationTypes.FEATURE_TEST else: decision_notification_type = enums.DecisionNotificationTypes.AB_TEST self.notification_center.send_notifications( enums.NotificationTypes.DECISION, decision_notification_type, user_id, attributes or {}, { 'experiment_key': experiment_key, 'variation_key': variation_key } ) return variation_key
[ "def", "get_variation", "(", "self", ",", "experiment_key", ",", "user_id", ",", "attributes", "=", "None", ")", ":", "if", "not", "self", ".", "is_valid", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_DATAFILE", ".", "format", "(", "'get_variation'", ")", ")", "return", "None", "if", "not", "validator", ".", "is_non_empty_string", "(", "experiment_key", ")", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'experiment_key'", ")", ")", "return", "None", "if", "not", "isinstance", "(", "user_id", ",", "string_types", ")", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'user_id'", ")", ")", "return", "None", "experiment", "=", "self", ".", "config", ".", "get_experiment_from_key", "(", "experiment_key", ")", "variation_key", "=", "None", "if", "not", "experiment", ":", "self", ".", "logger", ".", "info", "(", "'Experiment key \"%s\" is invalid. Not activating user \"%s\".'", "%", "(", "experiment_key", ",", "user_id", ")", ")", "return", "None", "if", "not", "self", ".", "_validate_user_inputs", "(", "attributes", ")", ":", "return", "None", "variation", "=", "self", ".", "decision_service", ".", "get_variation", "(", "experiment", ",", "user_id", ",", "attributes", ")", "if", "variation", ":", "variation_key", "=", "variation", ".", "key", "if", "self", ".", "config", ".", "is_feature_experiment", "(", "experiment", ".", "id", ")", ":", "decision_notification_type", "=", "enums", ".", "DecisionNotificationTypes", ".", "FEATURE_TEST", "else", ":", "decision_notification_type", "=", "enums", ".", "DecisionNotificationTypes", ".", "AB_TEST", "self", ".", "notification_center", ".", "send_notifications", "(", "enums", ".", "NotificationTypes", ".", "DECISION", ",", "decision_notification_type", ",", "user_id", ",", "attributes", "or", "{", "}", ",", "{", "'experiment_key'", ":", "experiment_key", ",", "'variation_key'", ":", "variation_key", "}", ")", "return", "variation_key" ]
Gets variation where user will be bucketed. Args: experiment_key: Experiment for which user variation needs to be determined. user_id: ID for user. attributes: Dict representing user attributes. Returns: Variation key representing the variation the user will be bucketed in. None if user is not in experiment or if experiment is not Running.
[ "Gets", "variation", "where", "user", "will", "be", "bucketed", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/optimizely.py#L348-L406
train
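get_variation performs the same bucketing as activate but dispatches no impression event, so it suits read-only lookups. Keys below are illustrative and assume the client from earlier.

variation_key = client.get_variation('checkout_flow_test', 'user_123', {'plan': 'premium'})
if variation_key == 'treatment':
  print('User sees the treatment experience')
else:
  print('User sees the control experience (or is not in the experiment)')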
optimizely/python-sdk
optimizely/optimizely.py
Optimizely.is_feature_enabled
def is_feature_enabled(self, feature_key, user_id, attributes=None): """ Returns true if the feature is enabled for the given user. Args: feature_key: The key of the feature for which we are determining if it is enabled or not for the given user. user_id: ID for user. attributes: Dict representing user attributes. Returns: True if the feature is enabled for the user. False otherwise. """ if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('is_feature_enabled')) return False if not validator.is_non_empty_string(feature_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('feature_key')) return False if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return False if not self._validate_user_inputs(attributes): return False feature = self.config.get_feature_from_key(feature_key) if not feature: return False feature_enabled = False source_info = {} decision = self.decision_service.get_variation_for_feature(feature, user_id, attributes) is_source_experiment = decision.source == enums.DecisionSources.FEATURE_TEST if decision.variation: if decision.variation.featureEnabled is True: feature_enabled = True # Send event if Decision came from an experiment. if is_source_experiment: source_info = { 'experiment_key': decision.experiment.key, 'variation_key': decision.variation.key } self._send_impression_event(decision.experiment, decision.variation, user_id, attributes) if feature_enabled: self.logger.info('Feature "%s" is enabled for user "%s".' % (feature_key, user_id)) else: self.logger.info('Feature "%s" is not enabled for user "%s".' % (feature_key, user_id)) self.notification_center.send_notifications( enums.NotificationTypes.DECISION, enums.DecisionNotificationTypes.FEATURE, user_id, attributes or {}, { 'feature_key': feature_key, 'feature_enabled': feature_enabled, 'source': decision.source, 'source_info': source_info } ) return feature_enabled
python
def is_feature_enabled(self, feature_key, user_id, attributes=None): """ Returns true if the feature is enabled for the given user. Args: feature_key: The key of the feature for which we are determining if it is enabled or not for the given user. user_id: ID for user. attributes: Dict representing user attributes. Returns: True if the feature is enabled for the user. False otherwise. """ if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('is_feature_enabled')) return False if not validator.is_non_empty_string(feature_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('feature_key')) return False if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return False if not self._validate_user_inputs(attributes): return False feature = self.config.get_feature_from_key(feature_key) if not feature: return False feature_enabled = False source_info = {} decision = self.decision_service.get_variation_for_feature(feature, user_id, attributes) is_source_experiment = decision.source == enums.DecisionSources.FEATURE_TEST if decision.variation: if decision.variation.featureEnabled is True: feature_enabled = True # Send event if Decision came from an experiment. if is_source_experiment: source_info = { 'experiment_key': decision.experiment.key, 'variation_key': decision.variation.key } self._send_impression_event(decision.experiment, decision.variation, user_id, attributes) if feature_enabled: self.logger.info('Feature "%s" is enabled for user "%s".' % (feature_key, user_id)) else: self.logger.info('Feature "%s" is not enabled for user "%s".' % (feature_key, user_id)) self.notification_center.send_notifications( enums.NotificationTypes.DECISION, enums.DecisionNotificationTypes.FEATURE, user_id, attributes or {}, { 'feature_key': feature_key, 'feature_enabled': feature_enabled, 'source': decision.source, 'source_info': source_info } ) return feature_enabled
[ "def", "is_feature_enabled", "(", "self", ",", "feature_key", ",", "user_id", ",", "attributes", "=", "None", ")", ":", "if", "not", "self", ".", "is_valid", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_DATAFILE", ".", "format", "(", "'is_feature_enabled'", ")", ")", "return", "False", "if", "not", "validator", ".", "is_non_empty_string", "(", "feature_key", ")", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'feature_key'", ")", ")", "return", "False", "if", "not", "isinstance", "(", "user_id", ",", "string_types", ")", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'user_id'", ")", ")", "return", "False", "if", "not", "self", ".", "_validate_user_inputs", "(", "attributes", ")", ":", "return", "False", "feature", "=", "self", ".", "config", ".", "get_feature_from_key", "(", "feature_key", ")", "if", "not", "feature", ":", "return", "False", "feature_enabled", "=", "False", "source_info", "=", "{", "}", "decision", "=", "self", ".", "decision_service", ".", "get_variation_for_feature", "(", "feature", ",", "user_id", ",", "attributes", ")", "is_source_experiment", "=", "decision", ".", "source", "==", "enums", ".", "DecisionSources", ".", "FEATURE_TEST", "if", "decision", ".", "variation", ":", "if", "decision", ".", "variation", ".", "featureEnabled", "is", "True", ":", "feature_enabled", "=", "True", "# Send event if Decision came from an experiment.", "if", "is_source_experiment", ":", "source_info", "=", "{", "'experiment_key'", ":", "decision", ".", "experiment", ".", "key", ",", "'variation_key'", ":", "decision", ".", "variation", ".", "key", "}", "self", ".", "_send_impression_event", "(", "decision", ".", "experiment", ",", "decision", ".", "variation", ",", "user_id", ",", "attributes", ")", "if", "feature_enabled", ":", "self", ".", "logger", ".", "info", "(", "'Feature \"%s\" is enabled for user \"%s\".'", "%", "(", "feature_key", ",", "user_id", ")", ")", "else", ":", "self", ".", "logger", ".", "info", "(", "'Feature \"%s\" is not enabled for user \"%s\".'", "%", "(", "feature_key", ",", "user_id", ")", ")", "self", ".", "notification_center", ".", "send_notifications", "(", "enums", ".", "NotificationTypes", ".", "DECISION", ",", "enums", ".", "DecisionNotificationTypes", ".", "FEATURE", ",", "user_id", ",", "attributes", "or", "{", "}", ",", "{", "'feature_key'", ":", "feature_key", ",", "'feature_enabled'", ":", "feature_enabled", ",", "'source'", ":", "decision", ".", "source", ",", "'source_info'", ":", "source_info", "}", ")", "return", "feature_enabled" ]
Returns true if the feature is enabled for the given user. Args: feature_key: The key of the feature for which we are determining if it is enabled or not for the given user. user_id: ID for user. attributes: Dict representing user attributes. Returns: True if the feature is enabled for the user. False otherwise.
[ "Returns", "true", "if", "the", "feature", "is", "enabled", "for", "the", "given", "user", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/optimizely.py#L408-L476
train
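Checking a feature flag for a user; note that, per the code above, this call can itself dispatch an impression event when the decision came from a feature test. The feature key is illustrative.

if client.is_feature_enabled('new_search', 'user_123', {'plan': 'premium'}):
  print('new_search is on for user_123')
else:
  print('new_search is off for user_123')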
optimizely/python-sdk
optimizely/optimizely.py
Optimizely.get_enabled_features
def get_enabled_features(self, user_id, attributes=None): """ Returns the list of features that are enabled for the user. Args: user_id: ID for user. attributes: Dict representing user attributes. Returns: A list of the keys of the features that are enabled for the user. """ enabled_features = [] if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('get_enabled_features')) return enabled_features if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return enabled_features if not self._validate_user_inputs(attributes): return enabled_features for feature in self.config.feature_key_map.values(): if self.is_feature_enabled(feature.key, user_id, attributes): enabled_features.append(feature.key) return enabled_features
python
def get_enabled_features(self, user_id, attributes=None): """ Returns the list of features that are enabled for the user. Args: user_id: ID for user. attributes: Dict representing user attributes. Returns: A list of the keys of the features that are enabled for the user. """ enabled_features = [] if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('get_enabled_features')) return enabled_features if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return enabled_features if not self._validate_user_inputs(attributes): return enabled_features for feature in self.config.feature_key_map.values(): if self.is_feature_enabled(feature.key, user_id, attributes): enabled_features.append(feature.key) return enabled_features
[ "def", "get_enabled_features", "(", "self", ",", "user_id", ",", "attributes", "=", "None", ")", ":", "enabled_features", "=", "[", "]", "if", "not", "self", ".", "is_valid", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_DATAFILE", ".", "format", "(", "'get_enabled_features'", ")", ")", "return", "enabled_features", "if", "not", "isinstance", "(", "user_id", ",", "string_types", ")", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'user_id'", ")", ")", "return", "enabled_features", "if", "not", "self", ".", "_validate_user_inputs", "(", "attributes", ")", ":", "return", "enabled_features", "for", "feature", "in", "self", ".", "config", ".", "feature_key_map", ".", "values", "(", ")", ":", "if", "self", ".", "is_feature_enabled", "(", "feature", ".", "key", ",", "user_id", ",", "attributes", ")", ":", "enabled_features", ".", "append", "(", "feature", ".", "key", ")", "return", "enabled_features" ]
Returns the list of features that are enabled for the user. Args: user_id: ID for user. attributes: Dict representing user attributes. Returns: A list of the keys of the features that are enabled for the user.
[ "Returns", "the", "list", "of", "features", "that", "are", "enabled", "for", "the", "user", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/optimizely.py#L478-L505
train
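Listing every enabled feature for a user. Because the method calls is_feature_enabled per flag, it can emit one impression per feature test the user falls into.

for feature_key in client.get_enabled_features('user_123', {'plan': 'premium'}):
  print('Enabled feature: %s' % feature_key)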
optimizely/python-sdk
optimizely/optimizely.py
Optimizely.get_feature_variable_boolean
def get_feature_variable_boolean(self, feature_key, variable_key, user_id, attributes=None): """ Returns value for a certain boolean variable attached to a feature flag. Args: feature_key: Key of the feature whose variable's value is being accessed. variable_key: Key of the variable whose value is to be accessed. user_id: ID for user. attributes: Dict representing user attributes. Returns: Boolean value of the variable. None if: - Feature key is invalid. - Variable key is invalid. - Mismatch with type of variable. """ variable_type = entities.Variable.Type.BOOLEAN return self._get_feature_variable_for_type(feature_key, variable_key, variable_type, user_id, attributes)
python
def get_feature_variable_boolean(self, feature_key, variable_key, user_id, attributes=None): """ Returns value for a certain boolean variable attached to a feature flag. Args: feature_key: Key of the feature whose variable's value is being accessed. variable_key: Key of the variable whose value is to be accessed. user_id: ID for user. attributes: Dict representing user attributes. Returns: Boolean value of the variable. None if: - Feature key is invalid. - Variable key is invalid. - Mismatch with type of variable. """ variable_type = entities.Variable.Type.BOOLEAN return self._get_feature_variable_for_type(feature_key, variable_key, variable_type, user_id, attributes)
[ "def", "get_feature_variable_boolean", "(", "self", ",", "feature_key", ",", "variable_key", ",", "user_id", ",", "attributes", "=", "None", ")", ":", "variable_type", "=", "entities", ".", "Variable", ".", "Type", ".", "BOOLEAN", "return", "self", ".", "_get_feature_variable_for_type", "(", "feature_key", ",", "variable_key", ",", "variable_type", ",", "user_id", ",", "attributes", ")" ]
Returns value for a certain boolean variable attached to a feature flag. Args: feature_key: Key of the feature whose variable's value is being accessed. variable_key: Key of the variable whose value is to be accessed. user_id: ID for user. attributes: Dict representing user attributes. Returns: Boolean value of the variable. None if: - Feature key is invalid. - Variable key is invalid. - Mismatch with type of variable.
[ "Returns", "value", "for", "a", "certain", "boolean", "variable", "attached", "to", "a", "feature", "flag", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/optimizely.py#L507-L524
train
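The typed getters (boolean here, plus the double/integer/string variants documented in the following records) all delegate to _get_feature_variable_for_type; only the expected variable type differs. Feature and variable keys below are illustrative.

sort_enabled = client.get_feature_variable_boolean('new_search', 'enable_sort', 'user_123')
price_factor = client.get_feature_variable_double('new_search', 'price_factor', 'user_123')
page_size = client.get_feature_variable_integer('new_search', 'page_size', 'user_123')
sort_order = client.get_feature_variable_string('new_search', 'sort_order', 'user_123')
print(sort_enabled, price_factor, page_size, sort_order)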
optimizely/python-sdk
optimizely/optimizely.py
Optimizely.get_feature_variable_double
def get_feature_variable_double(self, feature_key, variable_key, user_id, attributes=None): """ Returns value for a certain double variable attached to a feature flag. Args: feature_key: Key of the feature whose variable's value is being accessed. variable_key: Key of the variable whose value is to be accessed. user_id: ID for user. attributes: Dict representing user attributes. Returns: Double value of the variable. None if: - Feature key is invalid. - Variable key is invalid. - Mismatch with type of variable. """ variable_type = entities.Variable.Type.DOUBLE return self._get_feature_variable_for_type(feature_key, variable_key, variable_type, user_id, attributes)
python
def get_feature_variable_double(self, feature_key, variable_key, user_id, attributes=None): """ Returns value for a certain double variable attached to a feature flag. Args: feature_key: Key of the feature whose variable's value is being accessed. variable_key: Key of the variable whose value is to be accessed. user_id: ID for user. attributes: Dict representing user attributes. Returns: Double value of the variable. None if: - Feature key is invalid. - Variable key is invalid. - Mismatch with type of variable. """ variable_type = entities.Variable.Type.DOUBLE return self._get_feature_variable_for_type(feature_key, variable_key, variable_type, user_id, attributes)
[ "def", "get_feature_variable_double", "(", "self", ",", "feature_key", ",", "variable_key", ",", "user_id", ",", "attributes", "=", "None", ")", ":", "variable_type", "=", "entities", ".", "Variable", ".", "Type", ".", "DOUBLE", "return", "self", ".", "_get_feature_variable_for_type", "(", "feature_key", ",", "variable_key", ",", "variable_type", ",", "user_id", ",", "attributes", ")" ]
Returns value for a certain double variable attached to a feature flag. Args: feature_key: Key of the feature whose variable's value is being accessed. variable_key: Key of the variable whose value is to be accessed. user_id: ID for user. attributes: Dict representing user attributes. Returns: Double value of the variable. None if: - Feature key is invalid. - Variable key is invalid. - Mismatch with type of variable.
[ "Returns", "value", "for", "a", "certain", "double", "variable", "attached", "to", "a", "feature", "flag", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/optimizely.py#L526-L543
train
optimizely/python-sdk
optimizely/optimizely.py
Optimizely.get_feature_variable_integer
def get_feature_variable_integer(self, feature_key, variable_key, user_id, attributes=None): """ Returns value for a certain integer variable attached to a feature flag. Args: feature_key: Key of the feature whose variable's value is being accessed. variable_key: Key of the variable whose value is to be accessed. user_id: ID for user. attributes: Dict representing user attributes. Returns: Integer value of the variable. None if: - Feature key is invalid. - Variable key is invalid. - Mismatch with type of variable. """ variable_type = entities.Variable.Type.INTEGER return self._get_feature_variable_for_type(feature_key, variable_key, variable_type, user_id, attributes)
python
def get_feature_variable_integer(self, feature_key, variable_key, user_id, attributes=None): """ Returns value for a certain integer variable attached to a feature flag. Args: feature_key: Key of the feature whose variable's value is being accessed. variable_key: Key of the variable whose value is to be accessed. user_id: ID for user. attributes: Dict representing user attributes. Returns: Integer value of the variable. None if: - Feature key is invalid. - Variable key is invalid. - Mismatch with type of variable. """ variable_type = entities.Variable.Type.INTEGER return self._get_feature_variable_for_type(feature_key, variable_key, variable_type, user_id, attributes)
[ "def", "get_feature_variable_integer", "(", "self", ",", "feature_key", ",", "variable_key", ",", "user_id", ",", "attributes", "=", "None", ")", ":", "variable_type", "=", "entities", ".", "Variable", ".", "Type", ".", "INTEGER", "return", "self", ".", "_get_feature_variable_for_type", "(", "feature_key", ",", "variable_key", ",", "variable_type", ",", "user_id", ",", "attributes", ")" ]
Returns value for a certain integer variable attached to a feature flag. Args: feature_key: Key of the feature whose variable's value is being accessed. variable_key: Key of the variable whose value is to be accessed. user_id: ID for user. attributes: Dict representing user attributes. Returns: Integer value of the variable. None if: - Feature key is invalid. - Variable key is invalid. - Mismatch with type of variable.
[ "Returns", "value", "for", "a", "certain", "integer", "variable", "attached", "to", "a", "feature", "flag", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/optimizely.py#L545-L562
train
optimizely/python-sdk
optimizely/optimizely.py
Optimizely.get_feature_variable_string
def get_feature_variable_string(self, feature_key, variable_key, user_id, attributes=None): """ Returns value for a certain string variable attached to a feature. Args: feature_key: Key of the feature whose variable's value is being accessed. variable_key: Key of the variable whose value is to be accessed. user_id: ID for user. attributes: Dict representing user attributes. Returns: String value of the variable. None if: - Feature key is invalid. - Variable key is invalid. - Mismatch with type of variable. """ variable_type = entities.Variable.Type.STRING return self._get_feature_variable_for_type(feature_key, variable_key, variable_type, user_id, attributes)
python
def get_feature_variable_string(self, feature_key, variable_key, user_id, attributes=None): """ Returns value for a certain string variable attached to a feature. Args: feature_key: Key of the feature whose variable's value is being accessed. variable_key: Key of the variable whose value is to be accessed. user_id: ID for user. attributes: Dict representing user attributes. Returns: String value of the variable. None if: - Feature key is invalid. - Variable key is invalid. - Mismatch with type of variable. """ variable_type = entities.Variable.Type.STRING return self._get_feature_variable_for_type(feature_key, variable_key, variable_type, user_id, attributes)
[ "def", "get_feature_variable_string", "(", "self", ",", "feature_key", ",", "variable_key", ",", "user_id", ",", "attributes", "=", "None", ")", ":", "variable_type", "=", "entities", ".", "Variable", ".", "Type", ".", "STRING", "return", "self", ".", "_get_feature_variable_for_type", "(", "feature_key", ",", "variable_key", ",", "variable_type", ",", "user_id", ",", "attributes", ")" ]
Returns value for a certain string variable attached to a feature. Args: feature_key: Key of the feature whose variable's value is being accessed. variable_key: Key of the variable whose value is to be accessed. user_id: ID for user. attributes: Dict representing user attributes. Returns: String value of the variable. None if: - Feature key is invalid. - Variable key is invalid. - Mismatch with type of variable.
[ "Returns", "value", "for", "a", "certain", "string", "variable", "attached", "to", "a", "feature", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/optimizely.py#L564-L581
train
optimizely/python-sdk
optimizely/optimizely.py
Optimizely.set_forced_variation
def set_forced_variation(self, experiment_key, user_id, variation_key): """ Force a user into a variation for a given experiment. Args: experiment_key: A string key identifying the experiment. user_id: The user ID. variation_key: A string variation key that specifies the variation which the user will be forced into. If null, then clear the existing experiment-to-variation mapping. Returns: A boolean value that indicates if the set completed successfully. """ if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('set_forced_variation')) return False if not validator.is_non_empty_string(experiment_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('experiment_key')) return False if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return False return self.config.set_forced_variation(experiment_key, user_id, variation_key)
python
def set_forced_variation(self, experiment_key, user_id, variation_key): """ Force a user into a variation for a given experiment. Args: experiment_key: A string key identifying the experiment. user_id: The user ID. variation_key: A string variation key that specifies the variation which the user will be forced into. If null, then clear the existing experiment-to-variation mapping. Returns: A boolean value that indicates if the set completed successfully. """ if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('set_forced_variation')) return False if not validator.is_non_empty_string(experiment_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('experiment_key')) return False if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return False return self.config.set_forced_variation(experiment_key, user_id, variation_key)
[ "def", "set_forced_variation", "(", "self", ",", "experiment_key", ",", "user_id", ",", "variation_key", ")", ":", "if", "not", "self", ".", "is_valid", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_DATAFILE", ".", "format", "(", "'set_forced_variation'", ")", ")", "return", "False", "if", "not", "validator", ".", "is_non_empty_string", "(", "experiment_key", ")", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'experiment_key'", ")", ")", "return", "False", "if", "not", "isinstance", "(", "user_id", ",", "string_types", ")", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'user_id'", ")", ")", "return", "False", "return", "self", ".", "config", ".", "set_forced_variation", "(", "experiment_key", ",", "user_id", ",", "variation_key", ")" ]
Force a user into a variation for a given experiment. Args: experiment_key: A string key identifying the experiment. user_id: The user ID. variation_key: A string variation key that specifies the variation which the user will be forced into. If null, then clear the existing experiment-to-variation mapping. Returns: A boolean value that indicates if the set completed successfully.
[ "Force", "a", "user", "into", "a", "variation", "for", "a", "given", "experiment", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/optimizely.py#L583-L608
train
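A hypothetical usage sketch for the whitelisting call above; the datafile path and all keys are placeholders. Per the docstring, passing None as the variation key clears the mapping.
from optimizely import optimizely
client = optimizely.Optimizely(open('datafile.json').read())  # hypothetical datafile path
ok = client.set_forced_variation('homepage_test', 'qa_user_1', 'variation_b')  # placeholder keys
print(ok)  # True if the mapping was stored, False on invalid input or an invalid datafile
client.set_forced_variation('homepage_test', 'qa_user_1', None)  # None clears the forced mapping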
optimizely/python-sdk
optimizely/optimizely.py
Optimizely.get_forced_variation
def get_forced_variation(self, experiment_key, user_id): """ Gets the forced variation for a given user and experiment. Args: experiment_key: A string key identifying the experiment. user_id: The user ID. Returns: The forced variation key. None if no forced variation key. """ if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('get_forced_variation')) return None if not validator.is_non_empty_string(experiment_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('experiment_key')) return None if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return None forced_variation = self.config.get_forced_variation(experiment_key, user_id) return forced_variation.key if forced_variation else None
python
def get_forced_variation(self, experiment_key, user_id): """ Gets the forced variation for a given user and experiment. Args: experiment_key: A string key identifying the experiment. user_id: The user ID. Returns: The forced variation key. None if no forced variation key. """ if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('get_forced_variation')) return None if not validator.is_non_empty_string(experiment_key): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('experiment_key')) return None if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id')) return None forced_variation = self.config.get_forced_variation(experiment_key, user_id) return forced_variation.key if forced_variation else None
[ "def", "get_forced_variation", "(", "self", ",", "experiment_key", ",", "user_id", ")", ":", "if", "not", "self", ".", "is_valid", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_DATAFILE", ".", "format", "(", "'get_forced_variation'", ")", ")", "return", "None", "if", "not", "validator", ".", "is_non_empty_string", "(", "experiment_key", ")", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'experiment_key'", ")", ")", "return", "None", "if", "not", "isinstance", "(", "user_id", ",", "string_types", ")", ":", "self", ".", "logger", ".", "error", "(", "enums", ".", "Errors", ".", "INVALID_INPUT_ERROR", ".", "format", "(", "'user_id'", ")", ")", "return", "None", "forced_variation", "=", "self", ".", "config", ".", "get_forced_variation", "(", "experiment_key", ",", "user_id", ")", "return", "forced_variation", ".", "key", "if", "forced_variation", "else", "None" ]
Gets the forced variation for a given user and experiment. Args: experiment_key: A string key identifying the experiment. user_id: The user ID. Returns: The forced variation key. None if no forced variation key.
[ "Gets", "the", "forced", "variation", "for", "a", "given", "user", "and", "experiment", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/optimizely.py#L610-L634
train
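A hypothetical companion sketch reading the forced mapping back; the datafile path and keys remain placeholders.
from optimizely import optimizely
client = optimizely.Optimizely(open('datafile.json').read())  # hypothetical datafile path
client.set_forced_variation('homepage_test', 'qa_user_1', 'variation_b')
print(client.get_forced_variation('homepage_test', 'qa_user_1'))   # 'variation_b'
print(client.get_forced_variation('homepage_test', 'other_user'))  # None when no mapping exists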
optimizely/python-sdk
optimizely/helpers/audience.py
is_user_in_experiment
def is_user_in_experiment(config, experiment, attributes, logger): """ Determine for given experiment if user satisfies the audiences for the experiment. Args: config: project_config.ProjectConfig object representing the project. experiment: Object representing the experiment. attributes: Dict representing user attributes which will be used in determining if the audience conditions are met. If not provided, default to an empty dict. logger: Provides a logger to send log messages to. Returns: Boolean representing if user satisfies audience conditions for any of the audiences or not. """ audience_conditions = experiment.getAudienceConditionsOrIds() logger.debug(audience_logs.EVALUATING_AUDIENCES_COMBINED.format( experiment.key, json.dumps(audience_conditions) )) # Return True in case there are no audiences if audience_conditions is None or audience_conditions == []: logger.info(audience_logs.AUDIENCE_EVALUATION_RESULT_COMBINED.format( experiment.key, 'TRUE' )) return True if attributes is None: attributes = {} def evaluate_custom_attr(audienceId, index): audience = config.get_audience(audienceId) custom_attr_condition_evaluator = condition_helper.CustomAttributeConditionEvaluator( audience.conditionList, attributes, logger) return custom_attr_condition_evaluator.evaluate(index) def evaluate_audience(audienceId): audience = config.get_audience(audienceId) if audience is None: return None logger.debug(audience_logs.EVALUATING_AUDIENCE.format(audienceId, audience.conditions)) result = condition_tree_evaluator.evaluate( audience.conditionStructure, lambda index: evaluate_custom_attr(audienceId, index) ) result_str = str(result).upper() if result is not None else 'UNKNOWN' logger.info(audience_logs.AUDIENCE_EVALUATION_RESULT.format(audienceId, result_str)) return result eval_result = condition_tree_evaluator.evaluate( audience_conditions, evaluate_audience ) eval_result = eval_result or False logger.info(audience_logs.AUDIENCE_EVALUATION_RESULT_COMBINED.format( experiment.key, str(eval_result).upper() )) return eval_result
python
def is_user_in_experiment(config, experiment, attributes, logger): """ Determine for given experiment if user satisfies the audiences for the experiment. Args: config: project_config.ProjectConfig object representing the project. experiment: Object representing the experiment. attributes: Dict representing user attributes which will be used in determining if the audience conditions are met. If not provided, default to an empty dict. logger: Provides a logger to send log messages to. Returns: Boolean representing if user satisfies audience conditions for any of the audiences or not. """ audience_conditions = experiment.getAudienceConditionsOrIds() logger.debug(audience_logs.EVALUATING_AUDIENCES_COMBINED.format( experiment.key, json.dumps(audience_conditions) )) # Return True in case there are no audiences if audience_conditions is None or audience_conditions == []: logger.info(audience_logs.AUDIENCE_EVALUATION_RESULT_COMBINED.format( experiment.key, 'TRUE' )) return True if attributes is None: attributes = {} def evaluate_custom_attr(audienceId, index): audience = config.get_audience(audienceId) custom_attr_condition_evaluator = condition_helper.CustomAttributeConditionEvaluator( audience.conditionList, attributes, logger) return custom_attr_condition_evaluator.evaluate(index) def evaluate_audience(audienceId): audience = config.get_audience(audienceId) if audience is None: return None logger.debug(audience_logs.EVALUATING_AUDIENCE.format(audienceId, audience.conditions)) result = condition_tree_evaluator.evaluate( audience.conditionStructure, lambda index: evaluate_custom_attr(audienceId, index) ) result_str = str(result).upper() if result is not None else 'UNKNOWN' logger.info(audience_logs.AUDIENCE_EVALUATION_RESULT.format(audienceId, result_str)) return result eval_result = condition_tree_evaluator.evaluate( audience_conditions, evaluate_audience ) eval_result = eval_result or False logger.info(audience_logs.AUDIENCE_EVALUATION_RESULT_COMBINED.format( experiment.key, str(eval_result).upper() )) return eval_result
[ "def", "is_user_in_experiment", "(", "config", ",", "experiment", ",", "attributes", ",", "logger", ")", ":", "audience_conditions", "=", "experiment", ".", "getAudienceConditionsOrIds", "(", ")", "logger", ".", "debug", "(", "audience_logs", ".", "EVALUATING_AUDIENCES_COMBINED", ".", "format", "(", "experiment", ".", "key", ",", "json", ".", "dumps", "(", "audience_conditions", ")", ")", ")", "# Return True in case there are no audiences", "if", "audience_conditions", "is", "None", "or", "audience_conditions", "==", "[", "]", ":", "logger", ".", "info", "(", "audience_logs", ".", "AUDIENCE_EVALUATION_RESULT_COMBINED", ".", "format", "(", "experiment", ".", "key", ",", "'TRUE'", ")", ")", "return", "True", "if", "attributes", "is", "None", ":", "attributes", "=", "{", "}", "def", "evaluate_custom_attr", "(", "audienceId", ",", "index", ")", ":", "audience", "=", "config", ".", "get_audience", "(", "audienceId", ")", "custom_attr_condition_evaluator", "=", "condition_helper", ".", "CustomAttributeConditionEvaluator", "(", "audience", ".", "conditionList", ",", "attributes", ",", "logger", ")", "return", "custom_attr_condition_evaluator", ".", "evaluate", "(", "index", ")", "def", "evaluate_audience", "(", "audienceId", ")", ":", "audience", "=", "config", ".", "get_audience", "(", "audienceId", ")", "if", "audience", "is", "None", ":", "return", "None", "logger", ".", "debug", "(", "audience_logs", ".", "EVALUATING_AUDIENCE", ".", "format", "(", "audienceId", ",", "audience", ".", "conditions", ")", ")", "result", "=", "condition_tree_evaluator", ".", "evaluate", "(", "audience", ".", "conditionStructure", ",", "lambda", "index", ":", "evaluate_custom_attr", "(", "audienceId", ",", "index", ")", ")", "result_str", "=", "str", "(", "result", ")", ".", "upper", "(", ")", "if", "result", "is", "not", "None", "else", "'UNKNOWN'", "logger", ".", "info", "(", "audience_logs", ".", "AUDIENCE_EVALUATION_RESULT", ".", "format", "(", "audienceId", ",", "result_str", ")", ")", "return", "result", "eval_result", "=", "condition_tree_evaluator", ".", "evaluate", "(", "audience_conditions", ",", "evaluate_audience", ")", "eval_result", "=", "eval_result", "or", "False", "logger", ".", "info", "(", "audience_logs", ".", "AUDIENCE_EVALUATION_RESULT_COMBINED", ".", "format", "(", "experiment", ".", "key", ",", "str", "(", "eval_result", ")", ".", "upper", "(", ")", ")", ")", "return", "eval_result" ]
Determine for given experiment if user satisfies the audiences for the experiment. Args: config: project_config.ProjectConfig object representing the project. experiment: Object representing the experiment. attributes: Dict representing user attributes which will be used in determining if the audience conditions are met. If not provided, default to an empty dict. logger: Provides a logger to send log messages to. Returns: Boolean representing if user satisfies audience conditions for any of the audiences or not.
[ "Determine", "for", "given", "experiment", "if", "user", "satisfies", "the", "audiences", "for", "the", "experiment", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/helpers/audience.py#L21-L91
train
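A standalone sketch (not the SDK itself) of the result handling described above: no attached audiences means the user qualifies, and an inconclusive tree evaluation is coerced to False.
def combine_audience_result(audience_conditions, tree_result):
    # Mirrors the two rules above: empty/missing conditions -> True; None from the tree -> False.
    if audience_conditions is None or audience_conditions == []:
        return True
    return tree_result or False
print(combine_audience_result([], None))         # True  - nothing to evaluate
print(combine_audience_result(['aud_1'], None))  # False - evaluation was inconclusive
print(combine_audience_result(['aud_1'], True))  # True  - audience conditions matched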
optimizely/python-sdk
optimizely/event_builder.py
BaseEventBuilder._get_common_params
def _get_common_params(self, user_id, attributes): """ Get params which are used same in both conversion and impression events. Args: user_id: ID for user. attributes: Dict representing user attributes and values which need to be recorded. Returns: Dict consisting of parameters common to both impression and conversion events. """ commonParams = {} commonParams[self.EventParams.PROJECT_ID] = self._get_project_id() commonParams[self.EventParams.ACCOUNT_ID] = self._get_account_id() visitor = {} visitor[self.EventParams.END_USER_ID] = user_id visitor[self.EventParams.SNAPSHOTS] = [] commonParams[self.EventParams.USERS] = [] commonParams[self.EventParams.USERS].append(visitor) commonParams[self.EventParams.USERS][0][self.EventParams.ATTRIBUTES] = self._get_attributes(attributes) commonParams[self.EventParams.SOURCE_SDK_TYPE] = 'python-sdk' commonParams[self.EventParams.ENRICH_DECISIONS] = True commonParams[self.EventParams.SOURCE_SDK_VERSION] = version.__version__ commonParams[self.EventParams.ANONYMIZE_IP] = self._get_anonymize_ip() commonParams[self.EventParams.REVISION] = self._get_revision() return commonParams
python
def _get_common_params(self, user_id, attributes): """ Get params which are used same in both conversion and impression events. Args: user_id: ID for user. attributes: Dict representing user attributes and values which need to be recorded. Returns: Dict consisting of parameters common to both impression and conversion events. """ commonParams = {} commonParams[self.EventParams.PROJECT_ID] = self._get_project_id() commonParams[self.EventParams.ACCOUNT_ID] = self._get_account_id() visitor = {} visitor[self.EventParams.END_USER_ID] = user_id visitor[self.EventParams.SNAPSHOTS] = [] commonParams[self.EventParams.USERS] = [] commonParams[self.EventParams.USERS].append(visitor) commonParams[self.EventParams.USERS][0][self.EventParams.ATTRIBUTES] = self._get_attributes(attributes) commonParams[self.EventParams.SOURCE_SDK_TYPE] = 'python-sdk' commonParams[self.EventParams.ENRICH_DECISIONS] = True commonParams[self.EventParams.SOURCE_SDK_VERSION] = version.__version__ commonParams[self.EventParams.ANONYMIZE_IP] = self._get_anonymize_ip() commonParams[self.EventParams.REVISION] = self._get_revision() return commonParams
[ "def", "_get_common_params", "(", "self", ",", "user_id", ",", "attributes", ")", ":", "commonParams", "=", "{", "}", "commonParams", "[", "self", ".", "EventParams", ".", "PROJECT_ID", "]", "=", "self", ".", "_get_project_id", "(", ")", "commonParams", "[", "self", ".", "EventParams", ".", "ACCOUNT_ID", "]", "=", "self", ".", "_get_account_id", "(", ")", "visitor", "=", "{", "}", "visitor", "[", "self", ".", "EventParams", ".", "END_USER_ID", "]", "=", "user_id", "visitor", "[", "self", ".", "EventParams", ".", "SNAPSHOTS", "]", "=", "[", "]", "commonParams", "[", "self", ".", "EventParams", ".", "USERS", "]", "=", "[", "]", "commonParams", "[", "self", ".", "EventParams", ".", "USERS", "]", ".", "append", "(", "visitor", ")", "commonParams", "[", "self", ".", "EventParams", ".", "USERS", "]", "[", "0", "]", "[", "self", ".", "EventParams", ".", "ATTRIBUTES", "]", "=", "self", ".", "_get_attributes", "(", "attributes", ")", "commonParams", "[", "self", ".", "EventParams", ".", "SOURCE_SDK_TYPE", "]", "=", "'python-sdk'", "commonParams", "[", "self", ".", "EventParams", ".", "ENRICH_DECISIONS", "]", "=", "True", "commonParams", "[", "self", ".", "EventParams", ".", "SOURCE_SDK_VERSION", "]", "=", "version", ".", "__version__", "commonParams", "[", "self", ".", "EventParams", ".", "ANONYMIZE_IP", "]", "=", "self", ".", "_get_anonymize_ip", "(", ")", "commonParams", "[", "self", ".", "EventParams", ".", "REVISION", "]", "=", "self", ".", "_get_revision", "(", ")", "return", "commonParams" ]
Get params which are used same in both conversion and impression events. Args: user_id: ID for user. attributes: Dict representing user attributes and values which need to be recorded. Returns: Dict consisting of parameters common to both impression and conversion events.
[ "Get", "params", "which", "are", "used", "same", "in", "both", "conversion", "and", "impression", "events", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/event_builder.py#L109-L138
train
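A standalone sketch of the payload shape assembled above. The key names are illustrative stand-ins for the SDK's EventParams constants, and the IDs, version, and revision are placeholder values.
common_params = {
    'project_id': '1234',            # placeholder
    'account_id': '5678',            # placeholder
    'visitors': [{
        'visitor_id': 'user_123',
        'snapshots': [],             # impression/conversion snapshots are appended later
        'attributes': [],
    }],
    'client_name': 'python-sdk',
    'enrich_decisions': True,
    'client_version': '3.x.x',       # placeholder for version.__version__
    'anonymize_ip': False,
    'revision': '42',                # placeholder
}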
optimizely/python-sdk
optimizely/event_builder.py
EventBuilder._get_required_params_for_impression
def _get_required_params_for_impression(self, experiment, variation_id): """ Get parameters that are required for the impression event to register. Args: experiment: Experiment for which impression needs to be recorded. variation_id: ID for variation which would be presented to user. Returns: Dict consisting of decisions and events info for impression event. """ snapshot = {} snapshot[self.EventParams.DECISIONS] = [{ self.EventParams.EXPERIMENT_ID: experiment.id, self.EventParams.VARIATION_ID: variation_id, self.EventParams.CAMPAIGN_ID: experiment.layerId }] snapshot[self.EventParams.EVENTS] = [{ self.EventParams.EVENT_ID: experiment.layerId, self.EventParams.TIME: self._get_time(), self.EventParams.KEY: 'campaign_activated', self.EventParams.UUID: str(uuid.uuid4()) }] return snapshot
python
def _get_required_params_for_impression(self, experiment, variation_id): """ Get parameters that are required for the impression event to register. Args: experiment: Experiment for which impression needs to be recorded. variation_id: ID for variation which would be presented to user. Returns: Dict consisting of decisions and events info for impression event. """ snapshot = {} snapshot[self.EventParams.DECISIONS] = [{ self.EventParams.EXPERIMENT_ID: experiment.id, self.EventParams.VARIATION_ID: variation_id, self.EventParams.CAMPAIGN_ID: experiment.layerId }] snapshot[self.EventParams.EVENTS] = [{ self.EventParams.EVENT_ID: experiment.layerId, self.EventParams.TIME: self._get_time(), self.EventParams.KEY: 'campaign_activated', self.EventParams.UUID: str(uuid.uuid4()) }] return snapshot
[ "def", "_get_required_params_for_impression", "(", "self", ",", "experiment", ",", "variation_id", ")", ":", "snapshot", "=", "{", "}", "snapshot", "[", "self", ".", "EventParams", ".", "DECISIONS", "]", "=", "[", "{", "self", ".", "EventParams", ".", "EXPERIMENT_ID", ":", "experiment", ".", "id", ",", "self", ".", "EventParams", ".", "VARIATION_ID", ":", "variation_id", ",", "self", ".", "EventParams", ".", "CAMPAIGN_ID", ":", "experiment", ".", "layerId", "}", "]", "snapshot", "[", "self", ".", "EventParams", ".", "EVENTS", "]", "=", "[", "{", "self", ".", "EventParams", ".", "EVENT_ID", ":", "experiment", ".", "layerId", ",", "self", ".", "EventParams", ".", "TIME", ":", "self", ".", "_get_time", "(", ")", ",", "self", ".", "EventParams", ".", "KEY", ":", "'campaign_activated'", ",", "self", ".", "EventParams", ".", "UUID", ":", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", "}", "]", "return", "snapshot" ]
Get parameters that are required for the impression event to register. Args: experiment: Experiment for which impression needs to be recorded. variation_id: ID for variation which would be presented to user. Returns: Dict consisting of decisions and events info for impression event.
[ "Get", "parameters", "that", "are", "required", "for", "the", "impression", "event", "to", "register", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/event_builder.py#L211-L236
train
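A standalone sketch of one impression snapshot with the same structure as above; the key names are illustrative, the IDs are placeholders, and the millisecond timestamp is an assumption about the builder's _get_time helper.
import time
import uuid
snapshot = {
    'decisions': [{
        'experiment_id': 'exp_1',    # placeholder IDs
        'variation_id': 'var_1',
        'campaign_id': 'layer_1',
    }],
    'events': [{
        'entity_id': 'layer_1',                       # the layer/campaign ID doubles as the event entity
        'timestamp': int(round(time.time() * 1000)),  # assumed millisecond convention
        'key': 'campaign_activated',
        'uuid': str(uuid.uuid4()),
    }],
}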
optimizely/python-sdk
optimizely/event_builder.py
EventBuilder._get_required_params_for_conversion
def _get_required_params_for_conversion(self, event_key, event_tags): """ Get parameters that are required for the conversion event to register. Args: event_key: Key representing the event which needs to be recorded. event_tags: Dict representing metadata associated with the event. Returns: Dict consisting of the decisions and events info for conversion event. """ snapshot = {} event_dict = { self.EventParams.EVENT_ID: self.config.get_event(event_key).id, self.EventParams.TIME: self._get_time(), self.EventParams.KEY: event_key, self.EventParams.UUID: str(uuid.uuid4()) } if event_tags: revenue_value = event_tag_utils.get_revenue_value(event_tags) if revenue_value is not None: event_dict[event_tag_utils.REVENUE_METRIC_TYPE] = revenue_value numeric_value = event_tag_utils.get_numeric_value(event_tags, self.config.logger) if numeric_value is not None: event_dict[event_tag_utils.NUMERIC_METRIC_TYPE] = numeric_value if len(event_tags) > 0: event_dict[self.EventParams.TAGS] = event_tags snapshot[self.EventParams.EVENTS] = [event_dict] return snapshot
python
def _get_required_params_for_conversion(self, event_key, event_tags): """ Get parameters that are required for the conversion event to register. Args: event_key: Key representing the event which needs to be recorded. event_tags: Dict representing metadata associated with the event. Returns: Dict consisting of the decisions and events info for conversion event. """ snapshot = {} event_dict = { self.EventParams.EVENT_ID: self.config.get_event(event_key).id, self.EventParams.TIME: self._get_time(), self.EventParams.KEY: event_key, self.EventParams.UUID: str(uuid.uuid4()) } if event_tags: revenue_value = event_tag_utils.get_revenue_value(event_tags) if revenue_value is not None: event_dict[event_tag_utils.REVENUE_METRIC_TYPE] = revenue_value numeric_value = event_tag_utils.get_numeric_value(event_tags, self.config.logger) if numeric_value is not None: event_dict[event_tag_utils.NUMERIC_METRIC_TYPE] = numeric_value if len(event_tags) > 0: event_dict[self.EventParams.TAGS] = event_tags snapshot[self.EventParams.EVENTS] = [event_dict] return snapshot
[ "def", "_get_required_params_for_conversion", "(", "self", ",", "event_key", ",", "event_tags", ")", ":", "snapshot", "=", "{", "}", "event_dict", "=", "{", "self", ".", "EventParams", ".", "EVENT_ID", ":", "self", ".", "config", ".", "get_event", "(", "event_key", ")", ".", "id", ",", "self", ".", "EventParams", ".", "TIME", ":", "self", ".", "_get_time", "(", ")", ",", "self", ".", "EventParams", ".", "KEY", ":", "event_key", ",", "self", ".", "EventParams", ".", "UUID", ":", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", "}", "if", "event_tags", ":", "revenue_value", "=", "event_tag_utils", ".", "get_revenue_value", "(", "event_tags", ")", "if", "revenue_value", "is", "not", "None", ":", "event_dict", "[", "event_tag_utils", ".", "REVENUE_METRIC_TYPE", "]", "=", "revenue_value", "numeric_value", "=", "event_tag_utils", ".", "get_numeric_value", "(", "event_tags", ",", "self", ".", "config", ".", "logger", ")", "if", "numeric_value", "is", "not", "None", ":", "event_dict", "[", "event_tag_utils", ".", "NUMERIC_METRIC_TYPE", "]", "=", "numeric_value", "if", "len", "(", "event_tags", ")", ">", "0", ":", "event_dict", "[", "self", ".", "EventParams", ".", "TAGS", "]", "=", "event_tags", "snapshot", "[", "self", ".", "EventParams", ".", "EVENTS", "]", "=", "[", "event_dict", "]", "return", "snapshot" ]
Get parameters that are required for the conversion event to register. Args: event_key: Key representing the event which needs to be recorded. event_tags: Dict representing metadata associated with the event. Returns: Dict consisting of the decisions and events info for conversion event.
[ "Get", "parameters", "that", "are", "required", "for", "the", "conversion", "event", "to", "register", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/event_builder.py#L238-L270
train
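A standalone sketch of the tag handling above: when event tags are supplied, a revenue metric and a numeric value metric are lifted to top-level fields and the raw tags are kept alongside. The reserved tag names 'revenue' and 'value' are an assumption about what the event_tag_utils helpers look for.
def enrich_conversion_event(event_dict, event_tags):
    if not event_tags:
        return event_dict
    revenue = event_tags.get('revenue')               # assumed reserved tag name
    if isinstance(revenue, int) and not isinstance(revenue, bool):
        event_dict['revenue'] = revenue
    value = event_tags.get('value')                   # assumed reserved tag name
    if isinstance(value, (int, float)) and not isinstance(value, bool):
        event_dict['value'] = float(value)
    event_dict['tags'] = event_tags                   # non-empty tags are always echoed back
    return event_dict
print(enrich_conversion_event({'key': 'purchase'}, {'revenue': 4200, 'value': 4.2, 'size': 'L'}))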
optimizely/python-sdk
optimizely/event_builder.py
EventBuilder.create_impression_event
def create_impression_event(self, experiment, variation_id, user_id, attributes): """ Create impression Event to be sent to the logging endpoint. Args: experiment: Experiment for which impression needs to be recorded. variation_id: ID for variation which would be presented to user. user_id: ID for user. attributes: Dict representing user attributes and values which need to be recorded. Returns: Event object encapsulating the impression event. """ params = self._get_common_params(user_id, attributes) impression_params = self._get_required_params_for_impression(experiment, variation_id) params[self.EventParams.USERS][0][self.EventParams.SNAPSHOTS].append(impression_params) return Event(self.EVENTS_URL, params, http_verb=self.HTTP_VERB, headers=self.HTTP_HEADERS)
python
def create_impression_event(self, experiment, variation_id, user_id, attributes): """ Create impression Event to be sent to the logging endpoint. Args: experiment: Experiment for which impression needs to be recorded. variation_id: ID for variation which would be presented to user. user_id: ID for user. attributes: Dict representing user attributes and values which need to be recorded. Returns: Event object encapsulating the impression event. """ params = self._get_common_params(user_id, attributes) impression_params = self._get_required_params_for_impression(experiment, variation_id) params[self.EventParams.USERS][0][self.EventParams.SNAPSHOTS].append(impression_params) return Event(self.EVENTS_URL, params, http_verb=self.HTTP_VERB, headers=self.HTTP_HEADERS)
[ "def", "create_impression_event", "(", "self", ",", "experiment", ",", "variation_id", ",", "user_id", ",", "attributes", ")", ":", "params", "=", "self", ".", "_get_common_params", "(", "user_id", ",", "attributes", ")", "impression_params", "=", "self", ".", "_get_required_params_for_impression", "(", "experiment", ",", "variation_id", ")", "params", "[", "self", ".", "EventParams", ".", "USERS", "]", "[", "0", "]", "[", "self", ".", "EventParams", ".", "SNAPSHOTS", "]", ".", "append", "(", "impression_params", ")", "return", "Event", "(", "self", ".", "EVENTS_URL", ",", "params", ",", "http_verb", "=", "self", ".", "HTTP_VERB", ",", "headers", "=", "self", ".", "HTTP_HEADERS", ")" ]
Create impression Event to be sent to the logging endpoint. Args: experiment: Experiment for which impression needs to be recorded. variation_id: ID for variation which would be presented to user. user_id: ID for user. attributes: Dict representing user attributes and values which need to be recorded. Returns: Event object encapsulating the impression event.
[ "Create", "impression", "Event", "to", "be", "sent", "to", "the", "logging", "endpoint", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/event_builder.py#L272-L293
train
optimizely/python-sdk
optimizely/event_builder.py
EventBuilder.create_conversion_event
def create_conversion_event(self, event_key, user_id, attributes, event_tags): """ Create conversion Event to be sent to the logging endpoint. Args: event_key: Key representing the event which needs to be recorded. user_id: ID for user. attributes: Dict representing user attributes and values. event_tags: Dict representing metadata associated with the event. Returns: Event object encapsulating the conversion event. """ params = self._get_common_params(user_id, attributes) conversion_params = self._get_required_params_for_conversion(event_key, event_tags) params[self.EventParams.USERS][0][self.EventParams.SNAPSHOTS].append(conversion_params) return Event(self.EVENTS_URL, params, http_verb=self.HTTP_VERB, headers=self.HTTP_HEADERS)
python
def create_conversion_event(self, event_key, user_id, attributes, event_tags): """ Create conversion Event to be sent to the logging endpoint. Args: event_key: Key representing the event which needs to be recorded. user_id: ID for user. attributes: Dict representing user attributes and values. event_tags: Dict representing metadata associated with the event. Returns: Event object encapsulating the conversion event. """ params = self._get_common_params(user_id, attributes) conversion_params = self._get_required_params_for_conversion(event_key, event_tags) params[self.EventParams.USERS][0][self.EventParams.SNAPSHOTS].append(conversion_params) return Event(self.EVENTS_URL, params, http_verb=self.HTTP_VERB, headers=self.HTTP_HEADERS)
[ "def", "create_conversion_event", "(", "self", ",", "event_key", ",", "user_id", ",", "attributes", ",", "event_tags", ")", ":", "params", "=", "self", ".", "_get_common_params", "(", "user_id", ",", "attributes", ")", "conversion_params", "=", "self", ".", "_get_required_params_for_conversion", "(", "event_key", ",", "event_tags", ")", "params", "[", "self", ".", "EventParams", ".", "USERS", "]", "[", "0", "]", "[", "self", ".", "EventParams", ".", "SNAPSHOTS", "]", ".", "append", "(", "conversion_params", ")", "return", "Event", "(", "self", ".", "EVENTS_URL", ",", "params", ",", "http_verb", "=", "self", ".", "HTTP_VERB", ",", "headers", "=", "self", ".", "HTTP_HEADERS", ")" ]
Create conversion Event to be sent to the logging endpoint. Args: event_key: Key representing the event which needs to be recorded. user_id: ID for user. attributes: Dict representing user attributes and values. event_tags: Dict representing metadata associated with the event. Returns: Event object encapsulating the conversion event.
[ "Create", "conversion", "Event", "to", "be", "sent", "to", "the", "logging", "endpoint", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/event_builder.py#L295-L315
train
optimizely/python-sdk
optimizely/helpers/condition.py
_audience_condition_deserializer
def _audience_condition_deserializer(obj_dict): """ Deserializer defining how dict objects need to be decoded for audience conditions. Args: obj_dict: Dict representing one audience condition. Returns: List consisting of condition key with corresponding value, type and match. """ return [ obj_dict.get('name'), obj_dict.get('value'), obj_dict.get('type'), obj_dict.get('match') ]
python
def _audience_condition_deserializer(obj_dict): """ Deserializer defining how dict objects need to be decoded for audience conditions. Args: obj_dict: Dict representing one audience condition. Returns: List consisting of condition key with corresponding value, type and match. """ return [ obj_dict.get('name'), obj_dict.get('value'), obj_dict.get('type'), obj_dict.get('match') ]
[ "def", "_audience_condition_deserializer", "(", "obj_dict", ")", ":", "return", "[", "obj_dict", ".", "get", "(", "'name'", ")", ",", "obj_dict", ".", "get", "(", "'value'", ")", ",", "obj_dict", ".", "get", "(", "'type'", ")", ",", "obj_dict", ".", "get", "(", "'match'", ")", "]" ]
Deserializer defining how dict objects need to be decoded for audience conditions. Args: obj_dict: Dict representing one audience condition. Returns: List consisting of condition key with corresponding value, type and match.
[ "Deserializer", "defining", "how", "dict", "objects", "need", "to", "be", "decoded", "for", "audience", "conditions", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/helpers/condition.py#L328-L342
train
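A runnable restatement of the deserializer above showing the flattened [name, value, type, match] shape it produces; the sample condition dict is illustrative.
def audience_condition_deserializer(obj_dict):
    return [obj_dict.get('name'), obj_dict.get('value'), obj_dict.get('type'), obj_dict.get('match')]
sample = {'name': 'plan', 'value': 'premium', 'type': 'custom_attribute', 'match': 'exact'}
print(audience_condition_deserializer(sample))
# ['plan', 'premium', 'custom_attribute', 'exact'] - the index positions the evaluators rely on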
optimizely/python-sdk
optimizely/helpers/condition.py
CustomAttributeConditionEvaluator._get_condition_json
def _get_condition_json(self, index): """ Method to generate json for logging audience condition. Args: index: Index of the condition. Returns: String: Audience condition JSON. """ condition = self.condition_data[index] condition_log = { 'name': condition[0], 'value': condition[1], 'type': condition[2], 'match': condition[3] } return json.dumps(condition_log)
python
def _get_condition_json(self, index): """ Method to generate json for logging audience condition. Args: index: Index of the condition. Returns: String: Audience condition JSON. """ condition = self.condition_data[index] condition_log = { 'name': condition[0], 'value': condition[1], 'type': condition[2], 'match': condition[3] } return json.dumps(condition_log)
[ "def", "_get_condition_json", "(", "self", ",", "index", ")", ":", "condition", "=", "self", ".", "condition_data", "[", "index", "]", "condition_log", "=", "{", "'name'", ":", "condition", "[", "0", "]", ",", "'value'", ":", "condition", "[", "1", "]", ",", "'type'", ":", "condition", "[", "2", "]", ",", "'match'", ":", "condition", "[", "3", "]", "}", "return", "json", ".", "dumps", "(", "condition_log", ")" ]
Method to generate json for logging audience condition. Args: index: Index of the condition. Returns: String: Audience condition JSON.
[ "Method", "to", "generate", "json", "for", "logging", "audience", "condition", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/helpers/condition.py#L47-L64
train
optimizely/python-sdk
optimizely/helpers/condition.py
CustomAttributeConditionEvaluator.is_value_type_valid_for_exact_conditions
def is_value_type_valid_for_exact_conditions(self, value): """ Method to validate if the value is valid for exact match type evaluation. Args: value: Value to validate. Returns: Boolean: True if value is a string, boolean, or number. Otherwise False. """ # No need to check for bool since bool is a subclass of int if isinstance(value, string_types) or isinstance(value, (numbers.Integral, float)): return True return False
python
def is_value_type_valid_for_exact_conditions(self, value): """ Method to validate if the value is valid for exact match type evaluation. Args: value: Value to validate. Returns: Boolean: True if value is a string, boolean, or number. Otherwise False. """ # No need to check for bool since bool is a subclass of int if isinstance(value, string_types) or isinstance(value, (numbers.Integral, float)): return True return False
[ "def", "is_value_type_valid_for_exact_conditions", "(", "self", ",", "value", ")", ":", "# No need to check for bool since bool is a subclass of int", "if", "isinstance", "(", "value", ",", "string_types", ")", "or", "isinstance", "(", "value", ",", "(", "numbers", ".", "Integral", ",", "float", ")", ")", ":", "return", "True", "return", "False" ]
Method to validate if the value is valid for exact match type evaluation. Args: value: Value to validate. Returns: Boolean: True if value is a string, boolean, or number. Otherwise False.
[ "Method", "to", "validate", "if", "the", "value", "is", "valid", "for", "exact", "match", "type", "evaluation", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/helpers/condition.py#L66-L79
train
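A simplified Python 3 sketch of the check above (the SDK's string_types keeps Python 2 compatibility); it illustrates why no separate bool branch is needed.
import numbers
def valid_for_exact(value):
    # bool passes automatically because bool is a subclass of int (numbers.Integral).
    return isinstance(value, str) or isinstance(value, (numbers.Integral, float))
print(valid_for_exact(True))    # True
print(valid_for_exact('pro'))   # True
print(valid_for_exact(3.14))    # True
print(valid_for_exact([1, 2]))  # False - unsupported attribute type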
optimizely/python-sdk
optimizely/helpers/condition.py
CustomAttributeConditionEvaluator.exists_evaluator
def exists_evaluator(self, index): """ Evaluate the given exists match condition for the user attributes. Args: index: Index of the condition to be evaluated. Returns: Boolean: True if the user attributes have a non-null value for the given condition, otherwise False. """ attr_name = self.condition_data[index][0] return self.attributes.get(attr_name) is not None
python
def exists_evaluator(self, index): """ Evaluate the given exists match condition for the user attributes. Args: index: Index of the condition to be evaluated. Returns: Boolean: True if the user attributes have a non-null value for the given condition, otherwise False. """ attr_name = self.condition_data[index][0] return self.attributes.get(attr_name) is not None
[ "def", "exists_evaluator", "(", "self", ",", "index", ")", ":", "attr_name", "=", "self", ".", "condition_data", "[", "index", "]", "[", "0", "]", "return", "self", ".", "attributes", ".", "get", "(", "attr_name", ")", "is", "not", "None" ]
Evaluate the given exists match condition for the user attributes. Args: index: Index of the condition to be evaluated. Returns: Boolean: True if the user attributes have a non-null value for the given condition, otherwise False.
[ "Evaluate", "the", "given", "exists", "match", "condition", "for", "the", "user", "attributes", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/helpers/condition.py#L131-L142
train
optimizely/python-sdk
optimizely/helpers/condition.py
CustomAttributeConditionEvaluator.greater_than_evaluator
def greater_than_evaluator(self, index): """ Evaluate the given greater than match condition for the user attributes. Args: index: Index of the condition to be evaluated. Returns: Boolean: - True if the user attribute value is greater than the condition value. - False if the user attribute value is less than or equal to the condition value. None: if the condition value isn't finite or the user attribute value isn't finite. """ condition_name = self.condition_data[index][0] condition_value = self.condition_data[index][1] user_value = self.attributes.get(condition_name) if not validator.is_finite_number(condition_value): self.logger.warning(audience_logs.UNKNOWN_CONDITION_VALUE.format( self._get_condition_json(index) )) return None if not self.is_value_a_number(user_value): self.logger.warning(audience_logs.UNEXPECTED_TYPE.format( self._get_condition_json(index), type(user_value), condition_name )) return None if not validator.is_finite_number(user_value): self.logger.warning(audience_logs.INFINITE_ATTRIBUTE_VALUE.format( self._get_condition_json(index), condition_name )) return None return user_value > condition_value
python
def greater_than_evaluator(self, index): """ Evaluate the given greater than match condition for the user attributes. Args: index: Index of the condition to be evaluated. Returns: Boolean: - True if the user attribute value is greater than the condition value. - False if the user attribute value is less than or equal to the condition value. None: if the condition value isn't finite or the user attribute value isn't finite. """ condition_name = self.condition_data[index][0] condition_value = self.condition_data[index][1] user_value = self.attributes.get(condition_name) if not validator.is_finite_number(condition_value): self.logger.warning(audience_logs.UNKNOWN_CONDITION_VALUE.format( self._get_condition_json(index) )) return None if not self.is_value_a_number(user_value): self.logger.warning(audience_logs.UNEXPECTED_TYPE.format( self._get_condition_json(index), type(user_value), condition_name )) return None if not validator.is_finite_number(user_value): self.logger.warning(audience_logs.INFINITE_ATTRIBUTE_VALUE.format( self._get_condition_json(index), condition_name )) return None return user_value > condition_value
[ "def", "greater_than_evaluator", "(", "self", ",", "index", ")", ":", "condition_name", "=", "self", ".", "condition_data", "[", "index", "]", "[", "0", "]", "condition_value", "=", "self", ".", "condition_data", "[", "index", "]", "[", "1", "]", "user_value", "=", "self", ".", "attributes", ".", "get", "(", "condition_name", ")", "if", "not", "validator", ".", "is_finite_number", "(", "condition_value", ")", ":", "self", ".", "logger", ".", "warning", "(", "audience_logs", ".", "UNKNOWN_CONDITION_VALUE", ".", "format", "(", "self", ".", "_get_condition_json", "(", "index", ")", ")", ")", "return", "None", "if", "not", "self", ".", "is_value_a_number", "(", "user_value", ")", ":", "self", ".", "logger", ".", "warning", "(", "audience_logs", ".", "UNEXPECTED_TYPE", ".", "format", "(", "self", ".", "_get_condition_json", "(", "index", ")", ",", "type", "(", "user_value", ")", ",", "condition_name", ")", ")", "return", "None", "if", "not", "validator", ".", "is_finite_number", "(", "user_value", ")", ":", "self", ".", "logger", ".", "warning", "(", "audience_logs", ".", "INFINITE_ATTRIBUTE_VALUE", ".", "format", "(", "self", ".", "_get_condition_json", "(", "index", ")", ",", "condition_name", ")", ")", "return", "None", "return", "user_value", ">", "condition_value" ]
Evaluate the given greater than match condition for the user attributes. Args: index: Index of the condition to be evaluated. Returns: Boolean: - True if the user attribute value is greater than the condition value. - False if the user attribute value is less than or equal to the condition value. None: if the condition value isn't finite or the user attribute value isn't finite.
[ "Evaluate", "the", "given", "greater", "than", "match", "condition", "for", "the", "user", "attributes", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/helpers/condition.py#L144-L181
train
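A standalone sketch of the tri-state comparison above: non-numeric or non-finite inputs make the condition inconclusive (None) rather than False. The finite-number check is a simplified stand-in for the SDK's validator helper.
import math
def greater_than(user_value, condition_value):
    for value in (user_value, condition_value):
        if isinstance(value, bool) or not isinstance(value, (int, float)):
            return None                      # wrong type: inconclusive
        if math.isnan(value) or math.isinf(value):
            return None                      # non-finite: inconclusive
    return user_value > condition_value
print(greater_than(10, 5))            # True
print(greater_than('10', 5))          # None, not False
print(greater_than(float('inf'), 5))  # None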
optimizely/python-sdk
optimizely/helpers/condition.py
CustomAttributeConditionEvaluator.substring_evaluator
def substring_evaluator(self, index): """ Evaluate the given substring match condition for the given user attributes. Args: index: Index of the condition to be evaluated. Returns: Boolean: - True if the condition value is a substring of the user attribute value. - False if the condition value is not a substring of the user attribute value. None: if the condition value isn't a string or the user attribute value isn't a string. """ condition_name = self.condition_data[index][0] condition_value = self.condition_data[index][1] user_value = self.attributes.get(condition_name) if not isinstance(condition_value, string_types): self.logger.warning(audience_logs.UNKNOWN_CONDITION_VALUE.format( self._get_condition_json(index), )) return None if not isinstance(user_value, string_types): self.logger.warning(audience_logs.UNEXPECTED_TYPE.format( self._get_condition_json(index), type(user_value), condition_name )) return None return condition_value in user_value
python
def substring_evaluator(self, index): """ Evaluate the given substring match condition for the given user attributes. Args: index: Index of the condition to be evaluated. Returns: Boolean: - True if the condition value is a substring of the user attribute value. - False if the condition value is not a substring of the user attribute value. None: if the condition value isn't a string or the user attribute value isn't a string. """ condition_name = self.condition_data[index][0] condition_value = self.condition_data[index][1] user_value = self.attributes.get(condition_name) if not isinstance(condition_value, string_types): self.logger.warning(audience_logs.UNKNOWN_CONDITION_VALUE.format( self._get_condition_json(index), )) return None if not isinstance(user_value, string_types): self.logger.warning(audience_logs.UNEXPECTED_TYPE.format( self._get_condition_json(index), type(user_value), condition_name )) return None return condition_value in user_value
[ "def", "substring_evaluator", "(", "self", ",", "index", ")", ":", "condition_name", "=", "self", ".", "condition_data", "[", "index", "]", "[", "0", "]", "condition_value", "=", "self", ".", "condition_data", "[", "index", "]", "[", "1", "]", "user_value", "=", "self", ".", "attributes", ".", "get", "(", "condition_name", ")", "if", "not", "isinstance", "(", "condition_value", ",", "string_types", ")", ":", "self", ".", "logger", ".", "warning", "(", "audience_logs", ".", "UNKNOWN_CONDITION_VALUE", ".", "format", "(", "self", ".", "_get_condition_json", "(", "index", ")", ",", ")", ")", "return", "None", "if", "not", "isinstance", "(", "user_value", ",", "string_types", ")", ":", "self", ".", "logger", ".", "warning", "(", "audience_logs", ".", "UNEXPECTED_TYPE", ".", "format", "(", "self", ".", "_get_condition_json", "(", "index", ")", ",", "type", "(", "user_value", ")", ",", "condition_name", ")", ")", "return", "None", "return", "condition_value", "in", "user_value" ]
Evaluate the given substring match condition for the given user attributes. Args: index: Index of the condition to be evaluated. Returns: Boolean: - True if the condition value is a substring of the user attribute value. - False if the condition value is not a substring of the user attribute value. None: if the condition value isn't a string or the user attribute value isn't a string.
[ "Evaluate", "the", "given", "substring", "match", "condition", "for", "the", "given", "user", "attributes", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/helpers/condition.py#L222-L252
train
optimizely/python-sdk
optimizely/helpers/condition.py
CustomAttributeConditionEvaluator.evaluate
def evaluate(self, index): """ Given a custom attribute audience condition and user attributes, evaluate the condition against the attributes. Args: index: Index of the condition to be evaluated. Returns: Boolean: - True if the user attributes match the given condition. - False if the user attributes don't match the given condition. None: if the user attributes and condition can't be evaluated. """ if self.condition_data[index][2] != self.CUSTOM_ATTRIBUTE_CONDITION_TYPE: self.logger.warning(audience_logs.UNKNOWN_CONDITION_TYPE.format(self._get_condition_json(index))) return None condition_match = self.condition_data[index][3] if condition_match is None: condition_match = ConditionMatchTypes.EXACT if condition_match not in self.EVALUATORS_BY_MATCH_TYPE: self.logger.warning(audience_logs.UNKNOWN_MATCH_TYPE.format(self._get_condition_json(index))) return None if condition_match != ConditionMatchTypes.EXISTS: attribute_key = self.condition_data[index][0] if attribute_key not in self.attributes: self.logger.debug(audience_logs.MISSING_ATTRIBUTE_VALUE.format(self._get_condition_json(index), attribute_key)) return None if self.attributes.get(attribute_key) is None: self.logger.debug(audience_logs.NULL_ATTRIBUTE_VALUE.format(self._get_condition_json(index), attribute_key)) return None return self.EVALUATORS_BY_MATCH_TYPE[condition_match](self, index)
python
def evaluate(self, index): """ Given a custom attribute audience condition and user attributes, evaluate the condition against the attributes. Args: index: Index of the condition to be evaluated. Returns: Boolean: - True if the user attributes match the given condition. - False if the user attributes don't match the given condition. None: if the user attributes and condition can't be evaluated. """ if self.condition_data[index][2] != self.CUSTOM_ATTRIBUTE_CONDITION_TYPE: self.logger.warning(audience_logs.UNKNOWN_CONDITION_TYPE.format(self._get_condition_json(index))) return None condition_match = self.condition_data[index][3] if condition_match is None: condition_match = ConditionMatchTypes.EXACT if condition_match not in self.EVALUATORS_BY_MATCH_TYPE: self.logger.warning(audience_logs.UNKNOWN_MATCH_TYPE.format(self._get_condition_json(index))) return None if condition_match != ConditionMatchTypes.EXISTS: attribute_key = self.condition_data[index][0] if attribute_key not in self.attributes: self.logger.debug(audience_logs.MISSING_ATTRIBUTE_VALUE.format(self._get_condition_json(index), attribute_key)) return None if self.attributes.get(attribute_key) is None: self.logger.debug(audience_logs.NULL_ATTRIBUTE_VALUE.format(self._get_condition_json(index), attribute_key)) return None return self.EVALUATORS_BY_MATCH_TYPE[condition_match](self, index)
[ "def", "evaluate", "(", "self", ",", "index", ")", ":", "if", "self", ".", "condition_data", "[", "index", "]", "[", "2", "]", "!=", "self", ".", "CUSTOM_ATTRIBUTE_CONDITION_TYPE", ":", "self", ".", "logger", ".", "warning", "(", "audience_logs", ".", "UNKNOWN_CONDITION_TYPE", ".", "format", "(", "self", ".", "_get_condition_json", "(", "index", ")", ")", ")", "return", "None", "condition_match", "=", "self", ".", "condition_data", "[", "index", "]", "[", "3", "]", "if", "condition_match", "is", "None", ":", "condition_match", "=", "ConditionMatchTypes", ".", "EXACT", "if", "condition_match", "not", "in", "self", ".", "EVALUATORS_BY_MATCH_TYPE", ":", "self", ".", "logger", ".", "warning", "(", "audience_logs", ".", "UNKNOWN_MATCH_TYPE", ".", "format", "(", "self", ".", "_get_condition_json", "(", "index", ")", ")", ")", "return", "None", "if", "condition_match", "!=", "ConditionMatchTypes", ".", "EXISTS", ":", "attribute_key", "=", "self", ".", "condition_data", "[", "index", "]", "[", "0", "]", "if", "attribute_key", "not", "in", "self", ".", "attributes", ":", "self", ".", "logger", ".", "debug", "(", "audience_logs", ".", "MISSING_ATTRIBUTE_VALUE", ".", "format", "(", "self", ".", "_get_condition_json", "(", "index", ")", ",", "attribute_key", ")", ")", "return", "None", "if", "self", ".", "attributes", ".", "get", "(", "attribute_key", ")", "is", "None", ":", "self", ".", "logger", ".", "debug", "(", "audience_logs", ".", "NULL_ATTRIBUTE_VALUE", ".", "format", "(", "self", ".", "_get_condition_json", "(", "index", ")", ",", "attribute_key", ")", ")", "return", "None", "return", "self", ".", "EVALUATORS_BY_MATCH_TYPE", "[", "condition_match", "]", "(", "self", ",", "index", ")" ]
Given a custom attribute audience condition and user attributes, evaluate the condition against the attributes. Args: index: Index of the condition to be evaluated. Returns: Boolean: - True if the user attributes match the given condition. - False if the user attributes don't match the given condition. None: if the user attributes and condition can't be evaluated.
[ "Given", "a", "custom", "attribute", "audience", "condition", "and", "user", "attributes", "evaluate", "the", "condition", "against", "the", "attributes", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/helpers/condition.py#L262-L298
train
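A standalone sketch of the dispatch pattern above: a missing match type defaults to exact, unknown match types and missing or null attributes are inconclusive, and the chosen evaluator does the comparison. The two evaluators shown are simplified illustrations, not the SDK's own.
def exact(user_value, condition_value):
    return user_value == condition_value
def substring(user_value, condition_value):
    if not isinstance(user_value, str) or not isinstance(condition_value, str):
        return None
    return condition_value in user_value
EVALUATORS_BY_MATCH_TYPE = {'exact': exact, 'substring': substring}
def evaluate_condition(condition, attributes):
    match = condition.get('match') or 'exact'          # missing match type defaults to exact
    evaluator = EVALUATORS_BY_MATCH_TYPE.get(match)
    if evaluator is None:
        return None                                    # unknown match type
    name = condition['name']
    if name not in attributes or attributes[name] is None:
        return None                                    # missing or null attribute value
    return evaluator(attributes[name], condition['value'])
print(evaluate_condition({'name': 'plan', 'value': 'prem', 'match': 'substring'}, {'plan': 'premium'}))  # True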
optimizely/python-sdk
optimizely/helpers/condition.py
ConditionDecoder.object_hook
def object_hook(self, object_dict): """ Hook which when passed into a json.JSONDecoder will replace each dict in a json string with its index and convert the dict to an object as defined by the passed in condition_decoder. The newly created condition object is appended to the conditions_list. Args: object_dict: Dict representing an object. Returns: An index which will be used as the placeholder in the condition_structure """ instance = self.decoder(object_dict) self.condition_list.append(instance) self.index += 1 return self.index
python
def object_hook(self, object_dict): """ Hook which when passed into a json.JSONDecoder will replace each dict in a json string with its index and convert the dict to an object as defined by the passed in condition_decoder. The newly created condition object is appended to the conditions_list. Args: object_dict: Dict representing an object. Returns: An index which will be used as the placeholder in the condition_structure """ instance = self.decoder(object_dict) self.condition_list.append(instance) self.index += 1 return self.index
[ "def", "object_hook", "(", "self", ",", "object_dict", ")", ":", "instance", "=", "self", ".", "decoder", "(", "object_dict", ")", "self", ".", "condition_list", ".", "append", "(", "instance", ")", "self", ".", "index", "+=", "1", "return", "self", ".", "index" ]
Hook which when passed into a json.JSONDecoder will replace each dict in a json string with its index and convert the dict to an object as defined by the passed in condition_decoder. The newly created condition object is appended to the conditions_list. Args: object_dict: Dict representing an object. Returns: An index which will be used as the placeholder in the condition_structure
[ "Hook", "which", "when", "passed", "into", "a", "json", ".", "JSONDecoder", "will", "replace", "each", "dict", "in", "a", "json", "string", "with", "its", "index", "and", "convert", "the", "dict", "to", "an", "object", "as", "defined", "by", "the", "passed", "in", "condition_decoder", ".", "The", "newly", "created", "condition", "object", "is", "appended", "to", "the", "conditions_list", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/helpers/condition.py#L310-L325
train
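A runnable sketch of the decoding trick above: json.loads calls the hook for every JSON object, so each condition dict is swapped for its index while the operator structure is preserved and the decoded conditions accumulate in a flat list.
import json
condition_list = []
def object_hook(obj_dict):
    condition_list.append([obj_dict.get('name'), obj_dict.get('value'),
                           obj_dict.get('type'), obj_dict.get('match')])
    return len(condition_list) - 1   # the index becomes the placeholder in the structure
conditions_json = '["and", {"name": "plan", "value": "premium", "type": "custom_attribute", "match": "exact"}]'
condition_structure = json.loads(conditions_json, object_hook=object_hook)
print(condition_structure)  # ['and', 0]
print(condition_list)       # [['plan', 'premium', 'custom_attribute', 'exact']]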
optimizely/python-sdk
optimizely/decision_service.py
DecisionService._get_bucketing_id
def _get_bucketing_id(self, user_id, attributes): """ Helper method to determine bucketing ID for the user. Args: user_id: ID for user. attributes: Dict representing user attributes. May consist of bucketing ID to be used. Returns: String representing bucketing ID if it is a String type in attributes else return user ID. """ attributes = attributes or {} bucketing_id = attributes.get(enums.ControlAttributes.BUCKETING_ID) if bucketing_id is not None: if isinstance(bucketing_id, string_types): return bucketing_id self.logger.warning('Bucketing ID attribute is not a string. Defaulted to user_id.') return user_id
python
def _get_bucketing_id(self, user_id, attributes): """ Helper method to determine bucketing ID for the user. Args: user_id: ID for user. attributes: Dict representing user attributes. May consist of bucketing ID to be used. Returns: String representing bucketing ID if it is a String type in attributes else return user ID. """ attributes = attributes or {} bucketing_id = attributes.get(enums.ControlAttributes.BUCKETING_ID) if bucketing_id is not None: if isinstance(bucketing_id, string_types): return bucketing_id self.logger.warning('Bucketing ID attribute is not a string. Defaulted to user_id.') return user_id
[ "def", "_get_bucketing_id", "(", "self", ",", "user_id", ",", "attributes", ")", ":", "attributes", "=", "attributes", "or", "{", "}", "bucketing_id", "=", "attributes", ".", "get", "(", "enums", ".", "ControlAttributes", ".", "BUCKETING_ID", ")", "if", "bucketing_id", "is", "not", "None", ":", "if", "isinstance", "(", "bucketing_id", ",", "string_types", ")", ":", "return", "bucketing_id", "self", ".", "logger", ".", "warning", "(", "'Bucketing ID attribute is not a string. Defaulted to user_id.'", ")", "return", "user_id" ]
Helper method to determine bucketing ID for the user. Args: user_id: ID for user. attributes: Dict representing user attributes. May consist of bucketing ID to be used. Returns: String representing bucketing ID if it is a String type in attributes else return user ID.
[ "Helper", "method", "to", "determine", "bucketing", "ID", "for", "the", "user", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/decision_service.py#L36-L56
train
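A standalone sketch of the fallback logic above. '$opt_bucketing_id' is assumed here to be the value behind enums.ControlAttributes.BUCKETING_ID; only a string value overrides the user ID.
RESERVED_BUCKETING_ID = '$opt_bucketing_id'  # assumed reserved attribute key
def get_bucketing_id(user_id, attributes):
    attributes = attributes or {}
    bucketing_id = attributes.get(RESERVED_BUCKETING_ID)
    if isinstance(bucketing_id, str):
        return bucketing_id
    return user_id  # missing or non-string bucketing ID falls back to the user ID
print(get_bucketing_id('user_123', {RESERVED_BUCKETING_ID: 'shared-bucket-7'}))  # 'shared-bucket-7'
print(get_bucketing_id('user_123', {RESERVED_BUCKETING_ID: 42}))                 # 'user_123'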
optimizely/python-sdk
optimizely/decision_service.py
DecisionService.get_forced_variation
def get_forced_variation(self, experiment, user_id): """ Determine if a user is forced into a variation for the given experiment and return that variation. Args: experiment: Object representing the experiment for which user is to be bucketed. user_id: ID for the user. Returns: Variation in which the user with ID user_id is forced into. None if no variation. """ forced_variations = experiment.forcedVariations if forced_variations and user_id in forced_variations: variation_key = forced_variations.get(user_id) variation = self.config.get_variation_from_key(experiment.key, variation_key) if variation: self.logger.info('User "%s" is forced in variation "%s".' % (user_id, variation_key)) return variation return None
python
def get_forced_variation(self, experiment, user_id): """ Determine if a user is forced into a variation for the given experiment and return that variation. Args: experiment: Object representing the experiment for which user is to be bucketed. user_id: ID for the user. Returns: Variation in which the user with ID user_id is forced into. None if no variation. """ forced_variations = experiment.forcedVariations if forced_variations and user_id in forced_variations: variation_key = forced_variations.get(user_id) variation = self.config.get_variation_from_key(experiment.key, variation_key) if variation: self.logger.info('User "%s" is forced in variation "%s".' % (user_id, variation_key)) return variation return None
[ "def", "get_forced_variation", "(", "self", ",", "experiment", ",", "user_id", ")", ":", "forced_variations", "=", "experiment", ".", "forcedVariations", "if", "forced_variations", "and", "user_id", "in", "forced_variations", ":", "variation_key", "=", "forced_variations", ".", "get", "(", "user_id", ")", "variation", "=", "self", ".", "config", ".", "get_variation_from_key", "(", "experiment", ".", "key", ",", "variation_key", ")", "if", "variation", ":", "self", ".", "logger", ".", "info", "(", "'User \"%s\" is forced in variation \"%s\".'", "%", "(", "user_id", ",", "variation_key", ")", ")", "return", "variation", "return", "None" ]
Determine if a user is forced into a variation for the given experiment and return that variation. Args: experiment: Object representing the experiment for which user is to be bucketed. user_id: ID for the user. Returns: Variation in which the user with ID user_id is forced into. None if no variation.
[ "Determine", "if", "a", "user", "is", "forced", "into", "a", "variation", "for", "the", "given", "experiment", "and", "return", "that", "variation", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/decision_service.py#L58-L77
train
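As a small illustration of the whitelist lookup in the get_forced_variation record above: the core check is just a dict membership test on experiment.forcedVariations. The sketch below uses a plain dict and hypothetical names, and omits the project-config lookup and logging.

def whitelisted_variation_key(forced_variations, user_id):
    # forced_variations maps user IDs to variation keys, as experiment.forcedVariations does.
    if forced_variations and user_id in forced_variations:
        return forced_variations.get(user_id)
    return None

whitelist = {'test_user': 'variation_b'}
assert whitelisted_variation_key(whitelist, 'test_user') == 'variation_b'
assert whitelisted_variation_key(whitelist, 'other_user') is None
assert whitelisted_variation_key({}, 'test_user') is None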
optimizely/python-sdk
optimizely/decision_service.py
DecisionService.get_stored_variation
def get_stored_variation(self, experiment, user_profile): """ Determine if the user has a stored variation available for the given experiment and return that. Args: experiment: Object representing the experiment for which user is to be bucketed. user_profile: UserProfile object representing the user's profile. Returns: Variation if available. None otherwise. """ user_id = user_profile.user_id variation_id = user_profile.get_variation_for_experiment(experiment.id) if variation_id: variation = self.config.get_variation_from_id(experiment.key, variation_id) if variation: self.logger.info('Found a stored decision. User "%s" is in variation "%s" of experiment "%s".' % ( user_id, variation.key, experiment.key )) return variation return None
python
def get_stored_variation(self, experiment, user_profile): """ Determine if the user has a stored variation available for the given experiment and return that. Args: experiment: Object representing the experiment for which user is to be bucketed. user_profile: UserProfile object representing the user's profile. Returns: Variation if available. None otherwise. """ user_id = user_profile.user_id variation_id = user_profile.get_variation_for_experiment(experiment.id) if variation_id: variation = self.config.get_variation_from_id(experiment.key, variation_id) if variation: self.logger.info('Found a stored decision. User "%s" is in variation "%s" of experiment "%s".' % ( user_id, variation.key, experiment.key )) return variation return None
[ "def", "get_stored_variation", "(", "self", ",", "experiment", ",", "user_profile", ")", ":", "user_id", "=", "user_profile", ".", "user_id", "variation_id", "=", "user_profile", ".", "get_variation_for_experiment", "(", "experiment", ".", "id", ")", "if", "variation_id", ":", "variation", "=", "self", ".", "config", ".", "get_variation_from_id", "(", "experiment", ".", "key", ",", "variation_id", ")", "if", "variation", ":", "self", ".", "logger", ".", "info", "(", "'Found a stored decision. User \"%s\" is in variation \"%s\" of experiment \"%s\".'", "%", "(", "user_id", ",", "variation", ".", "key", ",", "experiment", ".", "key", ")", ")", "return", "variation", "return", "None" ]
Determine if the user has a stored variation available for the given experiment and return that. Args: experiment: Object representing the experiment for which user is to be bucketed. user_profile: UserProfile object representing the user's profile. Returns: Variation if available. None otherwise.
[ "Determine", "if", "the", "user", "has", "a", "stored", "variation", "available", "for", "the", "given", "experiment", "and", "return", "that", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/decision_service.py#L79-L103
train
optimizely/python-sdk
optimizely/decision_service.py
DecisionService.get_variation
def get_variation(self, experiment, user_id, attributes, ignore_user_profile=False): """ Top-level function to help determine variation user should be put in. First, check if experiment is running. Second, check if user is forced in a variation. Third, check if there is a stored decision for the user and return the corresponding variation. Fourth, figure out if user is in the experiment by evaluating audience conditions if any. Fifth, bucket the user and return the variation. Args: experiment: Experiment for which user variation needs to be determined. user_id: ID for user. attributes: Dict representing user attributes. ignore_user_profile: True to ignore the user profile lookup. Defaults to False. Returns: Variation user should see. None if user is not in experiment or experiment is not running. """ # Check if experiment is running if not experiment_helper.is_experiment_running(experiment): self.logger.info('Experiment "%s" is not running.' % experiment.key) return None # Check if the user is forced into a variation variation = self.config.get_forced_variation(experiment.key, user_id) if variation: return variation # Check to see if user is white-listed for a certain variation variation = self.get_forced_variation(experiment, user_id) if variation: return variation # Check to see if user has a decision available for the given experiment user_profile = UserProfile(user_id) if not ignore_user_profile and self.user_profile_service: try: retrieved_profile = self.user_profile_service.lookup(user_id) except: self.logger.exception('Unable to retrieve user profile for user "%s" as lookup failed.' % user_id) retrieved_profile = None if validator.is_user_profile_valid(retrieved_profile): user_profile = UserProfile(**retrieved_profile) variation = self.get_stored_variation(experiment, user_profile) if variation: return variation else: self.logger.warning('User profile has invalid format.') # Bucket user and store the new decision if not audience_helper.is_user_in_experiment(self.config, experiment, attributes, self.logger): self.logger.info('User "%s" does not meet conditions to be in experiment "%s".' % ( user_id, experiment.key )) return None # Determine bucketing ID to be used bucketing_id = self._get_bucketing_id(user_id, attributes) variation = self.bucketer.bucket(experiment, user_id, bucketing_id) if variation: # Store this new decision and return the variation for the user if not ignore_user_profile and self.user_profile_service: try: user_profile.save_variation_for_experiment(experiment.id, variation.id) self.user_profile_service.save(user_profile.__dict__) except: self.logger.exception('Unable to save user profile for user "%s".' % user_id) return variation return None
python
def get_variation(self, experiment, user_id, attributes, ignore_user_profile=False): """ Top-level function to help determine variation user should be put in. First, check if experiment is running. Second, check if user is forced in a variation. Third, check if there is a stored decision for the user and return the corresponding variation. Fourth, figure out if user is in the experiment by evaluating audience conditions if any. Fifth, bucket the user and return the variation. Args: experiment: Experiment for which user variation needs to be determined. user_id: ID for user. attributes: Dict representing user attributes. ignore_user_profile: True to ignore the user profile lookup. Defaults to False. Returns: Variation user should see. None if user is not in experiment or experiment is not running. """ # Check if experiment is running if not experiment_helper.is_experiment_running(experiment): self.logger.info('Experiment "%s" is not running.' % experiment.key) return None # Check if the user is forced into a variation variation = self.config.get_forced_variation(experiment.key, user_id) if variation: return variation # Check to see if user is white-listed for a certain variation variation = self.get_forced_variation(experiment, user_id) if variation: return variation # Check to see if user has a decision available for the given experiment user_profile = UserProfile(user_id) if not ignore_user_profile and self.user_profile_service: try: retrieved_profile = self.user_profile_service.lookup(user_id) except: self.logger.exception('Unable to retrieve user profile for user "%s" as lookup failed.' % user_id) retrieved_profile = None if validator.is_user_profile_valid(retrieved_profile): user_profile = UserProfile(**retrieved_profile) variation = self.get_stored_variation(experiment, user_profile) if variation: return variation else: self.logger.warning('User profile has invalid format.') # Bucket user and store the new decision if not audience_helper.is_user_in_experiment(self.config, experiment, attributes, self.logger): self.logger.info('User "%s" does not meet conditions to be in experiment "%s".' % ( user_id, experiment.key )) return None # Determine bucketing ID to be used bucketing_id = self._get_bucketing_id(user_id, attributes) variation = self.bucketer.bucket(experiment, user_id, bucketing_id) if variation: # Store this new decision and return the variation for the user if not ignore_user_profile and self.user_profile_service: try: user_profile.save_variation_for_experiment(experiment.id, variation.id) self.user_profile_service.save(user_profile.__dict__) except: self.logger.exception('Unable to save user profile for user "%s".' % user_id) return variation return None
[ "def", "get_variation", "(", "self", ",", "experiment", ",", "user_id", ",", "attributes", ",", "ignore_user_profile", "=", "False", ")", ":", "# Check if experiment is running", "if", "not", "experiment_helper", ".", "is_experiment_running", "(", "experiment", ")", ":", "self", ".", "logger", ".", "info", "(", "'Experiment \"%s\" is not running.'", "%", "experiment", ".", "key", ")", "return", "None", "# Check if the user is forced into a variation", "variation", "=", "self", ".", "config", ".", "get_forced_variation", "(", "experiment", ".", "key", ",", "user_id", ")", "if", "variation", ":", "return", "variation", "# Check to see if user is white-listed for a certain variation", "variation", "=", "self", ".", "get_forced_variation", "(", "experiment", ",", "user_id", ")", "if", "variation", ":", "return", "variation", "# Check to see if user has a decision available for the given experiment", "user_profile", "=", "UserProfile", "(", "user_id", ")", "if", "not", "ignore_user_profile", "and", "self", ".", "user_profile_service", ":", "try", ":", "retrieved_profile", "=", "self", ".", "user_profile_service", ".", "lookup", "(", "user_id", ")", "except", ":", "self", ".", "logger", ".", "exception", "(", "'Unable to retrieve user profile for user \"%s\" as lookup failed.'", "%", "user_id", ")", "retrieved_profile", "=", "None", "if", "validator", ".", "is_user_profile_valid", "(", "retrieved_profile", ")", ":", "user_profile", "=", "UserProfile", "(", "*", "*", "retrieved_profile", ")", "variation", "=", "self", ".", "get_stored_variation", "(", "experiment", ",", "user_profile", ")", "if", "variation", ":", "return", "variation", "else", ":", "self", ".", "logger", ".", "warning", "(", "'User profile has invalid format.'", ")", "# Bucket user and store the new decision", "if", "not", "audience_helper", ".", "is_user_in_experiment", "(", "self", ".", "config", ",", "experiment", ",", "attributes", ",", "self", ".", "logger", ")", ":", "self", ".", "logger", ".", "info", "(", "'User \"%s\" does not meet conditions to be in experiment \"%s\".'", "%", "(", "user_id", ",", "experiment", ".", "key", ")", ")", "return", "None", "# Determine bucketing ID to be used", "bucketing_id", "=", "self", ".", "_get_bucketing_id", "(", "user_id", ",", "attributes", ")", "variation", "=", "self", ".", "bucketer", ".", "bucket", "(", "experiment", ",", "user_id", ",", "bucketing_id", ")", "if", "variation", ":", "# Store this new decision and return the variation for the user", "if", "not", "ignore_user_profile", "and", "self", ".", "user_profile_service", ":", "try", ":", "user_profile", ".", "save_variation_for_experiment", "(", "experiment", ".", "id", ",", "variation", ".", "id", ")", "self", ".", "user_profile_service", ".", "save", "(", "user_profile", ".", "__dict__", ")", "except", ":", "self", ".", "logger", ".", "exception", "(", "'Unable to save user profile for user \"%s\".'", "%", "user_id", ")", "return", "variation", "return", "None" ]
Top-level function to help determine variation user should be put in. First, check if experiment is running. Second, check if user is forced in a variation. Third, check if there is a stored decision for the user and return the corresponding variation. Fourth, figure out if user is in the experiment by evaluating audience conditions if any. Fifth, bucket the user and return the variation. Args: experiment: Experiment for which user variation needs to be determined. user_id: ID for user. attributes: Dict representing user attributes. ignore_user_profile: True to ignore the user profile lookup. Defaults to False. Returns: Variation user should see. None if user is not in experiment or experiment is not running.
[ "Top", "-", "level", "function", "to", "help", "determine", "variation", "user", "should", "be", "put", "in", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/decision_service.py#L105-L178
train
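The get_variation docstring above lists its decision order in prose; the toy function below condenses that precedence so the order of checks is easy to see. All names here are hypothetical scaffolding (plain values stand in for the config lookup, whitelist, user-profile service, audience check and bucketer); this is not the SDK's API.

def decide_variation(experiment_running, forced, whitelisted, stored, audience_match, bucketed):
    # Mirrors the documented order: running check, forced variation, whitelist,
    # stored decision, audience conditions, then bucketing.
    if not experiment_running:
        return None
    if forced is not None:
        return forced
    if whitelisted is not None:
        return whitelisted
    if stored is not None:
        return stored
    if not audience_match:
        return None
    return bucketed

# A stored decision wins over bucketing, but loses to a forced variation.
assert decide_variation(True, None, None, 'var_a', True, 'var_b') == 'var_a'
assert decide_variation(True, 'var_c', None, 'var_a', True, 'var_b') == 'var_c'
assert decide_variation(True, None, None, None, False, 'var_b') is None
assert decide_variation(False, 'var_c', None, None, True, 'var_b') is None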
optimizely/python-sdk
optimizely/decision_service.py
DecisionService.get_experiment_in_group
def get_experiment_in_group(self, group, bucketing_id): """ Determine which experiment in the group the user is bucketed into. Args: group: The group to bucket the user into. bucketing_id: ID to be used for bucketing the user. Returns: Experiment if the user is bucketed into an experiment in the specified group. None otherwise. """ experiment_id = self.bucketer.find_bucket(bucketing_id, group.id, group.trafficAllocation) if experiment_id: experiment = self.config.get_experiment_from_id(experiment_id) if experiment: self.logger.info('User with bucketing ID "%s" is in experiment %s of group %s.' % ( bucketing_id, experiment.key, group.id )) return experiment self.logger.info('User with bucketing ID "%s" is not in any experiments of group %s.' % ( bucketing_id, group.id )) return None
python
def get_experiment_in_group(self, group, bucketing_id): """ Determine which experiment in the group the user is bucketed into. Args: group: The group to bucket the user into. bucketing_id: ID to be used for bucketing the user. Returns: Experiment if the user is bucketed into an experiment in the specified group. None otherwise. """ experiment_id = self.bucketer.find_bucket(bucketing_id, group.id, group.trafficAllocation) if experiment_id: experiment = self.config.get_experiment_from_id(experiment_id) if experiment: self.logger.info('User with bucketing ID "%s" is in experiment %s of group %s.' % ( bucketing_id, experiment.key, group.id )) return experiment self.logger.info('User with bucketing ID "%s" is not in any experiments of group %s.' % ( bucketing_id, group.id )) return None
[ "def", "get_experiment_in_group", "(", "self", ",", "group", ",", "bucketing_id", ")", ":", "experiment_id", "=", "self", ".", "bucketer", ".", "find_bucket", "(", "bucketing_id", ",", "group", ".", "id", ",", "group", ".", "trafficAllocation", ")", "if", "experiment_id", ":", "experiment", "=", "self", ".", "config", ".", "get_experiment_from_id", "(", "experiment_id", ")", "if", "experiment", ":", "self", ".", "logger", ".", "info", "(", "'User with bucketing ID \"%s\" is in experiment %s of group %s.'", "%", "(", "bucketing_id", ",", "experiment", ".", "key", ",", "group", ".", "id", ")", ")", "return", "experiment", "self", ".", "logger", ".", "info", "(", "'User with bucketing ID \"%s\" is not in any experiments of group %s.'", "%", "(", "bucketing_id", ",", "group", ".", "id", ")", ")", "return", "None" ]
Determine which experiment in the group the user is bucketed into. Args: group: The group to bucket the user into. bucketing_id: ID to be used for bucketing the user. Returns: Experiment if the user is bucketed into an experiment in the specified group. None otherwise.
[ "Determine", "which", "experiment", "in", "the", "group", "the", "user", "is", "bucketed", "into", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/decision_service.py#L238-L265
train
optimizely/python-sdk
optimizely/notification_center.py
NotificationCenter.add_notification_listener
def add_notification_listener(self, notification_type, notification_callback): """ Add a notification callback to the notification center. Args: notification_type: A string representing the notification type from .helpers.enums.NotificationTypes notification_callback: closure of function to call when event is triggered. Returns: Integer notification id used to remove the notification or -1 if the notification has already been added. """ if notification_type not in self.notifications: self.notifications[notification_type] = [(self.notification_id, notification_callback)] else: if reduce(lambda a, b: a + 1, filter(lambda tup: tup[1] == notification_callback, self.notifications[notification_type]), 0) > 0: return -1 self.notifications[notification_type].append((self.notification_id, notification_callback)) ret_val = self.notification_id self.notification_id += 1 return ret_val
python
def add_notification_listener(self, notification_type, notification_callback): """ Add a notification callback to the notification center. Args: notification_type: A string representing the notification type from .helpers.enums.NotificationTypes notification_callback: closure of function to call when event is triggered. Returns: Integer notification id used to remove the notification or -1 if the notification has already been added. """ if notification_type not in self.notifications: self.notifications[notification_type] = [(self.notification_id, notification_callback)] else: if reduce(lambda a, b: a + 1, filter(lambda tup: tup[1] == notification_callback, self.notifications[notification_type]), 0) > 0: return -1 self.notifications[notification_type].append((self.notification_id, notification_callback)) ret_val = self.notification_id self.notification_id += 1 return ret_val
[ "def", "add_notification_listener", "(", "self", ",", "notification_type", ",", "notification_callback", ")", ":", "if", "notification_type", "not", "in", "self", ".", "notifications", ":", "self", ".", "notifications", "[", "notification_type", "]", "=", "[", "(", "self", ".", "notification_id", ",", "notification_callback", ")", "]", "else", ":", "if", "reduce", "(", "lambda", "a", ",", "b", ":", "a", "+", "1", ",", "filter", "(", "lambda", "tup", ":", "tup", "[", "1", "]", "==", "notification_callback", ",", "self", ".", "notifications", "[", "notification_type", "]", ")", ",", "0", ")", ">", "0", ":", "return", "-", "1", "self", ".", "notifications", "[", "notification_type", "]", ".", "append", "(", "(", "self", ".", "notification_id", ",", "notification_callback", ")", ")", "ret_val", "=", "self", ".", "notification_id", "self", ".", "notification_id", "+=", "1", "return", "ret_val" ]
Add a notification callback to the notification center. Args: notification_type: A string representing the notification type from .helpers.enums.NotificationTypes notification_callback: closure of function to call when event is triggered. Returns: Integer notification id used to remove the notification or -1 if the notification has already been added.
[ "Add", "a", "notification", "callback", "to", "the", "notification", "center", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/notification_center.py#L29-L53
train
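The least obvious part of add_notification_listener above is the duplicate check: the reduce over a filter simply counts existing (id, callback) tuples whose callback matches, and -1 is returned when that count is positive. Below is a self-contained sketch of that bookkeeping, independent of the SDK classes; the function and variable names are made up for illustration.

from functools import reduce

def add_listener(notifications, notification_type, callback, next_id):
    # notifications: dict mapping a type string to a list of (id, callback) tuples,
    # the same shape used by NotificationCenter above.
    listeners = notifications.setdefault(notification_type, [])
    already_added = reduce(lambda count, _: count + 1,
                           filter(lambda tup: tup[1] == callback, listeners), 0) > 0
    if already_added:
        return -1, next_id
    listeners.append((next_id, callback))
    return next_id, next_id + 1

notifications = {}
on_decision = lambda *args: None
listener_id, next_id = add_listener(notifications, 'DECISION', on_decision, 0)
assert listener_id == 0
duplicate_id, next_id = add_listener(notifications, 'DECISION', on_decision, next_id)
assert duplicate_id == -1  # registering the same callback twice is rejected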
optimizely/python-sdk
optimizely/notification_center.py
NotificationCenter.remove_notification_listener
def remove_notification_listener(self, notification_id): """ Remove a previously added notification callback. Args: notification_id: The numeric id passed back from add_notification_listener Returns: The function returns boolean true if found and removed, false otherwise. """ for v in self.notifications.values(): toRemove = list(filter(lambda tup: tup[0] == notification_id, v)) if len(toRemove) > 0: v.remove(toRemove[0]) return True return False
python
def remove_notification_listener(self, notification_id): """ Remove a previously added notification callback. Args: notification_id: The numeric id passed back from add_notification_listener Returns: The function returns boolean true if found and removed, false otherwise. """ for v in self.notifications.values(): toRemove = list(filter(lambda tup: tup[0] == notification_id, v)) if len(toRemove) > 0: v.remove(toRemove[0]) return True return False
[ "def", "remove_notification_listener", "(", "self", ",", "notification_id", ")", ":", "for", "v", "in", "self", ".", "notifications", ".", "values", "(", ")", ":", "toRemove", "=", "list", "(", "filter", "(", "lambda", "tup", ":", "tup", "[", "0", "]", "==", "notification_id", ",", "v", ")", ")", "if", "len", "(", "toRemove", ")", ">", "0", ":", "v", ".", "remove", "(", "toRemove", "[", "0", "]", ")", "return", "True", "return", "False" ]
Remove a previously added notification callback. Args: notification_id: The numeric id passed back from add_notification_listener Returns: The function returns boolean true if found and removed, false otherwise.
[ "Remove", "a", "previously", "added", "notification", "callback", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/notification_center.py#L55-L71
train
optimizely/python-sdk
optimizely/notification_center.py
NotificationCenter.send_notifications
def send_notifications(self, notification_type, *args): """ Fires off the notification for the specific event. Uses var args to pass in an arbitrary list of parameters according to which notification type was fired. Args: notification_type: Type of notification to fire (String from .helpers.enums.NotificationTypes) args: variable list of arguments to the callback. """ if notification_type in self.notifications: for notification_id, callback in self.notifications[notification_type]: try: callback(*args) except: self.logger.exception('Problem calling notify callback!')
python
def send_notifications(self, notification_type, *args): """ Fires off the notification for the specific event. Uses var args to pass in an arbitrary list of parameters according to which notification type was fired. Args: notification_type: Type of notification to fire (String from .helpers.enums.NotificationTypes) args: variable list of arguments to the callback. """ if notification_type in self.notifications: for notification_id, callback in self.notifications[notification_type]: try: callback(*args) except: self.logger.exception('Problem calling notify callback!')
[ "def", "send_notifications", "(", "self", ",", "notification_type", ",", "*", "args", ")", ":", "if", "notification_type", "in", "self", ".", "notifications", ":", "for", "notification_id", ",", "callback", "in", "self", ".", "notifications", "[", "notification_type", "]", ":", "try", ":", "callback", "(", "*", "args", ")", "except", ":", "self", ".", "logger", ".", "exception", "(", "'Problem calling notify callback!'", ")" ]
Fires off the notification for the specific event. Uses var args to pass in an arbitrary list of parameters according to which notification type was fired. Args: notification_type: Type of notification to fire (String from .helpers.enums.NotificationTypes) args: variable list of arguments to the callback.
[ "Fires", "off", "the", "notification", "for", "the", "specific", "event", ".", "Uses", "var", "args", "to", "pass", "in", "an", "arbitrary", "list", "of", "parameters", "according", "to", "which", "notification", "type", "was", "fired", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/notification_center.py#L87-L101
train
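Taken together, the three NotificationCenter records above suggest a simple lifecycle: register a listener, fire notifications, then deregister. The sketch below is hypothetical wiring, not documented usage: the constructor argument and the 'MY_EVENT' type string are assumptions (the class stores whatever type string it is given), while the method names and return values come from the records themselves.

from optimizely.notification_center import NotificationCenter
from optimizely import logger as optimizely_logging  # use of NoOpLogger here is an assumption

received = []

def on_event(*payload):
    received.append(payload)

nc = NotificationCenter(optimizely_logging.NoOpLogger())
listener_id = nc.add_notification_listener('MY_EVENT', on_event)
nc.send_notifications('MY_EVENT', 'experiment_key', 'user_1')
assert received == [('experiment_key', 'user_1')]
assert nc.remove_notification_listener(listener_id) is True
nc.send_notifications('MY_EVENT', 'experiment_key', 'user_1')  # no listeners left; nothing is called
assert received == [('experiment_key', 'user_1')]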
optimizely/python-sdk
optimizely/helpers/condition_tree_evaluator.py
and_evaluator
def and_evaluator(conditions, leaf_evaluator): """ Evaluates a list of conditions as if the evaluator had been applied to each entry and the results AND-ed together. Args: conditions: List of conditions ex: [operand_1, operand_2]. leaf_evaluator: Function which will be called to evaluate leaf condition values. Returns: Boolean: - True if all operands evaluate to True. - False if a single operand evaluates to False. None: if conditions couldn't be evaluated. """ saw_null_result = False for condition in conditions: result = evaluate(condition, leaf_evaluator) if result is False: return False if result is None: saw_null_result = True return None if saw_null_result else True
python
def and_evaluator(conditions, leaf_evaluator): """ Evaluates a list of conditions as if the evaluator had been applied to each entry and the results AND-ed together. Args: conditions: List of conditions ex: [operand_1, operand_2]. leaf_evaluator: Function which will be called to evaluate leaf condition values. Returns: Boolean: - True if all operands evaluate to True. - False if a single operand evaluates to False. None: if conditions couldn't be evaluated. """ saw_null_result = False for condition in conditions: result = evaluate(condition, leaf_evaluator) if result is False: return False if result is None: saw_null_result = True return None if saw_null_result else True
[ "def", "and_evaluator", "(", "conditions", ",", "leaf_evaluator", ")", ":", "saw_null_result", "=", "False", "for", "condition", "in", "conditions", ":", "result", "=", "evaluate", "(", "condition", ",", "leaf_evaluator", ")", "if", "result", "is", "False", ":", "return", "False", "if", "result", "is", "None", ":", "saw_null_result", "=", "True", "return", "None", "if", "saw_null_result", "else", "True" ]
Evaluates a list of conditions as if the evaluator had been applied to each entry and the results AND-ed together. Args: conditions: List of conditions ex: [operand_1, operand_2]. leaf_evaluator: Function which will be called to evaluate leaf condition values. Returns: Boolean: - True if all operands evaluate to True. - False if a single operand evaluates to False. None: if conditions couldn't be evaluated.
[ "Evaluates", "a", "list", "of", "conditions", "as", "if", "the", "evaluator", "had", "been", "applied", "to", "each", "entry", "and", "the", "results", "AND", "-", "ed", "together", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/helpers/condition_tree_evaluator.py#L17-L40
train
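The three-valued behaviour described for and_evaluator above (a single False wins outright, otherwise any None makes the result None) is easiest to see with literal leaf values and a trivial leaf evaluator. The import path follows this record's module path; the identity leaf evaluator is just for illustration.

from optimizely.helpers.condition_tree_evaluator import and_evaluator

identity_leaf = lambda leaf: leaf  # leaves are already True/False/None in this toy example

assert and_evaluator([True, True], identity_leaf) is True
assert and_evaluator([True, False, None], identity_leaf) is False  # False short-circuits
assert and_evaluator([True, None], identity_leaf) is None          # an unevaluable operand poisons the AND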
optimizely/python-sdk
optimizely/helpers/condition_tree_evaluator.py
not_evaluator
def not_evaluator(conditions, leaf_evaluator): """ Evaluates a list of conditions as if the evaluator had been applied to a single entry and NOT was applied to the result. Args: conditions: List of conditions ex: [operand_1, operand_2]. leaf_evaluator: Function which will be called to evaluate leaf condition values. Returns: Boolean: - True if the operand evaluates to False. - False if the operand evaluates to True. None: if conditions is empty or condition couldn't be evaluated. """ if not len(conditions) > 0: return None result = evaluate(conditions[0], leaf_evaluator) return None if result is None else not result
python
def not_evaluator(conditions, leaf_evaluator): """ Evaluates a list of conditions as if the evaluator had been applied to a single entry and NOT was applied to the result. Args: conditions: List of conditions ex: [operand_1, operand_2]. leaf_evaluator: Function which will be called to evaluate leaf condition values. Returns: Boolean: - True if the operand evaluates to False. - False if the operand evaluates to True. None: if conditions is empty or condition couldn't be evaluated. """ if not len(conditions) > 0: return None result = evaluate(conditions[0], leaf_evaluator) return None if result is None else not result
[ "def", "not_evaluator", "(", "conditions", ",", "leaf_evaluator", ")", ":", "if", "not", "len", "(", "conditions", ")", ">", "0", ":", "return", "None", "result", "=", "evaluate", "(", "conditions", "[", "0", "]", ",", "leaf_evaluator", ")", "return", "None", "if", "result", "is", "None", "else", "not", "result" ]
Evaluates a list of conditions as if the evaluator had been applied to a single entry and NOT was applied to the result. Args: conditions: List of conditions ex: [operand_1, operand_2]. leaf_evaluator: Function which will be called to evaluate leaf condition values. Returns: Boolean: - True if the operand evaluates to False. - False if the operand evaluates to True. None: if conditions is empty or condition couldn't be evaluated.
[ "Evaluates", "a", "list", "of", "conditions", "as", "if", "the", "evaluator", "had", "been", "applied", "to", "a", "single", "entry", "and", "NOT", "was", "applied", "to", "the", "result", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/helpers/condition_tree_evaluator.py#L69-L87
train
optimizely/python-sdk
optimizely/helpers/condition_tree_evaluator.py
evaluate
def evaluate(conditions, leaf_evaluator): """ Top level method to evaluate conditions. Args: conditions: Nested array of and/or conditions, or a single leaf condition value of any type. Example: ['and', '0', ['or', '1', '2']] leaf_evaluator: Function which will be called to evaluate leaf condition values. Returns: Boolean: Result of evaluating the conditions using the operator rules and the leaf evaluator. None: if conditions couldn't be evaluated. """ if isinstance(conditions, list): if conditions[0] in list(EVALUATORS_BY_OPERATOR_TYPE.keys()): return EVALUATORS_BY_OPERATOR_TYPE[conditions[0]](conditions[1:], leaf_evaluator) else: # assume OR when operator is not explicit. return EVALUATORS_BY_OPERATOR_TYPE[ConditionOperatorTypes.OR](conditions, leaf_evaluator) leaf_condition = conditions return leaf_evaluator(leaf_condition)
python
def evaluate(conditions, leaf_evaluator): """ Top level method to evaluate conditions. Args: conditions: Nested array of and/or conditions, or a single leaf condition value of any type. Example: ['and', '0', ['or', '1', '2']] leaf_evaluator: Function which will be called to evaluate leaf condition values. Returns: Boolean: Result of evaluating the conditions using the operator rules and the leaf evaluator. None: if conditions couldn't be evaluated. """ if isinstance(conditions, list): if conditions[0] in list(EVALUATORS_BY_OPERATOR_TYPE.keys()): return EVALUATORS_BY_OPERATOR_TYPE[conditions[0]](conditions[1:], leaf_evaluator) else: # assume OR when operator is not explicit. return EVALUATORS_BY_OPERATOR_TYPE[ConditionOperatorTypes.OR](conditions, leaf_evaluator) leaf_condition = conditions return leaf_evaluator(leaf_condition)
[ "def", "evaluate", "(", "conditions", ",", "leaf_evaluator", ")", ":", "if", "isinstance", "(", "conditions", ",", "list", ")", ":", "if", "conditions", "[", "0", "]", "in", "list", "(", "EVALUATORS_BY_OPERATOR_TYPE", ".", "keys", "(", ")", ")", ":", "return", "EVALUATORS_BY_OPERATOR_TYPE", "[", "conditions", "[", "0", "]", "]", "(", "conditions", "[", "1", ":", "]", ",", "leaf_evaluator", ")", "else", ":", "# assume OR when operator is not explicit.", "return", "EVALUATORS_BY_OPERATOR_TYPE", "[", "ConditionOperatorTypes", ".", "OR", "]", "(", "conditions", ",", "leaf_evaluator", ")", "leaf_condition", "=", "conditions", "return", "leaf_evaluator", "(", "leaf_condition", ")" ]
Top level method to evaluate conditions. Args: conditions: Nested array of and/or conditions, or a single leaf condition value of any type. Example: ['and', '0', ['or', '1', '2']] leaf_evaluator: Function which will be called to evaluate leaf condition values. Returns: Boolean: Result of evaluating the conditions using the operator rules and the leaf evaluator. None: if conditions couldn't be evaluated.
[ "Top", "level", "method", "to", "evaluate", "conditions", "." ]
ec028d9efcf22498c3820f2650fa10f5c30bec90
https://github.com/optimizely/python-sdk/blob/ec028d9efcf22498c3820f2650fa10f5c30bec90/optimizely/helpers/condition_tree_evaluator.py#L97-L119
train
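A worked example of the top-level evaluate above, using the same nested shape as its docstring. The leaf evaluator is a hypothetical closure mapping operand IDs to known truth values; IDs it does not know return None, which exercises the null-propagation rules of the operator evaluators.

from optimizely.helpers.condition_tree_evaluator import evaluate

truth = {'0': True, '1': False, '2': True}
leaf = lambda operand_id: truth.get(operand_id)  # unknown operands evaluate to None

assert evaluate(['and', '0', ['or', '1', '2']], leaf) is True   # True AND (False OR True)
assert evaluate(['and', '0', '1'], leaf) is False               # True AND False
assert evaluate(['not', '1'], leaf) is True                     # NOT False
assert evaluate(['or', '1', 'unknown'], leaf) is None           # False OR None -> None
assert evaluate('2', leaf) is True                              # a bare leaf goes straight to the leaf evaluator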
Parisson/TimeSide
timeside/core/analyzer.py
data_objet_class
def data_objet_class(data_mode='value', time_mode='framewise'): """ Factory function for Analyzer result """ classes_table = {('value', 'global'): GlobalValueObject, ('value', 'event'): EventValueObject, ('value', 'segment'): SegmentValueObject, ('value', 'framewise'): FrameValueObject, ('label', 'global'): GlobalLabelObject, ('label', 'event'): EventLabelObject, ('label', 'segment'): SegmentLabelObject, ('label', 'framewise'): FrameLabelObject} try: return classes_table[(data_mode, time_mode)] except KeyError as e: raise ValueError('Wrong arguments')
python
def data_objet_class(data_mode='value', time_mode='framewise'): """ Factory function for Analyzer result """ classes_table = {('value', 'global'): GlobalValueObject, ('value', 'event'): EventValueObject, ('value', 'segment'): SegmentValueObject, ('value', 'framewise'): FrameValueObject, ('label', 'global'): GlobalLabelObject, ('label', 'event'): EventLabelObject, ('label', 'segment'): SegmentLabelObject, ('label', 'framewise'): FrameLabelObject} try: return classes_table[(data_mode, time_mode)] except KeyError as e: raise ValueError('Wrong arguments')
[ "def", "data_objet_class", "(", "data_mode", "=", "'value'", ",", "time_mode", "=", "'framewise'", ")", ":", "classes_table", "=", "{", "(", "'value'", ",", "'global'", ")", ":", "GlobalValueObject", ",", "(", "'value'", ",", "'event'", ")", ":", "EventValueObject", ",", "(", "'value'", ",", "'segment'", ")", ":", "SegmentValueObject", ",", "(", "'value'", ",", "'framewise'", ")", ":", "FrameValueObject", ",", "(", "'label'", ",", "'global'", ")", ":", "GlobalLabelObject", ",", "(", "'label'", ",", "'event'", ")", ":", "EventLabelObject", ",", "(", "'label'", ",", "'segment'", ")", ":", "SegmentLabelObject", ",", "(", "'label'", ",", "'framewise'", ")", ":", "FrameLabelObject", "}", "try", ":", "return", "classes_table", "[", "(", "data_mode", ",", "time_mode", ")", "]", "except", "KeyError", "as", "e", ":", "raise", "ValueError", "(", "'Wrong arguments'", ")" ]
Factory function for Analyzer result
[ "Factory", "function", "for", "Analyzer", "result" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/core/analyzer.py#L511-L527
train
Parisson/TimeSide
timeside/core/analyzer.py
JSON_NumpyArrayEncoder
def JSON_NumpyArrayEncoder(obj): '''Define specialized JSON encoder for numpy array''' if isinstance(obj, np.ndarray): return {'numpyArray': obj.tolist(), 'dtype': obj.dtype.__str__()} elif isinstance(obj, np.generic): return np.asscalar(obj) else: print type(obj) raise TypeError(repr(obj) + " is not JSON serializable")
python
def JSON_NumpyArrayEncoder(obj): '''Define specialized JSON encoder for numpy array''' if isinstance(obj, np.ndarray): return {'numpyArray': obj.tolist(), 'dtype': obj.dtype.__str__()} elif isinstance(obj, np.generic): return np.asscalar(obj) else: print type(obj) raise TypeError(repr(obj) + " is not JSON serializable")
[ "def", "JSON_NumpyArrayEncoder", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "np", ".", "ndarray", ")", ":", "return", "{", "'numpyArray'", ":", "obj", ".", "tolist", "(", ")", ",", "'dtype'", ":", "obj", ".", "dtype", ".", "__str__", "(", ")", "}", "elif", "isinstance", "(", "obj", ",", "np", ".", "generic", ")", ":", "return", "np", ".", "asscalar", "(", "obj", ")", "else", ":", "print", "type", "(", "obj", ")", "raise", "TypeError", "(", "repr", "(", "obj", ")", "+", "\" is not JSON serializable\"", ")" ]
Define specialized JSON encoder for numpy array
[ "Define", "specialized", "JSON", "encoder", "for", "numpy", "array" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/core/analyzer.py#L1047-L1056
train
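JSON_NumpyArrayEncoder above is the kind of hook that gets passed to json.dumps through its default= argument, which the standard encoder calls only for objects it cannot serialize itself. The sketch below restates the same idea for Python 3 and current NumPy (str(obj.dtype) instead of __str__(), obj.item() instead of the deprecated np.asscalar), so the names differ slightly from the record.

import json
import numpy as np

def numpy_array_encoder(obj):
    # Same behaviour as JSON_NumpyArrayEncoder above, restated for Python 3.
    if isinstance(obj, np.ndarray):
        return {'numpyArray': obj.tolist(), 'dtype': str(obj.dtype)}
    elif isinstance(obj, np.generic):
        return obj.item()
    raise TypeError(repr(obj) + " is not JSON serializable")

payload = {'samplerate': np.int32(44100), 'values': np.arange(3, dtype=np.float64)}
decoded = json.loads(json.dumps(payload, default=numpy_array_encoder))
assert decoded['samplerate'] == 44100
assert decoded['values'] == {'numpyArray': [0.0, 1.0, 2.0], 'dtype': 'float64'}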
Parisson/TimeSide
timeside/core/analyzer.py
AnalyzerResult.render
def render(self): '''Render a matplotlib figure from the analyzer result Return the figure, use fig.show() to display if needed ''' fig, ax = plt.subplots() self.data_object._render_plot(ax) return fig
python
def render(self): '''Render a matplotlib figure from the analyzer result Return the figure, use fig.show() to display if needed ''' fig, ax = plt.subplots() self.data_object._render_plot(ax) return fig
[ "def", "render", "(", "self", ")", ":", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", ")", "self", ".", "data_object", ".", "_render_plot", "(", "ax", ")", "return", "fig" ]
Render a matplotlib figure from the analyzer result Return the figure, use fig.show() to display if needed
[ "Render", "a", "matplotlib", "figure", "from", "the", "analyzer", "result" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/core/analyzer.py#L670-L678
train
Parisson/TimeSide
timeside/core/analyzer.py
Analyzer.new_result
def new_result(self, data_mode='value', time_mode='framewise'): ''' Create a new result Attributes ---------- data_object : MetadataObject id_metadata : MetadataObject audio_metadata : MetadataObject frame_metadata : MetadataObject label_metadata : MetadataObject parameters : dict ''' from datetime import datetime result = AnalyzerResult(data_mode=data_mode, time_mode=time_mode) # Automatically write known metadata result.id_metadata.date = datetime.now().replace( microsecond=0).isoformat(' ') result.id_metadata.version = timeside.core.__version__ result.id_metadata.author = 'TimeSide' result.id_metadata.id = self.id() result.id_metadata.name = self.name() result.id_metadata.description = self.description() result.id_metadata.unit = self.unit() result.id_metadata.proc_uuid = self.uuid() result.audio_metadata.uri = self.mediainfo()['uri'] result.audio_metadata.sha1 = self.mediainfo()['sha1'] result.audio_metadata.start = self.mediainfo()['start'] result.audio_metadata.duration = self.mediainfo()['duration'] result.audio_metadata.is_segment = self.mediainfo()['is_segment'] result.audio_metadata.channels = self.channels() result.parameters = Parameters(self.get_parameters()) if time_mode == 'framewise': result.data_object.frame_metadata.samplerate = self.result_samplerate result.data_object.frame_metadata.blocksize = self.result_blocksize result.data_object.frame_metadata.stepsize = self.result_stepsize return result
python
def new_result(self, data_mode='value', time_mode='framewise'): ''' Create a new result Attributes ---------- data_object : MetadataObject id_metadata : MetadataObject audio_metadata : MetadataObject frame_metadata : MetadataObject label_metadata : MetadataObject parameters : dict ''' from datetime import datetime result = AnalyzerResult(data_mode=data_mode, time_mode=time_mode) # Automatically write known metadata result.id_metadata.date = datetime.now().replace( microsecond=0).isoformat(' ') result.id_metadata.version = timeside.core.__version__ result.id_metadata.author = 'TimeSide' result.id_metadata.id = self.id() result.id_metadata.name = self.name() result.id_metadata.description = self.description() result.id_metadata.unit = self.unit() result.id_metadata.proc_uuid = self.uuid() result.audio_metadata.uri = self.mediainfo()['uri'] result.audio_metadata.sha1 = self.mediainfo()['sha1'] result.audio_metadata.start = self.mediainfo()['start'] result.audio_metadata.duration = self.mediainfo()['duration'] result.audio_metadata.is_segment = self.mediainfo()['is_segment'] result.audio_metadata.channels = self.channels() result.parameters = Parameters(self.get_parameters()) if time_mode == 'framewise': result.data_object.frame_metadata.samplerate = self.result_samplerate result.data_object.frame_metadata.blocksize = self.result_blocksize result.data_object.frame_metadata.stepsize = self.result_stepsize return result
[ "def", "new_result", "(", "self", ",", "data_mode", "=", "'value'", ",", "time_mode", "=", "'framewise'", ")", ":", "from", "datetime", "import", "datetime", "result", "=", "AnalyzerResult", "(", "data_mode", "=", "data_mode", ",", "time_mode", "=", "time_mode", ")", "# Automatically write known metadata", "result", ".", "id_metadata", ".", "date", "=", "datetime", ".", "now", "(", ")", ".", "replace", "(", "microsecond", "=", "0", ")", ".", "isoformat", "(", "' '", ")", "result", ".", "id_metadata", ".", "version", "=", "timeside", ".", "core", ".", "__version__", "result", ".", "id_metadata", ".", "author", "=", "'TimeSide'", "result", ".", "id_metadata", ".", "id", "=", "self", ".", "id", "(", ")", "result", ".", "id_metadata", ".", "name", "=", "self", ".", "name", "(", ")", "result", ".", "id_metadata", ".", "description", "=", "self", ".", "description", "(", ")", "result", ".", "id_metadata", ".", "unit", "=", "self", ".", "unit", "(", ")", "result", ".", "id_metadata", ".", "proc_uuid", "=", "self", ".", "uuid", "(", ")", "result", ".", "audio_metadata", ".", "uri", "=", "self", ".", "mediainfo", "(", ")", "[", "'uri'", "]", "result", ".", "audio_metadata", ".", "sha1", "=", "self", ".", "mediainfo", "(", ")", "[", "'sha1'", "]", "result", ".", "audio_metadata", ".", "start", "=", "self", ".", "mediainfo", "(", ")", "[", "'start'", "]", "result", ".", "audio_metadata", ".", "duration", "=", "self", ".", "mediainfo", "(", ")", "[", "'duration'", "]", "result", ".", "audio_metadata", ".", "is_segment", "=", "self", ".", "mediainfo", "(", ")", "[", "'is_segment'", "]", "result", ".", "audio_metadata", ".", "channels", "=", "self", ".", "channels", "(", ")", "result", ".", "parameters", "=", "Parameters", "(", "self", ".", "get_parameters", "(", ")", ")", "if", "time_mode", "==", "'framewise'", ":", "result", ".", "data_object", ".", "frame_metadata", ".", "samplerate", "=", "self", ".", "result_samplerate", "result", ".", "data_object", ".", "frame_metadata", ".", "blocksize", "=", "self", ".", "result_blocksize", "result", ".", "data_object", ".", "frame_metadata", ".", "stepsize", "=", "self", ".", "result_stepsize", "return", "result" ]
Create a new result Attributes ---------- data_object : MetadataObject id_metadata : MetadataObject audio_metadata : MetadataObject frame_metadata : MetadataObject label_metadata : MetadataObject parameters : dict
[ "Create", "a", "new", "result" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/core/analyzer.py#L1279-L1324
train
Parisson/TimeSide
timeside/core/preprocessors.py
downmix_to_mono
def downmix_to_mono(process_func): ''' Pre-processing decorator that downmixes frames from multi-channel to mono Downmix is achieved by averaging all channels >>> from timeside.core.preprocessors import downmix_to_mono >>> @downmix_to_mono ... def process(analyzer,frames,eod): ... print 'Frames, eod inside process :' ... print frames, eod ... return frames, eod ... >>> import numpy as np >>> frames = np.asarray([[1,2],[3,4],[5,6],[7,8],[9,10]]) >>> eod = False >>> frames_, eod_ = process(object(),frames,eod) Frames, eod inside process : [1.5 3.5 5.5 7.5 9.5] False Outside Process frames and eod are preserved : >>> frames_ array([[ 1, 2], [ 3, 4], [ 5, 6], [ 7, 8], [ 9, 10]]) >>> eod_ False ''' import functools @functools.wraps(process_func) def wrapper(analyzer, frames, eod): # Pre-processing if frames.ndim > 1: downmix_frames = frames.mean(axis=-1) else: downmix_frames = frames # Processing process_func(analyzer, downmix_frames, eod) return frames, eod return wrapper
python
def downmix_to_mono(process_func): ''' Pre-processing decorator that downmixes frames from multi-channel to mono Downmix is achieved by averaging all channels >>> from timeside.core.preprocessors import downmix_to_mono >>> @downmix_to_mono ... def process(analyzer,frames,eod): ... print 'Frames, eod inside process :' ... print frames, eod ... return frames, eod ... >>> import numpy as np >>> frames = np.asarray([[1,2],[3,4],[5,6],[7,8],[9,10]]) >>> eod = False >>> frames_, eod_ = process(object(),frames,eod) Frames, eod inside process : [1.5 3.5 5.5 7.5 9.5] False Outside Process frames and eod are preserved : >>> frames_ array([[ 1, 2], [ 3, 4], [ 5, 6], [ 7, 8], [ 9, 10]]) >>> eod_ False ''' import functools @functools.wraps(process_func) def wrapper(analyzer, frames, eod): # Pre-processing if frames.ndim > 1: downmix_frames = frames.mean(axis=-1) else: downmix_frames = frames # Processing process_func(analyzer, downmix_frames, eod) return frames, eod return wrapper
[ "def", "downmix_to_mono", "(", "process_func", ")", ":", "import", "functools", "@", "functools", ".", "wraps", "(", "process_func", ")", "def", "wrapper", "(", "analyzer", ",", "frames", ",", "eod", ")", ":", "# Pre-processing", "if", "frames", ".", "ndim", ">", "1", ":", "downmix_frames", "=", "frames", ".", "mean", "(", "axis", "=", "-", "1", ")", "else", ":", "downmix_frames", "=", "frames", "# Processing", "process_func", "(", "analyzer", ",", "downmix_frames", ",", "eod", ")", "return", "frames", ",", "eod", "return", "wrapper" ]
Pre-processing decorator that downmixes frames from multi-channel to mono Downmix is achieved by averaging all channels >>> from timeside.core.preprocessors import downmix_to_mono >>> @downmix_to_mono ... def process(analyzer,frames,eod): ... print 'Frames, eod inside process :' ... print frames, eod ... return frames, eod ... >>> import numpy as np >>> frames = np.asarray([[1,2],[3,4],[5,6],[7,8],[9,10]]) >>> eod = False >>> frames_, eod_ = process(object(),frames,eod) Frames, eod inside process : [1.5 3.5 5.5 7.5 9.5] False Outside Process frames and eod are preserved : >>> frames_ array([[ 1, 2], [ 3, 4], [ 5, 6], [ 7, 8], [ 9, 10]]) >>> eod_ False
[ "Pre", "-", "processing", "decorator", "that", "downmixes", "frames", "from", "multi", "-", "channel", "to", "mono" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/core/preprocessors.py#L32-L77
train
Parisson/TimeSide
timeside/core/preprocessors.py
frames_adapter
def frames_adapter(process_func): ''' Pre-processing decorator that adapt frames to match input_blocksize and input_stepsize of the decorated analyzer >>> from timeside.core.preprocessors import frames_adapter >>> @frames_adapter ... def process(analyzer,frames,eod): ... analyzer.frames.append(frames) ... return frames, eod >>> class Fake_Analyzer(object): ... def __init__(self): ... self.input_blocksize = 4 ... self.input_stepsize = 3 ... self.frames = [] # Container for the frame as viewed by process ... @staticmethod ... def id(): ... return 'fake_analyzer' >>> import numpy as np >>> analyzer = Fake_Analyzer() >>> frames = np.asarray(range(0,12)) >>> eod = False >>> frames_, eod_ = process(analyzer,frames,eod) Inside the process the frames have been adapted to match input_blocksize and input_stepsize >>> analyzer.frames [array([0, 1, 2, 3]), array([3, 4, 5, 6]), array([6, 7, 8, 9])] Outside the process, the original frames and eod are preserved: >>> frames_ array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]) >>> eod_ False Releasing the process with eod=True will zeropad the last frame if necessary >>> frames = np.asarray(range(12,14)) >>> eod = True >>> frames_, eod_ = process(analyzer,frames,eod) >>> analyzer.frames [array([0, 1, 2, 3]), array([3, 4, 5, 6]), array([6, 7, 8, 9]), array([ 9, 10, 11, 12]), array([12, 13, 0, 0])] ''' import functools import numpy as np class framesBuffer(object): def __init__(self, blocksize, stepsize): self.blocksize = blocksize self.stepsize = stepsize self.buffer = None def frames(self, frames, eod): if self.buffer is not None: stack = np.concatenate([self.buffer, frames]) else: stack = frames.copy() stack_length = len(stack) nb_frames = ( stack_length - self.blocksize + self.stepsize) // self.stepsize nb_frames = max(nb_frames, 0) frames_length = nb_frames * self.stepsize + \ self.blocksize - self.stepsize last_block_size = stack_length - frames_length if eod: # Final zeropadding pad_shape = tuple( self.blocksize - last_block_size if i == 0 else x for i, x in enumerate(frames.shape)) stack = np.concatenate([stack, np.zeros(pad_shape, dtype=frames.dtype)]) nb_frames += 1 self.buffer = stack[nb_frames * self.stepsize:] eod_list = np.repeat(False, nb_frames) if eod and len(eod_list): eod_list[-1] = eod for index, eod in zip(xrange(0, nb_frames * self.stepsize, self.stepsize), eod_list): yield (stack[index:index + self.blocksize], eod) aubio_analyzers = ['aubio_melenergy', 'aubio_mfcc', 'aubio_pitch', 'aubio_specdesc', 'aubio_temporal'] @functools.wraps(process_func) def wrapper(analyzer, frames, eod): # Pre-processing if not hasattr(analyzer, 'frames_buffer'): if analyzer.id() in aubio_analyzers: # Aubio analyzers are waiting for stepsize length block # and reconstructs blocksize length frames itself # thus frames_adapter has to provide Aubio Pitch blocksize=stepsize length frames analyzer.frames_buffer = framesBuffer(analyzer.input_stepsize, analyzer.input_stepsize) else: analyzer.frames_buffer = framesBuffer(analyzer.input_blocksize, analyzer.input_stepsize) # Processing for adapted_frames, adapted_eod in analyzer.frames_buffer.frames(frames, eod): process_func(analyzer, adapted_frames, adapted_eod) return frames, eod return wrapper
python
def frames_adapter(process_func): ''' Pre-processing decorator that adapt frames to match input_blocksize and input_stepsize of the decorated analyzer >>> from timeside.core.preprocessors import frames_adapter >>> @frames_adapter ... def process(analyzer,frames,eod): ... analyzer.frames.append(frames) ... return frames, eod >>> class Fake_Analyzer(object): ... def __init__(self): ... self.input_blocksize = 4 ... self.input_stepsize = 3 ... self.frames = [] # Container for the frame as viewed by process ... @staticmethod ... def id(): ... return 'fake_analyzer' >>> import numpy as np >>> analyzer = Fake_Analyzer() >>> frames = np.asarray(range(0,12)) >>> eod = False >>> frames_, eod_ = process(analyzer,frames,eod) Inside the process the frames have been adapted to match input_blocksize and input_stepsize >>> analyzer.frames [array([0, 1, 2, 3]), array([3, 4, 5, 6]), array([6, 7, 8, 9])] Outside the process, the original frames and eod are preserved: >>> frames_ array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]) >>> eod_ False Releasing the process with eod=True will zeropad the last frame if necessary >>> frames = np.asarray(range(12,14)) >>> eod = True >>> frames_, eod_ = process(analyzer,frames,eod) >>> analyzer.frames [array([0, 1, 2, 3]), array([3, 4, 5, 6]), array([6, 7, 8, 9]), array([ 9, 10, 11, 12]), array([12, 13, 0, 0])] ''' import functools import numpy as np class framesBuffer(object): def __init__(self, blocksize, stepsize): self.blocksize = blocksize self.stepsize = stepsize self.buffer = None def frames(self, frames, eod): if self.buffer is not None: stack = np.concatenate([self.buffer, frames]) else: stack = frames.copy() stack_length = len(stack) nb_frames = ( stack_length - self.blocksize + self.stepsize) // self.stepsize nb_frames = max(nb_frames, 0) frames_length = nb_frames * self.stepsize + \ self.blocksize - self.stepsize last_block_size = stack_length - frames_length if eod: # Final zeropadding pad_shape = tuple( self.blocksize - last_block_size if i == 0 else x for i, x in enumerate(frames.shape)) stack = np.concatenate([stack, np.zeros(pad_shape, dtype=frames.dtype)]) nb_frames += 1 self.buffer = stack[nb_frames * self.stepsize:] eod_list = np.repeat(False, nb_frames) if eod and len(eod_list): eod_list[-1] = eod for index, eod in zip(xrange(0, nb_frames * self.stepsize, self.stepsize), eod_list): yield (stack[index:index + self.blocksize], eod) aubio_analyzers = ['aubio_melenergy', 'aubio_mfcc', 'aubio_pitch', 'aubio_specdesc', 'aubio_temporal'] @functools.wraps(process_func) def wrapper(analyzer, frames, eod): # Pre-processing if not hasattr(analyzer, 'frames_buffer'): if analyzer.id() in aubio_analyzers: # Aubio analyzers are waiting for stepsize length block # and reconstructs blocksize length frames itself # thus frames_adapter has to provide Aubio Pitch blocksize=stepsize length frames analyzer.frames_buffer = framesBuffer(analyzer.input_stepsize, analyzer.input_stepsize) else: analyzer.frames_buffer = framesBuffer(analyzer.input_blocksize, analyzer.input_stepsize) # Processing for adapted_frames, adapted_eod in analyzer.frames_buffer.frames(frames, eod): process_func(analyzer, adapted_frames, adapted_eod) return frames, eod return wrapper
[ "def", "frames_adapter", "(", "process_func", ")", ":", "import", "functools", "import", "numpy", "as", "np", "class", "framesBuffer", "(", "object", ")", ":", "def", "__init__", "(", "self", ",", "blocksize", ",", "stepsize", ")", ":", "self", ".", "blocksize", "=", "blocksize", "self", ".", "stepsize", "=", "stepsize", "self", ".", "buffer", "=", "None", "def", "frames", "(", "self", ",", "frames", ",", "eod", ")", ":", "if", "self", ".", "buffer", "is", "not", "None", ":", "stack", "=", "np", ".", "concatenate", "(", "[", "self", ".", "buffer", ",", "frames", "]", ")", "else", ":", "stack", "=", "frames", ".", "copy", "(", ")", "stack_length", "=", "len", "(", "stack", ")", "nb_frames", "=", "(", "stack_length", "-", "self", ".", "blocksize", "+", "self", ".", "stepsize", ")", "//", "self", ".", "stepsize", "nb_frames", "=", "max", "(", "nb_frames", ",", "0", ")", "frames_length", "=", "nb_frames", "*", "self", ".", "stepsize", "+", "self", ".", "blocksize", "-", "self", ".", "stepsize", "last_block_size", "=", "stack_length", "-", "frames_length", "if", "eod", ":", "# Final zeropadding", "pad_shape", "=", "tuple", "(", "self", ".", "blocksize", "-", "last_block_size", "if", "i", "==", "0", "else", "x", "for", "i", ",", "x", "in", "enumerate", "(", "frames", ".", "shape", ")", ")", "stack", "=", "np", ".", "concatenate", "(", "[", "stack", ",", "np", ".", "zeros", "(", "pad_shape", ",", "dtype", "=", "frames", ".", "dtype", ")", "]", ")", "nb_frames", "+=", "1", "self", ".", "buffer", "=", "stack", "[", "nb_frames", "*", "self", ".", "stepsize", ":", "]", "eod_list", "=", "np", ".", "repeat", "(", "False", ",", "nb_frames", ")", "if", "eod", "and", "len", "(", "eod_list", ")", ":", "eod_list", "[", "-", "1", "]", "=", "eod", "for", "index", ",", "eod", "in", "zip", "(", "xrange", "(", "0", ",", "nb_frames", "*", "self", ".", "stepsize", ",", "self", ".", "stepsize", ")", ",", "eod_list", ")", ":", "yield", "(", "stack", "[", "index", ":", "index", "+", "self", ".", "blocksize", "]", ",", "eod", ")", "aubio_analyzers", "=", "[", "'aubio_melenergy'", ",", "'aubio_mfcc'", ",", "'aubio_pitch'", ",", "'aubio_specdesc'", ",", "'aubio_temporal'", "]", "@", "functools", ".", "wraps", "(", "process_func", ")", "def", "wrapper", "(", "analyzer", ",", "frames", ",", "eod", ")", ":", "# Pre-processing", "if", "not", "hasattr", "(", "analyzer", ",", "'frames_buffer'", ")", ":", "if", "analyzer", ".", "id", "(", ")", "in", "aubio_analyzers", ":", "# Aubio analyzers are waiting for stepsize length block", "# and reconstructs blocksize length frames itself", "# thus frames_adapter has to provide Aubio Pitch blocksize=stepsize length frames", "analyzer", ".", "frames_buffer", "=", "framesBuffer", "(", "analyzer", ".", "input_stepsize", ",", "analyzer", ".", "input_stepsize", ")", "else", ":", "analyzer", ".", "frames_buffer", "=", "framesBuffer", "(", "analyzer", ".", "input_blocksize", ",", "analyzer", ".", "input_stepsize", ")", "# Processing", "for", "adapted_frames", ",", "adapted_eod", "in", "analyzer", ".", "frames_buffer", ".", "frames", "(", "frames", ",", "eod", ")", ":", "process_func", "(", "analyzer", ",", "adapted_frames", ",", "adapted_eod", ")", "return", "frames", ",", "eod", "return", "wrapper" ]
Pre-processing decorator that adapt frames to match input_blocksize and input_stepsize of the decorated analyzer >>> from timeside.core.preprocessors import frames_adapter >>> @frames_adapter ... def process(analyzer,frames,eod): ... analyzer.frames.append(frames) ... return frames, eod >>> class Fake_Analyzer(object): ... def __init__(self): ... self.input_blocksize = 4 ... self.input_stepsize = 3 ... self.frames = [] # Container for the frame as viewed by process ... @staticmethod ... def id(): ... return 'fake_analyzer' >>> import numpy as np >>> analyzer = Fake_Analyzer() >>> frames = np.asarray(range(0,12)) >>> eod = False >>> frames_, eod_ = process(analyzer,frames,eod) Inside the process the frames have been adapted to match input_blocksize and input_stepsize >>> analyzer.frames [array([0, 1, 2, 3]), array([3, 4, 5, 6]), array([6, 7, 8, 9])] Outside the process, the original frames and eod are preserved: >>> frames_ array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]) >>> eod_ False Releasing the process with eod=True will zeropad the last frame if necessary >>> frames = np.asarray(range(12,14)) >>> eod = True >>> frames_, eod_ = process(analyzer,frames,eod) >>> analyzer.frames [array([0, 1, 2, 3]), array([3, 4, 5, 6]), array([6, 7, 8, 9]), array([ 9, 10, 11, 12]), array([12, 13, 0, 0])]
[ "Pre", "-", "processing", "decorator", "that", "adapt", "frames", "to", "match", "input_blocksize", "and", "input_stepsize", "of", "the", "decorated", "analyzer" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/core/preprocessors.py#L80-L190
train
Parisson/TimeSide
timeside/server/models.py
Item.get_uri
def get_uri(self):
    """Return the Item source"""
    if self.source_file and os.path.exists(self.source_file.path):
        return self.source_file.path
    elif self.source_url:
        return self.source_url
    return None
python
def get_uri(self):
    """Return the Item source"""
    if self.source_file and os.path.exists(self.source_file.path):
        return self.source_file.path
    elif self.source_url:
        return self.source_url
    return None
[ "def", "get_uri", "(", "self", ")", ":", "if", "self", ".", "source_file", "and", "os", ".", "path", ".", "exists", "(", "self", ".", "source_file", ".", "path", ")", ":", "return", "self", ".", "source_file", ".", "path", "elif", "self", ".", "source_url", ":", "return", "self", ".", "source_url", "return", "None" ]
Return the Item source
[ "Return", "the", "Item", "source" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/server/models.py#L184-L190
train
Parisson/TimeSide
timeside/server/models.py
Item.get_audio_duration
def get_audio_duration(self):
    """
    Return item audio duration
    """
    decoder = timeside.core.get_processor('file_decoder')(
        uri=self.get_uri())
    return decoder.uri_total_duration
python
def get_audio_duration(self):
    """
    Return item audio duration
    """
    decoder = timeside.core.get_processor('file_decoder')(
        uri=self.get_uri())
    return decoder.uri_total_duration
[ "def", "get_audio_duration", "(", "self", ")", ":", "decoder", "=", "timeside", ".", "core", ".", "get_processor", "(", "'file_decoder'", ")", "(", "uri", "=", "self", ".", "get_uri", "(", ")", ")", "return", "decoder", ".", "uri_total_duration" ]
Return item audio duration
[ "Return", "item", "audio", "duration" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/server/models.py#L192-L198
train
Parisson/TimeSide
timeside/server/models.py
Item.get_results_path
def get_results_path(self):
    """
    Return Item result path
    """
    result_path = os.path.join(RESULTS_ROOT, self.uuid)
    if not os.path.exists(result_path):
        os.makedirs(result_path)
    return result_path
python
def get_results_path(self):
    """
    Return Item result path
    """
    result_path = os.path.join(RESULTS_ROOT, self.uuid)
    if not os.path.exists(result_path):
        os.makedirs(result_path)
    return result_path
[ "def", "get_results_path", "(", "self", ")", ":", "result_path", "=", "os", ".", "path", ".", "join", "(", "RESULTS_ROOT", ",", "self", ".", "uuid", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "result_path", ")", ":", "os", ".", "makedirs", "(", "result_path", ")", "return", "result_path" ]
Return Item result path
[ "Return", "Item", "result", "path" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/server/models.py#L200-L207
train
Parisson/TimeSide
timeside/plugins/decoder/utils.py
get_uri
def get_uri(source): """ Check a media source as a valid file or uri and return the proper uri """ import gst src_info = source_info(source) if src_info['is_file']: # Is this a file? return get_uri(src_info['uri']) elif gst.uri_is_valid(source): # Is this a valid URI source for Gstreamer uri_protocol = gst.uri_get_protocol(source) if gst.uri_protocol_is_supported(gst.URI_SRC, uri_protocol): return source else: raise IOError('Invalid URI source for Gstreamer') else: raise IOError('Failed getting uri for path %s: no such file' % source)
python
def get_uri(source): """ Check a media source as a valid file or uri and return the proper uri """ import gst src_info = source_info(source) if src_info['is_file']: # Is this a file? return get_uri(src_info['uri']) elif gst.uri_is_valid(source): # Is this a valid URI source for Gstreamer uri_protocol = gst.uri_get_protocol(source) if gst.uri_protocol_is_supported(gst.URI_SRC, uri_protocol): return source else: raise IOError('Invalid URI source for Gstreamer') else: raise IOError('Failed getting uri for path %s: no such file' % source)
[ "def", "get_uri", "(", "source", ")", ":", "import", "gst", "src_info", "=", "source_info", "(", "source", ")", "if", "src_info", "[", "'is_file'", "]", ":", "# Is this a file?", "return", "get_uri", "(", "src_info", "[", "'uri'", "]", ")", "elif", "gst", ".", "uri_is_valid", "(", "source", ")", ":", "# Is this a valid URI source for Gstreamer", "uri_protocol", "=", "gst", ".", "uri_get_protocol", "(", "source", ")", "if", "gst", ".", "uri_protocol_is_supported", "(", "gst", ".", "URI_SRC", ",", "uri_protocol", ")", ":", "return", "source", "else", ":", "raise", "IOError", "(", "'Invalid URI source for Gstreamer'", ")", "else", ":", "raise", "IOError", "(", "'Failed getting uri for path %s: no such file'", "%", "source", ")" ]
Check a media source as a valid file or uri and return the proper uri
[ "Check", "a", "media", "source", "as", "a", "valid", "file", "or", "uri", "and", "return", "the", "proper", "uri" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/plugins/decoder/utils.py#L100-L119
train
Parisson/TimeSide
timeside/plugins/decoder/utils.py
sha1sum_file
def sha1sum_file(filename):
    '''
    Return the secure hash digest with sha1 algorithm for a given file

    >>> from timeside.core.tools.test_samples import samples
    >>> wav_file = samples["C4_scale.wav"]
    >>> print sha1sum_file(wav_file)
    a598e78d0b5c90da54a77e34c083abdcd38d42ba
    '''
    import hashlib
    import io

    sha1 = hashlib.sha1()
    chunk_size = sha1.block_size * io.DEFAULT_BUFFER_SIZE

    with open(filename, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            sha1.update(chunk)
    return sha1.hexdigest()
python
def sha1sum_file(filename):
    '''
    Return the secure hash digest with sha1 algorithm for a given file

    >>> from timeside.core.tools.test_samples import samples
    >>> wav_file = samples["C4_scale.wav"]
    >>> print sha1sum_file(wav_file)
    a598e78d0b5c90da54a77e34c083abdcd38d42ba
    '''
    import hashlib
    import io

    sha1 = hashlib.sha1()
    chunk_size = sha1.block_size * io.DEFAULT_BUFFER_SIZE

    with open(filename, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            sha1.update(chunk)
    return sha1.hexdigest()
[ "def", "sha1sum_file", "(", "filename", ")", ":", "import", "hashlib", "import", "io", "sha1", "=", "hashlib", ".", "sha1", "(", ")", "chunk_size", "=", "sha1", ".", "block_size", "*", "io", ".", "DEFAULT_BUFFER_SIZE", "with", "open", "(", "filename", ",", "'rb'", ")", "as", "f", ":", "for", "chunk", "in", "iter", "(", "lambda", ":", "f", ".", "read", "(", "chunk_size", ")", ",", "b''", ")", ":", "sha1", ".", "update", "(", "chunk", ")", "return", "sha1", ".", "hexdigest", "(", ")" ]
Return the secure hash digest with sha1 algorithm for a given file

>>> from timeside.core.tools.test_samples import samples
>>> wav_file = samples["C4_scale.wav"]
>>> print sha1sum_file(wav_file)
a598e78d0b5c90da54a77e34c083abdcd38d42ba
[ "Return", "the", "secure", "hash", "digest", "with", "sha1", "algorithm", "for", "a", "given", "file" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/plugins/decoder/utils.py#L180-L198
train
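A minimal standalone sketch of the chunked SHA-1 pattern used by sha1sum_file above; it relies only on the Python standard library, and the file path in the usage line is a placeholder, not a TimeSide sample.

import hashlib
import io

def sha1_of_file(path):
    # Hash the file in fixed-size chunks so large files fit in constant memory.
    sha1 = hashlib.sha1()
    chunk_size = sha1.block_size * io.DEFAULT_BUFFER_SIZE
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            sha1.update(chunk)
    return sha1.hexdigest()

# Hypothetical usage (placeholder path):
# print(sha1_of_file('/tmp/example.wav'))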
Parisson/TimeSide
timeside/plugins/decoder/utils.py
sha1sum_url
def sha1sum_url(url): '''Return the secure hash digest with sha1 algorithm for a given url >>> url = "https://github.com/yomguy/timeside-samples/raw/master/samples/guitar.wav" >>> print sha1sum_url(url) 08301c3f9a8d60926f31e253825cc74263e52ad1 ''' import hashlib import urllib from contextlib import closing sha1 = hashlib.sha1() chunk_size = sha1.block_size * 8192 max_file_size = 10 * 1024 * 1024 # 10Mo limit in case of very large file total_read = 0 with closing(urllib.urlopen(url)) as url_obj: for chunk in iter(lambda: url_obj.read(chunk_size), b''): sha1.update(chunk) total_read += chunk_size if total_read > max_file_size: break return sha1.hexdigest()
python
def sha1sum_url(url): '''Return the secure hash digest with sha1 algorithm for a given url >>> url = "https://github.com/yomguy/timeside-samples/raw/master/samples/guitar.wav" >>> print sha1sum_url(url) 08301c3f9a8d60926f31e253825cc74263e52ad1 ''' import hashlib import urllib from contextlib import closing sha1 = hashlib.sha1() chunk_size = sha1.block_size * 8192 max_file_size = 10 * 1024 * 1024 # 10Mo limit in case of very large file total_read = 0 with closing(urllib.urlopen(url)) as url_obj: for chunk in iter(lambda: url_obj.read(chunk_size), b''): sha1.update(chunk) total_read += chunk_size if total_read > max_file_size: break return sha1.hexdigest()
[ "def", "sha1sum_url", "(", "url", ")", ":", "import", "hashlib", "import", "urllib", "from", "contextlib", "import", "closing", "sha1", "=", "hashlib", ".", "sha1", "(", ")", "chunk_size", "=", "sha1", ".", "block_size", "*", "8192", "max_file_size", "=", "10", "*", "1024", "*", "1024", "# 10Mo limit in case of very large file", "total_read", "=", "0", "with", "closing", "(", "urllib", ".", "urlopen", "(", "url", ")", ")", "as", "url_obj", ":", "for", "chunk", "in", "iter", "(", "lambda", ":", "url_obj", ".", "read", "(", "chunk_size", ")", ",", "b''", ")", ":", "sha1", ".", "update", "(", "chunk", ")", "total_read", "+=", "chunk_size", "if", "total_read", ">", "max_file_size", ":", "break", "return", "sha1", ".", "hexdigest", "(", ")" ]
Return the secure hash digest with sha1 algorithm for a given url

>>> url = "https://github.com/yomguy/timeside-samples/raw/master/samples/guitar.wav"
>>> print sha1sum_url(url)
08301c3f9a8d60926f31e253825cc74263e52ad1
[ "Return", "the", "secure", "hash", "digest", "with", "sha1", "algorithm", "for", "a", "given", "url" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/plugins/decoder/utils.py#L201-L226
train
Parisson/TimeSide
timeside/plugins/decoder/utils.py
sha1sum_numpy
def sha1sum_numpy(np_array):
    '''
    Return the secure hash digest with sha1 algorithm for a numpy array
    '''
    import hashlib
    return hashlib.sha1(np_array.view(np.uint8)).hexdigest()
python
def sha1sum_numpy(np_array):
    '''
    Return the secure hash digest with sha1 algorithm for a numpy array
    '''
    import hashlib
    return hashlib.sha1(np_array.view(np.uint8)).hexdigest()
[ "def", "sha1sum_numpy", "(", "np_array", ")", ":", "import", "hashlib", "return", "hashlib", ".", "sha1", "(", "np_array", ".", "view", "(", "np", ".", "uint8", ")", ")", ".", "hexdigest", "(", ")" ]
Return the secure hash digest with sha1 algorithm for a numpy array
[ "Return", "the", "secure", "hash", "digest", "with", "sha1", "algorithm", "for", "a", "numpy", "array" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/plugins/decoder/utils.py#L229-L234
train
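A short illustration of the trick used by sha1sum_numpy above: the array buffer is reinterpreted as bytes (uint8) and hashed directly. Assumes NumPy; the array values are made up.

import hashlib
import numpy as np

a = np.arange(8, dtype=np.float32)
# View the array's raw memory as uint8 and hash those bytes.
digest = hashlib.sha1(a.view(np.uint8)).hexdigest()
print(digest)
# Note: the digest depends on dtype, byte order and memory layout, not only on the values.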
Parisson/TimeSide
timeside/core/tools/package.py
import_module_with_exceptions
def import_module_with_exceptions(name, package=None): """Wrapper around importlib.import_module to import TimeSide subpackage and ignoring ImportError if Aubio, Yaafe and Vamp Host are not available""" from timeside.core import _WITH_AUBIO, _WITH_YAAFE, _WITH_VAMP if name.count('.server.'): # TODO: # Temporary skip all timeside.server submodules before check dependencies return try: import_module(name, package) except VampImportError: # No Vamp Host if _WITH_VAMP: raise VampImportError else: # Ignore Vamp ImportError return except ImportError as e: if str(e).count('yaafelib') and not _WITH_YAAFE: # Ignore Yaafe ImportError return elif str(e).count('aubio') and not _WITH_AUBIO: # Ignore Aubio ImportError return elif str(e).count('DJANGO_SETTINGS_MODULE'): # Ignore module requiring DJANGO_SETTINGS_MODULE in environnement return else: print (name, package) raise e return name
python
def import_module_with_exceptions(name, package=None): """Wrapper around importlib.import_module to import TimeSide subpackage and ignoring ImportError if Aubio, Yaafe and Vamp Host are not available""" from timeside.core import _WITH_AUBIO, _WITH_YAAFE, _WITH_VAMP if name.count('.server.'): # TODO: # Temporary skip all timeside.server submodules before check dependencies return try: import_module(name, package) except VampImportError: # No Vamp Host if _WITH_VAMP: raise VampImportError else: # Ignore Vamp ImportError return except ImportError as e: if str(e).count('yaafelib') and not _WITH_YAAFE: # Ignore Yaafe ImportError return elif str(e).count('aubio') and not _WITH_AUBIO: # Ignore Aubio ImportError return elif str(e).count('DJANGO_SETTINGS_MODULE'): # Ignore module requiring DJANGO_SETTINGS_MODULE in environnement return else: print (name, package) raise e return name
[ "def", "import_module_with_exceptions", "(", "name", ",", "package", "=", "None", ")", ":", "from", "timeside", ".", "core", "import", "_WITH_AUBIO", ",", "_WITH_YAAFE", ",", "_WITH_VAMP", "if", "name", ".", "count", "(", "'.server.'", ")", ":", "# TODO:", "# Temporary skip all timeside.server submodules before check dependencies", "return", "try", ":", "import_module", "(", "name", ",", "package", ")", "except", "VampImportError", ":", "# No Vamp Host", "if", "_WITH_VAMP", ":", "raise", "VampImportError", "else", ":", "# Ignore Vamp ImportError", "return", "except", "ImportError", "as", "e", ":", "if", "str", "(", "e", ")", ".", "count", "(", "'yaafelib'", ")", "and", "not", "_WITH_YAAFE", ":", "# Ignore Yaafe ImportError", "return", "elif", "str", "(", "e", ")", ".", "count", "(", "'aubio'", ")", "and", "not", "_WITH_AUBIO", ":", "# Ignore Aubio ImportError", "return", "elif", "str", "(", "e", ")", ".", "count", "(", "'DJANGO_SETTINGS_MODULE'", ")", ":", "# Ignore module requiring DJANGO_SETTINGS_MODULE in environnement", "return", "else", ":", "print", "(", "name", ",", "package", ")", "raise", "e", "return", "name" ]
Wrapper around importlib.import_module to import TimeSide subpackage and ignoring ImportError if Aubio, Yaafe and Vamp Host are not available
[ "Wrapper", "around", "importlib", ".", "import_module", "to", "import", "TimeSide", "subpackage", "and", "ignoring", "ImportError", "if", "Aubio", "Yaafe", "and", "Vamp", "Host", "are", "not", "available" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/core/tools/package.py#L50-L82
train
Parisson/TimeSide
timeside/core/tools/package.py
check_vamp
def check_vamp(): "Check Vamp host availability" try: from timeside.plugins.analyzer.externals import vamp_plugin except VampImportError: warnings.warn('Vamp host is not available', ImportWarning, stacklevel=2) _WITH_VAMP = False else: _WITH_VAMP = True del vamp_plugin return _WITH_VAMP
python
def check_vamp(): "Check Vamp host availability" try: from timeside.plugins.analyzer.externals import vamp_plugin except VampImportError: warnings.warn('Vamp host is not available', ImportWarning, stacklevel=2) _WITH_VAMP = False else: _WITH_VAMP = True del vamp_plugin return _WITH_VAMP
[ "def", "check_vamp", "(", ")", ":", "try", ":", "from", "timeside", ".", "plugins", ".", "analyzer", ".", "externals", "import", "vamp_plugin", "except", "VampImportError", ":", "warnings", ".", "warn", "(", "'Vamp host is not available'", ",", "ImportWarning", ",", "stacklevel", "=", "2", ")", "_WITH_VAMP", "=", "False", "else", ":", "_WITH_VAMP", "=", "True", "del", "vamp_plugin", "return", "_WITH_VAMP" ]
Check Vamp host availability
[ "Check", "Vamp", "host", "availability" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/core/tools/package.py#L115-L128
train
Parisson/TimeSide
timeside/plugins/grapher/utils.py
im_watermark
def im_watermark(im, inputtext, font=None, color=None, opacity=.6, margin=(30, 30)):
    """imprints a PIL image with the indicated text in lower-right corner"""
    if im.mode != "RGBA":
        im = im.convert("RGBA")
    textlayer = Image.new("RGBA", im.size, (0, 0, 0, 0))
    textdraw = ImageDraw.Draw(textlayer)
    textsize = textdraw.textsize(inputtext, font=font)
    textpos = [im.size[i] - textsize[i] - margin[i] for i in [0, 1]]
    textdraw.text(textpos, inputtext, font=font, fill=color)
    if opacity != 1:
        textlayer = reduce_opacity(textlayer, opacity)
    return Image.composite(textlayer, im, textlayer)
python
def im_watermark(im, inputtext, font=None, color=None, opacity=.6, margin=(30, 30)):
    """imprints a PIL image with the indicated text in lower-right corner"""
    if im.mode != "RGBA":
        im = im.convert("RGBA")
    textlayer = Image.new("RGBA", im.size, (0, 0, 0, 0))
    textdraw = ImageDraw.Draw(textlayer)
    textsize = textdraw.textsize(inputtext, font=font)
    textpos = [im.size[i] - textsize[i] - margin[i] for i in [0, 1]]
    textdraw.text(textpos, inputtext, font=font, fill=color)
    if opacity != 1:
        textlayer = reduce_opacity(textlayer, opacity)
    return Image.composite(textlayer, im, textlayer)
[ "def", "im_watermark", "(", "im", ",", "inputtext", ",", "font", "=", "None", ",", "color", "=", "None", ",", "opacity", "=", ".6", ",", "margin", "=", "(", "30", ",", "30", ")", ")", ":", "if", "im", ".", "mode", "!=", "\"RGBA\"", ":", "im", "=", "im", ".", "convert", "(", "\"RGBA\"", ")", "textlayer", "=", "Image", ".", "new", "(", "\"RGBA\"", ",", "im", ".", "size", ",", "(", "0", ",", "0", ",", "0", ",", "0", ")", ")", "textdraw", "=", "ImageDraw", ".", "Draw", "(", "textlayer", ")", "textsize", "=", "textdraw", ".", "textsize", "(", "inputtext", ",", "font", "=", "font", ")", "textpos", "=", "[", "im", ".", "size", "[", "i", "]", "-", "textsize", "[", "i", "]", "-", "margin", "[", "i", "]", "for", "i", "in", "[", "0", ",", "1", "]", "]", "textdraw", ".", "text", "(", "textpos", ",", "inputtext", ",", "font", "=", "font", ",", "fill", "=", "color", ")", "if", "opacity", "!=", "1", ":", "textlayer", "=", "reduce_opacity", "(", "textlayer", ",", "opacity", ")", "return", "Image", ".", "composite", "(", "textlayer", ",", "im", ",", "textlayer", ")" ]
imprints a PIL image with the indicated text in lower-right corner
[ "imprints", "a", "PIL", "image", "with", "the", "indicated", "text", "in", "lower", "-", "right", "corner" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/plugins/grapher/utils.py#L168-L179
train
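A hedged usage sketch for the im_watermark helper above. It assumes Pillow is installed, that the function is importable from the module path given in the record, and that the installed Pillow still provides ImageDraw.textsize (which the helper calls; newer Pillow releases removed it). Paths and colours are placeholders.

from PIL import Image
from timeside.plugins.grapher.utils import im_watermark  # import path taken from the record

im = Image.new("RGB", (400, 200), (20, 20, 20))                 # placeholder background image
marked = im_watermark(im, "timeside", color=(255, 255, 255), opacity=0.5)
marked.save("/tmp/watermarked.png")                             # hypothetical output path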
Parisson/TimeSide
timeside/plugins/analyzer/utils.py
nextpow2
def nextpow2(value):
    """Compute the nearest power of two greater or equal to the input value"""
    if value >= 1:
        return 2**np.ceil(np.log2(value)).astype(int)
    elif value > 0:
        return 1
    elif value == 0:
        return 0
    else:
        raise ValueError('Value must be positive')
python
def nextpow2(value):
    """Compute the nearest power of two greater or equal to the input value"""
    if value >= 1:
        return 2**np.ceil(np.log2(value)).astype(int)
    elif value > 0:
        return 1
    elif value == 0:
        return 0
    else:
        raise ValueError('Value must be positive')
[ "def", "nextpow2", "(", "value", ")", ":", "if", "value", ">=", "1", ":", "return", "2", "**", "np", ".", "ceil", "(", "np", ".", "log2", "(", "value", ")", ")", ".", "astype", "(", "int", ")", "elif", "value", ">", "0", ":", "return", "1", "elif", "value", "==", "0", ":", "return", "0", "else", ":", "raise", "ValueError", "(", "'Value must be positive'", ")" ]
Compute the nearest power of two greater or equal to the input value
[ "Compute", "the", "nearest", "power", "of", "two", "greater", "or", "equal", "to", "the", "input", "value" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/plugins/analyzer/utils.py#L65-L74
train
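A quick standalone check of the branch behaviour of nextpow2 above; it restates the same logic so it runs on its own, assuming only NumPy. The sample inputs are illustrative.

import numpy as np

def nextpow2(value):
    # Same logic as the TimeSide helper above.
    if value >= 1:
        return 2 ** np.ceil(np.log2(value)).astype(int)
    elif value > 0:
        return 1
    elif value == 0:
        return 0
    else:
        raise ValueError('Value must be positive')

print([int(nextpow2(v)) for v in (0, 0.3, 1, 5, 1024, 1025)])  # [0, 1, 1, 8, 1024, 2048]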
Parisson/TimeSide
timeside/core/processor.py
FixedSizeInputAdapter.blocksize
def blocksize(self, input_totalframes):
    """Return the total number of frames that this adapter will output
    according to the input_totalframes argument"""

    blocksize = input_totalframes

    if self.pad:
        mod = input_totalframes % self.buffer_size
        if mod:
            blocksize += self.buffer_size - mod

    return blocksize
python
def blocksize(self, input_totalframes):
    """Return the total number of frames that this adapter will output
    according to the input_totalframes argument"""

    blocksize = input_totalframes

    if self.pad:
        mod = input_totalframes % self.buffer_size
        if mod:
            blocksize += self.buffer_size - mod

    return blocksize
[ "def", "blocksize", "(", "self", ",", "input_totalframes", ")", ":", "blocksize", "=", "input_totalframes", "if", "self", ".", "pad", ":", "mod", "=", "input_totalframes", "%", "self", ".", "buffer_size", "if", "mod", ":", "blocksize", "+=", "self", ".", "buffer_size", "-", "mod", "return", "blocksize" ]
Return the total number of frames that this adapter will output according to the input_totalframes argument
[ "Return", "the", "total", "number", "of", "frames", "that", "this", "adapter", "will", "output", "according", "to", "the", "input_totalframes", "argument" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/core/processor.py#L218-L228
train
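A worked example of the padding arithmetic performed by FixedSizeInputAdapter.blocksize above, written as a standalone function rather than the TimeSide class itself.

def padded_blocksize(input_totalframes, buffer_size, pad=True):
    # Mirror of the adapter's logic: round up to a multiple of buffer_size when padding.
    blocksize = input_totalframes
    if pad:
        mod = input_totalframes % buffer_size
        if mod:
            blocksize += buffer_size - mod
    return blocksize

print(padded_blocksize(1000, 256))              # 1024: padded up to the next multiple of 256
print(padded_blocksize(1024, 256))              # 1024: already aligned, nothing added
print(padded_blocksize(1000, 256, pad=False))   # 1000: no padding requested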
Parisson/TimeSide
timeside/core/processor.py
ProcessPipe.append_processor
def append_processor(self, proc, source_proc=None): "Append a new processor to the pipe" if source_proc is None and len(self.processors): source_proc = self.processors[0] if source_proc and not isinstance(source_proc, Processor): raise TypeError('source_proc must be a Processor or None') if not isinstance(proc, Processor): raise TypeError('proc must be a Processor or None') if proc.type == 'decoder' and len(self.processors): raise ValueError('Only the first processor in a pipe could be a Decoder') # TODO : check if the processor is already in the pipe if source_proc: for child in self._graph.neighbors_iter(source_proc.uuid()): child_proc = self._graph.node[child]['processor'] if proc == child_proc: proc._uuid = child_proc.uuid() proc.process_pipe = self break if not self._graph.has_node(proc.uuid()): self.processors.append(proc) # Add processor to the pipe self._graph.add_node(proc.uuid(), processor=proc, id=proc.id()) if source_proc: self._graph.add_edge(self.processors[0].uuid(), proc.uuid(), type='audio_source') proc.process_pipe = self # Add an edge between each parent and proc for parent in proc.parents.values(): self._graph.add_edge(parent.uuid(), proc.uuid(), type='data_source')
python
def append_processor(self, proc, source_proc=None): "Append a new processor to the pipe" if source_proc is None and len(self.processors): source_proc = self.processors[0] if source_proc and not isinstance(source_proc, Processor): raise TypeError('source_proc must be a Processor or None') if not isinstance(proc, Processor): raise TypeError('proc must be a Processor or None') if proc.type == 'decoder' and len(self.processors): raise ValueError('Only the first processor in a pipe could be a Decoder') # TODO : check if the processor is already in the pipe if source_proc: for child in self._graph.neighbors_iter(source_proc.uuid()): child_proc = self._graph.node[child]['processor'] if proc == child_proc: proc._uuid = child_proc.uuid() proc.process_pipe = self break if not self._graph.has_node(proc.uuid()): self.processors.append(proc) # Add processor to the pipe self._graph.add_node(proc.uuid(), processor=proc, id=proc.id()) if source_proc: self._graph.add_edge(self.processors[0].uuid(), proc.uuid(), type='audio_source') proc.process_pipe = self # Add an edge between each parent and proc for parent in proc.parents.values(): self._graph.add_edge(parent.uuid(), proc.uuid(), type='data_source')
[ "def", "append_processor", "(", "self", ",", "proc", ",", "source_proc", "=", "None", ")", ":", "if", "source_proc", "is", "None", "and", "len", "(", "self", ".", "processors", ")", ":", "source_proc", "=", "self", ".", "processors", "[", "0", "]", "if", "source_proc", "and", "not", "isinstance", "(", "source_proc", ",", "Processor", ")", ":", "raise", "TypeError", "(", "'source_proc must be a Processor or None'", ")", "if", "not", "isinstance", "(", "proc", ",", "Processor", ")", ":", "raise", "TypeError", "(", "'proc must be a Processor or None'", ")", "if", "proc", ".", "type", "==", "'decoder'", "and", "len", "(", "self", ".", "processors", ")", ":", "raise", "ValueError", "(", "'Only the first processor in a pipe could be a Decoder'", ")", "# TODO : check if the processor is already in the pipe", "if", "source_proc", ":", "for", "child", "in", "self", ".", "_graph", ".", "neighbors_iter", "(", "source_proc", ".", "uuid", "(", ")", ")", ":", "child_proc", "=", "self", ".", "_graph", ".", "node", "[", "child", "]", "[", "'processor'", "]", "if", "proc", "==", "child_proc", ":", "proc", ".", "_uuid", "=", "child_proc", ".", "uuid", "(", ")", "proc", ".", "process_pipe", "=", "self", "break", "if", "not", "self", ".", "_graph", ".", "has_node", "(", "proc", ".", "uuid", "(", ")", ")", ":", "self", ".", "processors", ".", "append", "(", "proc", ")", "# Add processor to the pipe", "self", ".", "_graph", ".", "add_node", "(", "proc", ".", "uuid", "(", ")", ",", "processor", "=", "proc", ",", "id", "=", "proc", ".", "id", "(", ")", ")", "if", "source_proc", ":", "self", ".", "_graph", ".", "add_edge", "(", "self", ".", "processors", "[", "0", "]", ".", "uuid", "(", ")", ",", "proc", ".", "uuid", "(", ")", ",", "type", "=", "'audio_source'", ")", "proc", ".", "process_pipe", "=", "self", "# Add an edge between each parent and proc", "for", "parent", "in", "proc", ".", "parents", ".", "values", "(", ")", ":", "self", ".", "_graph", ".", "add_edge", "(", "parent", ".", "uuid", "(", ")", ",", "proc", ".", "uuid", "(", ")", ",", "type", "=", "'data_source'", ")" ]
Append a new processor to the pipe
[ "Append", "a", "new", "processor", "to", "the", "pipe" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/core/processor.py#L337-L369
train
Parisson/TimeSide
timeside/plugins/analyzer/externals/vamp_plugin.py
simple_host_process
def simple_host_process(argslist): """Call vamp-simple-host""" vamp_host = 'vamp-simple-host' command = [vamp_host] command.extend(argslist) # try ? stdout = subprocess.check_output(command, stderr=subprocess.STDOUT).splitlines() return stdout
python
def simple_host_process(argslist): """Call vamp-simple-host""" vamp_host = 'vamp-simple-host' command = [vamp_host] command.extend(argslist) # try ? stdout = subprocess.check_output(command, stderr=subprocess.STDOUT).splitlines() return stdout
[ "def", "simple_host_process", "(", "argslist", ")", ":", "vamp_host", "=", "'vamp-simple-host'", "command", "=", "[", "vamp_host", "]", "command", ".", "extend", "(", "argslist", ")", "# try ?", "stdout", "=", "subprocess", ".", "check_output", "(", "command", ",", "stderr", "=", "subprocess", ".", "STDOUT", ")", ".", "splitlines", "(", ")", "return", "stdout" ]
Call vamp-simple-host
[ "Call", "vamp", "-", "simple", "-", "host" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/plugins/analyzer/externals/vamp_plugin.py#L33-L43
train
Parisson/TimeSide
timeside/plugins/grapher/spectrogram_lin.py
SpectrogramLinear.set_scale
def set_scale(self):
    """generate the lookup which translates y-coordinate to fft-bin"""

    f_min = float(self.lower_freq)
    f_max = float(self.higher_freq)
    y_min = f_min
    y_max = f_max
    for y in range(self.image_height):
        freq = y_min + y / (self.image_height - 1.0) * (y_max - y_min)
        fft_bin = freq / f_max * (self.fft_size / 2 + 1)
        if fft_bin < self.fft_size / 2:
            alpha = fft_bin - int(fft_bin)
            self.y_to_bin.append((int(fft_bin), alpha * 255))
python
def set_scale(self):
    """generate the lookup which translates y-coordinate to fft-bin"""

    f_min = float(self.lower_freq)
    f_max = float(self.higher_freq)
    y_min = f_min
    y_max = f_max
    for y in range(self.image_height):
        freq = y_min + y / (self.image_height - 1.0) * (y_max - y_min)
        fft_bin = freq / f_max * (self.fft_size / 2 + 1)
        if fft_bin < self.fft_size / 2:
            alpha = fft_bin - int(fft_bin)
            self.y_to_bin.append((int(fft_bin), alpha * 255))
[ "def", "set_scale", "(", "self", ")", ":", "f_min", "=", "float", "(", "self", ".", "lower_freq", ")", "f_max", "=", "float", "(", "self", ".", "higher_freq", ")", "y_min", "=", "f_min", "y_max", "=", "f_max", "for", "y", "in", "range", "(", "self", ".", "image_height", ")", ":", "freq", "=", "y_min", "+", "y", "/", "(", "self", ".", "image_height", "-", "1.0", ")", "*", "(", "y_max", "-", "y_min", ")", "fft_bin", "=", "freq", "/", "f_max", "*", "(", "self", ".", "fft_size", "/", "2", "+", "1", ")", "if", "fft_bin", "<", "self", ".", "fft_size", "/", "2", ":", "alpha", "=", "fft_bin", "-", "int", "(", "fft_bin", ")", "self", ".", "y_to_bin", ".", "append", "(", "(", "int", "(", "fft_bin", ")", ",", "alpha", "*", "255", ")", ")" ]
generate the lookup which translates y-coordinate to fft-bin
[ "generate", "the", "lookup", "which", "translates", "y", "-", "coordinate", "to", "fft", "-", "bin" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/plugins/grapher/spectrogram_lin.py#L55-L67
train
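A standalone sketch of the y-coordinate to FFT-bin lookup built by SpectrogramLinear.set_scale above. The parameter values in the usage line are made up for illustration and are not TimeSide defaults.

def linear_y_to_bin(image_height, fft_size, lower_freq, higher_freq):
    # Map each output row to a fractional FFT bin; the fractional part becomes
    # an 8-bit interpolation weight, as in set_scale.
    f_min, f_max = float(lower_freq), float(higher_freq)
    lookup = []
    for y in range(image_height):
        freq = f_min + y / (image_height - 1.0) * (f_max - f_min)
        fft_bin = freq / f_max * (fft_size / 2 + 1)
        if fft_bin < fft_size / 2:
            alpha = fft_bin - int(fft_bin)
            lookup.append((int(fft_bin), alpha * 255))
    return lookup

# Hypothetical parameters:
print(linear_y_to_bin(image_height=8, fft_size=32, lower_freq=0, higher_freq=8000)[:3])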
Parisson/TimeSide
timeside/core/tools/hdf5.py
dict_from_hdf5
def dict_from_hdf5(dict_like, h5group):
    """
    Load a dictionnary-like object from a h5 file group
    """
    # Read attributes
    for name, value in h5group.attrs.items():
        dict_like[name] = value
python
def dict_from_hdf5(dict_like, h5group):
    """
    Load a dictionnary-like object from a h5 file group
    """
    # Read attributes
    for name, value in h5group.attrs.items():
        dict_like[name] = value
[ "def", "dict_from_hdf5", "(", "dict_like", ",", "h5group", ")", ":", "# Read attributes", "for", "name", ",", "value", "in", "h5group", ".", "attrs", ".", "items", "(", ")", ":", "dict_like", "[", "name", "]", "=", "value" ]
Load a dictionnary-like object from a h5 file group
[ "Load", "a", "dictionnary", "-", "like", "object", "from", "a", "h5", "file", "group" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/core/tools/hdf5.py#L34-L40
train
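A hedged round-trip sketch of the structure dict_from_hdf5 above expects: values stored as HDF5 group attributes and read back by iterating over attrs. Assumes h5py is available; the file path and attribute names are placeholders.

import h5py

# Write a couple of attributes into a group of a scratch file (placeholder path).
with h5py.File('/tmp/example.h5', 'w') as h5:
    grp = h5.create_group('parameters')
    grp.attrs['samplerate'] = 44100
    grp.attrs['label'] = 'demo'

# Read them back the same way dict_from_hdf5 does: iterate over the group's attributes.
result = {}
with h5py.File('/tmp/example.h5', 'r') as h5:
    for name, value in h5['parameters'].attrs.items():
        result[name] = value

print(result)  # {'samplerate': 44100, 'label': 'demo'} (value types depend on the h5py version)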
Parisson/TimeSide
timeside/plugins/decoder/array.py
ArrayDecoder.get_frames
def get_frames(self):
    "Define an iterator that will return frames at the given blocksize"
    nb_frames = self.input_totalframes // self.output_blocksize

    if self.input_totalframes % self.output_blocksize == 0:
        nb_frames -= 1  # Last frame must send eod=True

    for index in xrange(0,
                        nb_frames * self.output_blocksize,
                        self.output_blocksize):
        yield (self.samples[index:index + self.output_blocksize], False)

    yield (self.samples[nb_frames * self.output_blocksize:], True)
python
def get_frames(self):
    "Define an iterator that will return frames at the given blocksize"
    nb_frames = self.input_totalframes // self.output_blocksize

    if self.input_totalframes % self.output_blocksize == 0:
        nb_frames -= 1  # Last frame must send eod=True

    for index in xrange(0,
                        nb_frames * self.output_blocksize,
                        self.output_blocksize):
        yield (self.samples[index:index + self.output_blocksize], False)

    yield (self.samples[nb_frames * self.output_blocksize:], True)
[ "def", "get_frames", "(", "self", ")", ":", "nb_frames", "=", "self", ".", "input_totalframes", "//", "self", ".", "output_blocksize", "if", "self", ".", "input_totalframes", "%", "self", ".", "output_blocksize", "==", "0", ":", "nb_frames", "-=", "1", "# Last frame must send eod=True", "for", "index", "in", "xrange", "(", "0", ",", "nb_frames", "*", "self", ".", "output_blocksize", ",", "self", ".", "output_blocksize", ")", ":", "yield", "(", "self", ".", "samples", "[", "index", ":", "index", "+", "self", ".", "output_blocksize", "]", ",", "False", ")", "yield", "(", "self", ".", "samples", "[", "nb_frames", "*", "self", ".", "output_blocksize", ":", "]", ",", "True", ")" ]
Define an iterator that will return frames at the given blocksize
[ "Define", "an", "iterator", "that", "will", "return", "frames", "at", "the", "given", "blocksize" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/plugins/decoder/array.py#L113-L125
train
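A standalone sketch of the block iteration implemented by ArrayDecoder.get_frames above, ported to Python 3 (range instead of xrange) and run on a small NumPy array; it is an illustration, not the decoder itself.

import numpy as np

def iter_blocks(samples, blocksize):
    # Yield (block, eod) pairs; only the final block carries eod=True,
    # mirroring ArrayDecoder.get_frames.
    nb_frames = len(samples) // blocksize
    if len(samples) % blocksize == 0:
        nb_frames -= 1  # the last full block must be the one flagged eod=True
    for index in range(0, nb_frames * blocksize, blocksize):
        yield samples[index:index + blocksize], False
    yield samples[nb_frames * blocksize:], True

for block, eod in iter_blocks(np.arange(10), blocksize=4):
    print(block, eod)
# [0 1 2 3] False
# [4 5 6 7] False
# [8 9] True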
Parisson/TimeSide
timeside/core/component.py
implementations
def implementations(interface, recurse=True, abstract=False): """Returns the components implementing interface, and if recurse, any of the descendants of interface. If abstract is True, also return the abstract implementations.""" result = [] find_implementations(interface, recurse, abstract, result) return result
python
def implementations(interface, recurse=True, abstract=False): """Returns the components implementing interface, and if recurse, any of the descendants of interface. If abstract is True, also return the abstract implementations.""" result = [] find_implementations(interface, recurse, abstract, result) return result
[ "def", "implementations", "(", "interface", ",", "recurse", "=", "True", ",", "abstract", "=", "False", ")", ":", "result", "=", "[", "]", "find_implementations", "(", "interface", ",", "recurse", ",", "abstract", ",", "result", ")", "return", "result" ]
Returns the components implementing interface, and if recurse, any of the descendants of interface. If abstract is True, also return the abstract implementations.
[ "Returns", "the", "components", "implementing", "interface", "and", "if", "recurse", "any", "of", "the", "descendants", "of", "interface", ".", "If", "abstract", "is", "True", "also", "return", "the", "abstract", "implementations", "." ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/core/component.py#L65-L71
train
Parisson/TimeSide
timeside/core/component.py
find_implementations
def find_implementations(interface, recurse, abstract, result): """Find implementations of an interface or of one of its descendants and extend result with the classes found.""" for item in MetaComponent.implementations: if (item['interface'] == interface and (abstract or not item['abstract'])): extend_unique(result, [item['class']]) if recurse: subinterfaces = interface.__subclasses__() if subinterfaces: for i in subinterfaces: find_implementations(i, recurse, abstract, result)
python
def find_implementations(interface, recurse, abstract, result): """Find implementations of an interface or of one of its descendants and extend result with the classes found.""" for item in MetaComponent.implementations: if (item['interface'] == interface and (abstract or not item['abstract'])): extend_unique(result, [item['class']]) if recurse: subinterfaces = interface.__subclasses__() if subinterfaces: for i in subinterfaces: find_implementations(i, recurse, abstract, result)
[ "def", "find_implementations", "(", "interface", ",", "recurse", ",", "abstract", ",", "result", ")", ":", "for", "item", "in", "MetaComponent", ".", "implementations", ":", "if", "(", "item", "[", "'interface'", "]", "==", "interface", "and", "(", "abstract", "or", "not", "item", "[", "'abstract'", "]", ")", ")", ":", "extend_unique", "(", "result", ",", "[", "item", "[", "'class'", "]", "]", ")", "if", "recurse", ":", "subinterfaces", "=", "interface", ".", "__subclasses__", "(", ")", "if", "subinterfaces", ":", "for", "i", "in", "subinterfaces", ":", "find_implementations", "(", "i", ",", "recurse", ",", "abstract", ",", "result", ")" ]
Find implementations of an interface or of one of its descendants and extend result with the classes found.
[ "Find", "implementations", "of", "an", "interface", "or", "of", "one", "of", "its", "descendants", "and", "extend", "result", "with", "the", "classes", "found", "." ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/core/component.py#L141-L152
train
Parisson/TimeSide
timeside/core/grapher.py
Grapher.draw_peaks
def draw_peaks(self, x, peaks, line_color): """Draw 2 peaks at x""" y1 = self.image_height * 0.5 - peaks[0] * (self.image_height - 4) * 0.5 y2 = self.image_height * 0.5 - peaks[1] * (self.image_height - 4) * 0.5 if self.previous_y: self.draw.line( [self.previous_x, self.previous_y, x, y1, x, y2], line_color) else: self.draw.line([x, y1, x, y2], line_color) self.draw_anti_aliased_pixels(x, y1, y2, line_color) self.previous_x, self.previous_y = x, y2
python
def draw_peaks(self, x, peaks, line_color): """Draw 2 peaks at x""" y1 = self.image_height * 0.5 - peaks[0] * (self.image_height - 4) * 0.5 y2 = self.image_height * 0.5 - peaks[1] * (self.image_height - 4) * 0.5 if self.previous_y: self.draw.line( [self.previous_x, self.previous_y, x, y1, x, y2], line_color) else: self.draw.line([x, y1, x, y2], line_color) self.draw_anti_aliased_pixels(x, y1, y2, line_color) self.previous_x, self.previous_y = x, y2
[ "def", "draw_peaks", "(", "self", ",", "x", ",", "peaks", ",", "line_color", ")", ":", "y1", "=", "self", ".", "image_height", "*", "0.5", "-", "peaks", "[", "0", "]", "*", "(", "self", ".", "image_height", "-", "4", ")", "*", "0.5", "y2", "=", "self", ".", "image_height", "*", "0.5", "-", "peaks", "[", "1", "]", "*", "(", "self", ".", "image_height", "-", "4", ")", "*", "0.5", "if", "self", ".", "previous_y", ":", "self", ".", "draw", ".", "line", "(", "[", "self", ".", "previous_x", ",", "self", ".", "previous_y", ",", "x", ",", "y1", ",", "x", ",", "y2", "]", ",", "line_color", ")", "else", ":", "self", ".", "draw", ".", "line", "(", "[", "x", ",", "y1", ",", "x", ",", "y2", "]", ",", "line_color", ")", "self", ".", "draw_anti_aliased_pixels", "(", "x", ",", "y1", ",", "y2", ",", "line_color", ")", "self", ".", "previous_x", ",", "self", ".", "previous_y", "=", "x", ",", "y2" ]
Draw 2 peaks at x
[ "Draw", "2", "peaks", "at", "x" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/core/grapher.py#L193-L206
train
Parisson/TimeSide
timeside/core/grapher.py
Grapher.draw_peaks_inverted
def draw_peaks_inverted(self, x, peaks, line_color): """Draw 2 inverted peaks at x""" y1 = self.image_height * 0.5 - peaks[0] * (self.image_height - 4) * 0.5 y2 = self.image_height * 0.5 - peaks[1] * (self.image_height - 4) * 0.5 if self.previous_y and x < self.image_width - 1: if y1 < y2: self.draw.line((x, 0, x, y1), line_color) self.draw.line((x, self.image_height, x, y2), line_color) else: self.draw.line((x, 0, x, y2), line_color) self.draw.line((x, self.image_height, x, y1), line_color) else: self.draw.line((x, 0, x, self.image_height), line_color) self.draw_anti_aliased_pixels(x, y1, y2, line_color) self.previous_x, self.previous_y = x, y1
python
def draw_peaks_inverted(self, x, peaks, line_color): """Draw 2 inverted peaks at x""" y1 = self.image_height * 0.5 - peaks[0] * (self.image_height - 4) * 0.5 y2 = self.image_height * 0.5 - peaks[1] * (self.image_height - 4) * 0.5 if self.previous_y and x < self.image_width - 1: if y1 < y2: self.draw.line((x, 0, x, y1), line_color) self.draw.line((x, self.image_height, x, y2), line_color) else: self.draw.line((x, 0, x, y2), line_color) self.draw.line((x, self.image_height, x, y1), line_color) else: self.draw.line((x, 0, x, self.image_height), line_color) self.draw_anti_aliased_pixels(x, y1, y2, line_color) self.previous_x, self.previous_y = x, y1
[ "def", "draw_peaks_inverted", "(", "self", ",", "x", ",", "peaks", ",", "line_color", ")", ":", "y1", "=", "self", ".", "image_height", "*", "0.5", "-", "peaks", "[", "0", "]", "*", "(", "self", ".", "image_height", "-", "4", ")", "*", "0.5", "y2", "=", "self", ".", "image_height", "*", "0.5", "-", "peaks", "[", "1", "]", "*", "(", "self", ".", "image_height", "-", "4", ")", "*", "0.5", "if", "self", ".", "previous_y", "and", "x", "<", "self", ".", "image_width", "-", "1", ":", "if", "y1", "<", "y2", ":", "self", ".", "draw", ".", "line", "(", "(", "x", ",", "0", ",", "x", ",", "y1", ")", ",", "line_color", ")", "self", ".", "draw", ".", "line", "(", "(", "x", ",", "self", ".", "image_height", ",", "x", ",", "y2", ")", ",", "line_color", ")", "else", ":", "self", ".", "draw", ".", "line", "(", "(", "x", ",", "0", ",", "x", ",", "y2", ")", ",", "line_color", ")", "self", ".", "draw", ".", "line", "(", "(", "x", ",", "self", ".", "image_height", ",", "x", ",", "y1", ")", ",", "line_color", ")", "else", ":", "self", ".", "draw", ".", "line", "(", "(", "x", ",", "0", ",", "x", ",", "self", ".", "image_height", ")", ",", "line_color", ")", "self", ".", "draw_anti_aliased_pixels", "(", "x", ",", "y1", ",", "y2", ",", "line_color", ")", "self", ".", "previous_x", ",", "self", ".", "previous_y", "=", "x", ",", "y1" ]
Draw 2 inverted peaks at x
[ "Draw", "2", "inverted", "peaks", "at", "x" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/core/grapher.py#L208-L224
train
Parisson/TimeSide
timeside/core/grapher.py
Grapher.draw_anti_aliased_pixels
def draw_anti_aliased_pixels(self, x, y1, y2, color): """ vertical anti-aliasing at y1 and y2 """ y_max = max(y1, y2) y_max_int = int(y_max) alpha = y_max - y_max_int if alpha > 0.0 and alpha < 1.0 and y_max_int + 1 < self.image_height: current_pix = self.pixel[int(x), y_max_int + 1] r = int((1 - alpha) * current_pix[0] + alpha * color[0]) g = int((1 - alpha) * current_pix[1] + alpha * color[1]) b = int((1 - alpha) * current_pix[2] + alpha * color[2]) self.pixel[x, y_max_int + 1] = (r, g, b) y_min = min(y1, y2) y_min_int = int(y_min) alpha = 1.0 - (y_min - y_min_int) if alpha > 0.0 and alpha < 1.0 and y_min_int - 1 >= 0: current_pix = self.pixel[x, y_min_int - 1] r = int((1 - alpha) * current_pix[0] + alpha * color[0]) g = int((1 - alpha) * current_pix[1] + alpha * color[1]) b = int((1 - alpha) * current_pix[2] + alpha * color[2]) self.pixel[x, y_min_int - 1] = (r, g, b)
python
def draw_anti_aliased_pixels(self, x, y1, y2, color): """ vertical anti-aliasing at y1 and y2 """ y_max = max(y1, y2) y_max_int = int(y_max) alpha = y_max - y_max_int if alpha > 0.0 and alpha < 1.0 and y_max_int + 1 < self.image_height: current_pix = self.pixel[int(x), y_max_int + 1] r = int((1 - alpha) * current_pix[0] + alpha * color[0]) g = int((1 - alpha) * current_pix[1] + alpha * color[1]) b = int((1 - alpha) * current_pix[2] + alpha * color[2]) self.pixel[x, y_max_int + 1] = (r, g, b) y_min = min(y1, y2) y_min_int = int(y_min) alpha = 1.0 - (y_min - y_min_int) if alpha > 0.0 and alpha < 1.0 and y_min_int - 1 >= 0: current_pix = self.pixel[x, y_min_int - 1] r = int((1 - alpha) * current_pix[0] + alpha * color[0]) g = int((1 - alpha) * current_pix[1] + alpha * color[1]) b = int((1 - alpha) * current_pix[2] + alpha * color[2]) self.pixel[x, y_min_int - 1] = (r, g, b)
[ "def", "draw_anti_aliased_pixels", "(", "self", ",", "x", ",", "y1", ",", "y2", ",", "color", ")", ":", "y_max", "=", "max", "(", "y1", ",", "y2", ")", "y_max_int", "=", "int", "(", "y_max", ")", "alpha", "=", "y_max", "-", "y_max_int", "if", "alpha", ">", "0.0", "and", "alpha", "<", "1.0", "and", "y_max_int", "+", "1", "<", "self", ".", "image_height", ":", "current_pix", "=", "self", ".", "pixel", "[", "int", "(", "x", ")", ",", "y_max_int", "+", "1", "]", "r", "=", "int", "(", "(", "1", "-", "alpha", ")", "*", "current_pix", "[", "0", "]", "+", "alpha", "*", "color", "[", "0", "]", ")", "g", "=", "int", "(", "(", "1", "-", "alpha", ")", "*", "current_pix", "[", "1", "]", "+", "alpha", "*", "color", "[", "1", "]", ")", "b", "=", "int", "(", "(", "1", "-", "alpha", ")", "*", "current_pix", "[", "2", "]", "+", "alpha", "*", "color", "[", "2", "]", ")", "self", ".", "pixel", "[", "x", ",", "y_max_int", "+", "1", "]", "=", "(", "r", ",", "g", ",", "b", ")", "y_min", "=", "min", "(", "y1", ",", "y2", ")", "y_min_int", "=", "int", "(", "y_min", ")", "alpha", "=", "1.0", "-", "(", "y_min", "-", "y_min_int", ")", "if", "alpha", ">", "0.0", "and", "alpha", "<", "1.0", "and", "y_min_int", "-", "1", ">=", "0", ":", "current_pix", "=", "self", ".", "pixel", "[", "x", ",", "y_min_int", "-", "1", "]", "r", "=", "int", "(", "(", "1", "-", "alpha", ")", "*", "current_pix", "[", "0", "]", "+", "alpha", "*", "color", "[", "0", "]", ")", "g", "=", "int", "(", "(", "1", "-", "alpha", ")", "*", "current_pix", "[", "1", "]", "+", "alpha", "*", "color", "[", "1", "]", ")", "b", "=", "int", "(", "(", "1", "-", "alpha", ")", "*", "current_pix", "[", "2", "]", "+", "alpha", "*", "color", "[", "2", "]", ")", "self", ".", "pixel", "[", "x", ",", "y_min_int", "-", "1", "]", "=", "(", "r", ",", "g", ",", "b", ")" ]
vertical anti-aliasing at y1 and y2
[ "vertical", "anti", "-", "aliasing", "at", "y1", "and", "y2" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/core/grapher.py#L226-L249
train
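The vertical anti-aliasing in draw_anti_aliased_pixels above is a plain linear blend between the existing pixel and the line colour, weighted by the fractional part of the peak's y position. A small sketch of that blend, with made-up pixel values:

def blend(current_pix, color, alpha):
    # Linear interpolation used for the anti-aliased edge pixel;
    # alpha is the fractional part of the peak's y coordinate.
    return tuple(int((1 - alpha) * c + alpha * k) for c, k in zip(current_pix, color))

background = (0, 0, 0)
line_color = (255, 64, 0)
print(blend(background, line_color, 0.25))  # (63, 16, 0): a quarter of the line colour
print(blend(background, line_color, 0.75))  # (191, 48, 0): three quarters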
Parisson/TimeSide
timeside/plugins/grapher/spectrogram_log.py
SpectrogramLog.post_process
def post_process(self): """ Apply last 2D transforms""" self.image.putdata(self.pixels) self.image = self.image.transpose(Image.ROTATE_90)
python
def post_process(self): """ Apply last 2D transforms""" self.image.putdata(self.pixels) self.image = self.image.transpose(Image.ROTATE_90)
[ "def", "post_process", "(", "self", ")", ":", "self", ".", "image", ".", "putdata", "(", "self", ".", "pixels", ")", "self", ".", "image", "=", "self", ".", "image", ".", "transpose", "(", "Image", ".", "ROTATE_90", ")" ]
Apply last 2D transforms
[ "Apply", "last", "2D", "transforms" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/plugins/grapher/spectrogram_log.py#L105-L108
train
Parisson/TimeSide
timeside/plugins/encoder/mp3.py
Mp3Encoder.write_metadata
def write_metadata(self): """Write all ID3v2.4 tags to file from self.metadata""" import mutagen from mutagen import id3 id3 = id3.ID3(self.filename) for tag in self.metadata.keys(): value = self.metadata[tag] frame = mutagen.id3.Frames[tag](3, value) try: id3.add(frame) except: raise IOError('EncoderError: cannot tag "' + tag + '"') try: id3.save() except: raise IOError('EncoderError: cannot write tags')
python
def write_metadata(self): """Write all ID3v2.4 tags to file from self.metadata""" import mutagen from mutagen import id3 id3 = id3.ID3(self.filename) for tag in self.metadata.keys(): value = self.metadata[tag] frame = mutagen.id3.Frames[tag](3, value) try: id3.add(frame) except: raise IOError('EncoderError: cannot tag "' + tag + '"') try: id3.save() except: raise IOError('EncoderError: cannot write tags')
[ "def", "write_metadata", "(", "self", ")", ":", "import", "mutagen", "from", "mutagen", "import", "id3", "id3", "=", "id3", ".", "ID3", "(", "self", ".", "filename", ")", "for", "tag", "in", "self", ".", "metadata", ".", "keys", "(", ")", ":", "value", "=", "self", ".", "metadata", "[", "tag", "]", "frame", "=", "mutagen", ".", "id3", ".", "Frames", "[", "tag", "]", "(", "3", ",", "value", ")", "try", ":", "id3", ".", "add", "(", "frame", ")", "except", ":", "raise", "IOError", "(", "'EncoderError: cannot tag \"'", "+", "tag", "+", "'\"'", ")", "try", ":", "id3", ".", "save", "(", ")", "except", ":", "raise", "IOError", "(", "'EncoderError: cannot write tags'", ")" ]
Write all ID3v2.4 tags to file from self.metadata
[ "Write", "all", "ID3v2", ".", "4", "tags", "to", "file", "from", "self", ".", "metadata" ]
0618d75cd2f16021afcfd3d5b77f692adad76ea5
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/plugins/encoder/mp3.py#L94-L110
train
btimby/fulltext
fulltext/__main__.py
main
def main(args=sys.argv[1:]): """Extract text from a file. Commands: extract - extract text from path check - make sure all deps are installed Usage: fulltext extract [-v] [-f] <path>... fulltext check [-t] Options: -f, --file Open file first. -t, --title Check deps for title. -v, --verbose More verbose output. """ opt = docopt(main.__doc__.strip(), args, options_first=True) config_logging(opt['--verbose']) if opt['check']: check_backends(opt['--title']) elif opt['extract']: handler = fulltext.get if opt['--file']: handler = _handle_open for path in opt['<path>']: print(handler(path)) else: # we should never get here raise ValueError("don't know how to handle cmd")
python
def main(args=sys.argv[1:]): """Extract text from a file. Commands: extract - extract text from path check - make sure all deps are installed Usage: fulltext extract [-v] [-f] <path>... fulltext check [-t] Options: -f, --file Open file first. -t, --title Check deps for title. -v, --verbose More verbose output. """ opt = docopt(main.__doc__.strip(), args, options_first=True) config_logging(opt['--verbose']) if opt['check']: check_backends(opt['--title']) elif opt['extract']: handler = fulltext.get if opt['--file']: handler = _handle_open for path in opt['<path>']: print(handler(path)) else: # we should never get here raise ValueError("don't know how to handle cmd")
[ "def", "main", "(", "args", "=", "sys", ".", "argv", "[", "1", ":", "]", ")", ":", "opt", "=", "docopt", "(", "main", ".", "__doc__", ".", "strip", "(", ")", ",", "args", ",", "options_first", "=", "True", ")", "config_logging", "(", "opt", "[", "'--verbose'", "]", ")", "if", "opt", "[", "'check'", "]", ":", "check_backends", "(", "opt", "[", "'--title'", "]", ")", "elif", "opt", "[", "'extract'", "]", ":", "handler", "=", "fulltext", ".", "get", "if", "opt", "[", "'--file'", "]", ":", "handler", "=", "_handle_open", "for", "path", "in", "opt", "[", "'<path>'", "]", ":", "print", "(", "handler", "(", "path", ")", ")", "else", ":", "# we should never get here", "raise", "ValueError", "(", "\"don't know how to handle cmd\"", ")" ]
Extract text from a file.

Commands:
    extract - extract text from path
    check   - make sure all deps are installed

Usage:
    fulltext extract [-v] [-f] <path>...
    fulltext check [-t]

Options:
    -f, --file     Open file first.
    -t, --title    Check deps for title.
    -v, --verbose  More verbose output.
[ "Extract", "text", "from", "a", "file", "." ]
9234cc1e2099209430e20317649549026de283ce
https://github.com/btimby/fulltext/blob/9234cc1e2099209430e20317649549026de283ce/fulltext/__main__.py#L71-L103
train
btimby/fulltext
fulltext/__init__.py
is_binary
def is_binary(f): """Return True if binary mode.""" # NOTE: order matters here. We don't bail on Python 2 just yet. Both # codecs.open() and io.open() can open in text mode, both set the encoding # attribute. We must do that check first. # If it has a decoding attribute with a value, it is text mode. if getattr(f, "encoding", None): return False # Python 2 makes no further distinction. if not PY3: return True # If the file has a mode, and it contains b, it is binary. try: if 'b' in getattr(f, 'mode', ''): return True except TypeError: import gzip if isinstance(f, gzip.GzipFile): return True # in gzip mode is an integer raise # Can we sniff? try: f.seek(0, os.SEEK_CUR) except (AttributeError, IOError): return False # Finally, let's sniff by reading a byte. byte = f.read(1) f.seek(-1, os.SEEK_CUR) return hasattr(byte, 'decode')
python
def is_binary(f): """Return True if binary mode.""" # NOTE: order matters here. We don't bail on Python 2 just yet. Both # codecs.open() and io.open() can open in text mode, both set the encoding # attribute. We must do that check first. # If it has a decoding attribute with a value, it is text mode. if getattr(f, "encoding", None): return False # Python 2 makes no further distinction. if not PY3: return True # If the file has a mode, and it contains b, it is binary. try: if 'b' in getattr(f, 'mode', ''): return True except TypeError: import gzip if isinstance(f, gzip.GzipFile): return True # in gzip mode is an integer raise # Can we sniff? try: f.seek(0, os.SEEK_CUR) except (AttributeError, IOError): return False # Finally, let's sniff by reading a byte. byte = f.read(1) f.seek(-1, os.SEEK_CUR) return hasattr(byte, 'decode')
[ "def", "is_binary", "(", "f", ")", ":", "# NOTE: order matters here. We don't bail on Python 2 just yet. Both", "# codecs.open() and io.open() can open in text mode, both set the encoding", "# attribute. We must do that check first.", "# If it has a decoding attribute with a value, it is text mode.", "if", "getattr", "(", "f", ",", "\"encoding\"", ",", "None", ")", ":", "return", "False", "# Python 2 makes no further distinction.", "if", "not", "PY3", ":", "return", "True", "# If the file has a mode, and it contains b, it is binary.", "try", ":", "if", "'b'", "in", "getattr", "(", "f", ",", "'mode'", ",", "''", ")", ":", "return", "True", "except", "TypeError", ":", "import", "gzip", "if", "isinstance", "(", "f", ",", "gzip", ".", "GzipFile", ")", ":", "return", "True", "# in gzip mode is an integer", "raise", "# Can we sniff?", "try", ":", "f", ".", "seek", "(", "0", ",", "os", ".", "SEEK_CUR", ")", "except", "(", "AttributeError", ",", "IOError", ")", ":", "return", "False", "# Finally, let's sniff by reading a byte.", "byte", "=", "f", ".", "read", "(", "1", ")", "f", ".", "seek", "(", "-", "1", ",", "os", ".", "SEEK_CUR", ")", "return", "hasattr", "(", "byte", ",", "'decode'", ")" ]
Return True if binary mode.
[ "Return", "True", "if", "binary", "mode", "." ]
9234cc1e2099209430e20317649549026de283ce
https://github.com/btimby/fulltext/blob/9234cc1e2099209430e20317649549026de283ce/fulltext/__init__.py#L329-L362
train
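A quick usage sketch of the mode detection performed by is_binary above. It assumes the function can be imported from the fulltext package as the record's path suggests; the temporary file path is a placeholder.

import io
from fulltext import is_binary  # import path taken from the record's path field

print(is_binary(io.BytesIO(b"raw bytes")))  # True: sniffed by reading one byte, which is bytes

path = '/tmp/example.bin'                   # placeholder file
with open(path, 'wb') as f:
    f.write(b'\x00\x01\x02')

with open(path, 'rb') as f:
    print(is_binary(f))                     # True: 'b' appears in the file mode
with open(path, 'r') as f:
    print(is_binary(f))                     # False: text mode carries an encoding attribute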
btimby/fulltext
fulltext/__init__.py
handle_path
def handle_path(backend_inst, path, **kwargs): """ Handle a path. Called by `get()` when provided a path. This function will prefer the backend's `handle_path()` if one is provided Otherwise, it will open the given path then use `handle_fobj()`. """ if callable(getattr(backend_inst, 'handle_path', None)): # Prefer handle_path() if present. LOGGER.debug("using handle_path") return backend_inst.handle_path(path) elif callable(getattr(backend_inst, 'handle_fobj', None)): # Fallback to handle_fobj(). No warning here since the performance hit # is minimal. LOGGER.debug("using handle_fobj") with open(path, 'rb') as f: return backend_inst.handle_fobj(f) else: raise AssertionError( 'Backend %s has no _get functions' % backend_inst.__name__)
python
def handle_path(backend_inst, path, **kwargs): """ Handle a path. Called by `get()` when provided a path. This function will prefer the backend's `handle_path()` if one is provided Otherwise, it will open the given path then use `handle_fobj()`. """ if callable(getattr(backend_inst, 'handle_path', None)): # Prefer handle_path() if present. LOGGER.debug("using handle_path") return backend_inst.handle_path(path) elif callable(getattr(backend_inst, 'handle_fobj', None)): # Fallback to handle_fobj(). No warning here since the performance hit # is minimal. LOGGER.debug("using handle_fobj") with open(path, 'rb') as f: return backend_inst.handle_fobj(f) else: raise AssertionError( 'Backend %s has no _get functions' % backend_inst.__name__)
[ "def", "handle_path", "(", "backend_inst", ",", "path", ",", "*", "*", "kwargs", ")", ":", "if", "callable", "(", "getattr", "(", "backend_inst", ",", "'handle_path'", ",", "None", ")", ")", ":", "# Prefer handle_path() if present.", "LOGGER", ".", "debug", "(", "\"using handle_path\"", ")", "return", "backend_inst", ".", "handle_path", "(", "path", ")", "elif", "callable", "(", "getattr", "(", "backend_inst", ",", "'handle_fobj'", ",", "None", ")", ")", ":", "# Fallback to handle_fobj(). No warning here since the performance hit", "# is minimal.", "LOGGER", ".", "debug", "(", "\"using handle_fobj\"", ")", "with", "open", "(", "path", ",", "'rb'", ")", "as", "f", ":", "return", "backend_inst", ".", "handle_fobj", "(", "f", ")", "else", ":", "raise", "AssertionError", "(", "'Backend %s has no _get functions'", "%", "backend_inst", ".", "__name__", ")" ]
Handle a path. Called by `get()` when provided a path. This function will prefer the backend's `handle_path()` if one is provided Otherwise, it will open the given path then use `handle_fobj()`.
[ "Handle", "a", "path", "." ]
9234cc1e2099209430e20317649549026de283ce
https://github.com/btimby/fulltext/blob/9234cc1e2099209430e20317649549026de283ce/fulltext/__init__.py#L365-L387
train
btimby/fulltext
fulltext/__init__.py
handle_fobj
def handle_fobj(backend, f, **kwargs): """ Handle a file-like object. Called by `get()` when provided a file-like. This function will prefer the backend's `handle_fobj()` if one is provided. Otherwise, it will write the data to a temporary file and call `handle_path()`. """ if not is_binary(f): raise AssertionError('File must be opened in binary mode.') if callable(getattr(backend, 'handle_fobj', None)): # Prefer handle_fobj() if present. LOGGER.debug("using handle_fobj") return backend.handle_fobj(f) elif callable(getattr(backend, 'handle_path', None)): # Fallback to handle_path(). Warn user since this is potentially # expensive. LOGGER.debug("using handle_path") LOGGER.warning( "Using disk, %r backend does not provide `handle_fobj()`", backend) ext = '' if 'ext' in kwargs: ext = '.' + kwargs['ext'] with fobj_to_tempfile(f, suffix=ext) as fname: return backend.handle_path(fname, **kwargs) else: raise AssertionError( 'Backend %s has no _get functions' % backend.__name__)
python
def handle_fobj(backend, f, **kwargs): """ Handle a file-like object. Called by `get()` when provided a file-like. This function will prefer the backend's `handle_fobj()` if one is provided. Otherwise, it will write the data to a temporary file and call `handle_path()`. """ if not is_binary(f): raise AssertionError('File must be opened in binary mode.') if callable(getattr(backend, 'handle_fobj', None)): # Prefer handle_fobj() if present. LOGGER.debug("using handle_fobj") return backend.handle_fobj(f) elif callable(getattr(backend, 'handle_path', None)): # Fallback to handle_path(). Warn user since this is potentially # expensive. LOGGER.debug("using handle_path") LOGGER.warning( "Using disk, %r backend does not provide `handle_fobj()`", backend) ext = '' if 'ext' in kwargs: ext = '.' + kwargs['ext'] with fobj_to_tempfile(f, suffix=ext) as fname: return backend.handle_path(fname, **kwargs) else: raise AssertionError( 'Backend %s has no _get functions' % backend.__name__)
[ "def", "handle_fobj", "(", "backend", ",", "f", ",", "*", "*", "kwargs", ")", ":", "if", "not", "is_binary", "(", "f", ")", ":", "raise", "AssertionError", "(", "'File must be opened in binary mode.'", ")", "if", "callable", "(", "getattr", "(", "backend", ",", "'handle_fobj'", ",", "None", ")", ")", ":", "# Prefer handle_fobj() if present.", "LOGGER", ".", "debug", "(", "\"using handle_fobj\"", ")", "return", "backend", ".", "handle_fobj", "(", "f", ")", "elif", "callable", "(", "getattr", "(", "backend", ",", "'handle_path'", ",", "None", ")", ")", ":", "# Fallback to handle_path(). Warn user since this is potentially", "# expensive.", "LOGGER", ".", "debug", "(", "\"using handle_path\"", ")", "LOGGER", ".", "warning", "(", "\"Using disk, %r backend does not provide `handle_fobj()`\"", ",", "backend", ")", "ext", "=", "''", "if", "'ext'", "in", "kwargs", ":", "ext", "=", "'.'", "+", "kwargs", "[", "'ext'", "]", "with", "fobj_to_tempfile", "(", "f", ",", "suffix", "=", "ext", ")", "as", "fname", ":", "return", "backend", ".", "handle_path", "(", "fname", ",", "*", "*", "kwargs", ")", "else", ":", "raise", "AssertionError", "(", "'Backend %s has no _get functions'", "%", "backend", ".", "__name__", ")" ]
Handle a file-like object. Called by `get()` when provided a file-like. This function will prefer the backend's `handle_fobj()` if one is provided. Otherwise, it will write the data to a temporary file and call `handle_path()`.
[ "Handle", "a", "file", "-", "like", "object", "." ]
9234cc1e2099209430e20317649549026de283ce
https://github.com/btimby/fulltext/blob/9234cc1e2099209430e20317649549026de283ce/fulltext/__init__.py#L390-L421
train
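A companion sketch for handle_fobj(), again with a hypothetical backend and file name: it shows the binary-mode requirement and the temp-file fallback used when only handle_path() is available.

class PathOnlyBackend:
    """Hypothetical backend that only knows how to read paths."""
    def handle_path(self, path, **kwargs):
        with open(path, 'rb') as fobj:
            return fobj.read().decode('utf8', 'replace')

with open('notes.txt', 'rb') as f:  # must be opened in binary mode, or AssertionError
    # The backend has no handle_fobj(): the stream is spooled to a temporary
    # file via fobj_to_tempfile() and the backend's handle_path() handles it.
    text = handle_fobj(PathOnlyBackend(), f, ext='txt')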
btimby/fulltext
fulltext/__init__.py
backend_from_mime
def backend_from_mime(mime): """Determine backend module object from a mime string.""" try: mod_name = MIMETYPE_TO_BACKENDS[mime] except KeyError: msg = "No handler for %r, defaulting to %r" % (mime, DEFAULT_MIME) if 'FULLTEXT_TESTING' in os.environ: warn(msg) else: LOGGER.debug(msg) mod_name = MIMETYPE_TO_BACKENDS[DEFAULT_MIME] mod = import_mod(mod_name) return mod
python
def backend_from_mime(mime): """Determine backend module object from a mime string.""" try: mod_name = MIMETYPE_TO_BACKENDS[mime] except KeyError: msg = "No handler for %r, defaulting to %r" % (mime, DEFAULT_MIME) if 'FULLTEXT_TESTING' in os.environ: warn(msg) else: LOGGER.debug(msg) mod_name = MIMETYPE_TO_BACKENDS[DEFAULT_MIME] mod = import_mod(mod_name) return mod
[ "def", "backend_from_mime", "(", "mime", ")", ":", "try", ":", "mod_name", "=", "MIMETYPE_TO_BACKENDS", "[", "mime", "]", "except", "KeyError", ":", "msg", "=", "\"No handler for %r, defaulting to %r\"", "%", "(", "mime", ",", "DEFAULT_MIME", ")", "if", "'FULLTEXT_TESTING'", "in", "os", ".", "environ", ":", "warn", "(", "msg", ")", "else", ":", "LOGGER", ".", "debug", "(", "msg", ")", "mod_name", "=", "MIMETYPE_TO_BACKENDS", "[", "DEFAULT_MIME", "]", "mod", "=", "import_mod", "(", "mod_name", ")", "return", "mod" ]
Determine backend module object from a mime string.
[ "Determine", "backend", "module", "object", "from", "a", "mime", "string", "." ]
9234cc1e2099209430e20317649549026de283ce
https://github.com/btimby/fulltext/blob/9234cc1e2099209430e20317649549026de283ce/fulltext/__init__.py#L428-L442
train
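A short sketch of backend_from_mime(); that 'application/pdf' is actually registered in MIMETYPE_TO_BACKENDS is an assumption here, but any unregistered type falls back to the DEFAULT_MIME backend.

pdf_mod = backend_from_mime('application/pdf')                 # assumed to be registered
fallback_mod = backend_from_mime('application/x-nonexistent')  # falls back to DEFAULT_MIME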
btimby/fulltext
fulltext/__init__.py
backend_from_fname
def backend_from_fname(name): """Determine backend module object from a file name.""" ext = splitext(name)[1] try: mime = EXTS_TO_MIMETYPES[ext] except KeyError: try: f = open(name, 'rb') except IOError as e: # The file may not exist, we are being asked to determine it's type # from it's name. Other errors are unexpected. if e.errno != errno.ENOENT: raise # We will have to fall back upon the default backend. msg = "No handler for %r, defaulting to %r" % (ext, DEFAULT_MIME) if 'FULLTEXT_TESTING' in os.environ: warn(msg) else: LOGGER.debug(msg) mod_name = MIMETYPE_TO_BACKENDS[DEFAULT_MIME] else: with f: return backend_from_fobj(f) else: mod_name = MIMETYPE_TO_BACKENDS[mime] mod = import_mod(mod_name) return mod
python
def backend_from_fname(name): """Determine backend module object from a file name.""" ext = splitext(name)[1] try: mime = EXTS_TO_MIMETYPES[ext] except KeyError: try: f = open(name, 'rb') except IOError as e: # The file may not exist, we are being asked to determine it's type # from it's name. Other errors are unexpected. if e.errno != errno.ENOENT: raise # We will have to fall back upon the default backend. msg = "No handler for %r, defaulting to %r" % (ext, DEFAULT_MIME) if 'FULLTEXT_TESTING' in os.environ: warn(msg) else: LOGGER.debug(msg) mod_name = MIMETYPE_TO_BACKENDS[DEFAULT_MIME] else: with f: return backend_from_fobj(f) else: mod_name = MIMETYPE_TO_BACKENDS[mime] mod = import_mod(mod_name) return mod
[ "def", "backend_from_fname", "(", "name", ")", ":", "ext", "=", "splitext", "(", "name", ")", "[", "1", "]", "try", ":", "mime", "=", "EXTS_TO_MIMETYPES", "[", "ext", "]", "except", "KeyError", ":", "try", ":", "f", "=", "open", "(", "name", ",", "'rb'", ")", "except", "IOError", "as", "e", ":", "# The file may not exist, we are being asked to determine it's type", "# from it's name. Other errors are unexpected.", "if", "e", ".", "errno", "!=", "errno", ".", "ENOENT", ":", "raise", "# We will have to fall back upon the default backend.", "msg", "=", "\"No handler for %r, defaulting to %r\"", "%", "(", "ext", ",", "DEFAULT_MIME", ")", "if", "'FULLTEXT_TESTING'", "in", "os", ".", "environ", ":", "warn", "(", "msg", ")", "else", ":", "LOGGER", ".", "debug", "(", "msg", ")", "mod_name", "=", "MIMETYPE_TO_BACKENDS", "[", "DEFAULT_MIME", "]", "else", ":", "with", "f", ":", "return", "backend_from_fobj", "(", "f", ")", "else", ":", "mod_name", "=", "MIMETYPE_TO_BACKENDS", "[", "mime", "]", "mod", "=", "import_mod", "(", "mod_name", ")", "return", "mod" ]
Determine backend module object from a file name.
[ "Determine", "backend", "module", "object", "from", "a", "file", "name", "." ]
9234cc1e2099209430e20317649549026de283ce
https://github.com/btimby/fulltext/blob/9234cc1e2099209430e20317649549026de283ce/fulltext/__init__.py#L445-L479
train
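A sketch of backend_from_fname(); the file names are illustrative, and '.pdf' being present in EXTS_TO_MIMETYPES is an assumption.

mod = backend_from_fname('report.pdf')       # extension is known: no disk access needed
mod = backend_from_fname('data.unknownext')  # unknown extension: sniff the file contents
                                             # if it exists, else use the default backend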
btimby/fulltext
fulltext/__init__.py
backend_from_fobj
def backend_from_fobj(f): """Determine backend module object from a file object.""" if magic is None: warn("magic lib is not installed; assuming mime type %r" % ( DEFAULT_MIME)) return backend_from_mime(DEFAULT_MIME) else: offset = f.tell() try: f.seek(0) chunk = f.read(MAGIC_BUFFER_SIZE) mime = magic.from_buffer(chunk, mime=True) return backend_from_mime(mime) finally: f.seek(offset)
python
def backend_from_fobj(f): """Determine backend module object from a file object.""" if magic is None: warn("magic lib is not installed; assuming mime type %r" % ( DEFAULT_MIME)) return backend_from_mime(DEFAULT_MIME) else: offset = f.tell() try: f.seek(0) chunk = f.read(MAGIC_BUFFER_SIZE) mime = magic.from_buffer(chunk, mime=True) return backend_from_mime(mime) finally: f.seek(offset)
[ "def", "backend_from_fobj", "(", "f", ")", ":", "if", "magic", "is", "None", ":", "warn", "(", "\"magic lib is not installed; assuming mime type %r\"", "%", "(", "DEFAULT_MIME", ")", ")", "return", "backend_from_mime", "(", "DEFAULT_MIME", ")", "else", ":", "offset", "=", "f", ".", "tell", "(", ")", "try", ":", "f", ".", "seek", "(", "0", ")", "chunk", "=", "f", ".", "read", "(", "MAGIC_BUFFER_SIZE", ")", "mime", "=", "magic", ".", "from_buffer", "(", "chunk", ",", "mime", "=", "True", ")", "return", "backend_from_mime", "(", "mime", ")", "finally", ":", "f", ".", "seek", "(", "offset", ")" ]
Determine backend module object from a file object.
[ "Determine", "backend", "module", "object", "from", "a", "file", "object", "." ]
9234cc1e2099209430e20317649549026de283ce
https://github.com/btimby/fulltext/blob/9234cc1e2099209430e20317649549026de283ce/fulltext/__init__.py#L482-L496
train
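A sketch of backend_from_fobj() showing that the caller's file position survives the sniffing; 'report.pdf' is an illustrative file assumed to be at least 100 bytes long.

with open('report.pdf', 'rb') as f:
    f.seek(100)                    # pretend the caller already consumed some bytes
    mod = backend_from_fobj(f)     # reads up to MAGIC_BUFFER_SIZE bytes from offset 0
    assert f.tell() == 100         # original position is restored afterwards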
btimby/fulltext
fulltext/__init__.py
backend_inst_from_mod
def backend_inst_from_mod(mod, encoding, encoding_errors, kwargs): """Given a mod and a set of opts return an instantiated Backend class. """ kw = dict(encoding=encoding, encoding_errors=encoding_errors, kwargs=kwargs) try: klass = getattr(mod, "Backend") except AttributeError: raise AttributeError("%r mod does not define any backend class" % mod) inst = klass(**kw) try: inst.check(title=False) except Exception as err: bin_mod = "fulltext.backends.__bin" warn("can't use %r due to %r; use %r backend instead" % ( mod, str(err), bin_mod)) inst = import_mod(bin_mod).Backend(**kw) inst.check(title=False) LOGGER.debug("using %r" % inst) return inst
python
def backend_inst_from_mod(mod, encoding, encoding_errors, kwargs): """Given a mod and a set of opts return an instantiated Backend class. """ kw = dict(encoding=encoding, encoding_errors=encoding_errors, kwargs=kwargs) try: klass = getattr(mod, "Backend") except AttributeError: raise AttributeError("%r mod does not define any backend class" % mod) inst = klass(**kw) try: inst.check(title=False) except Exception as err: bin_mod = "fulltext.backends.__bin" warn("can't use %r due to %r; use %r backend instead" % ( mod, str(err), bin_mod)) inst = import_mod(bin_mod).Backend(**kw) inst.check(title=False) LOGGER.debug("using %r" % inst) return inst
[ "def", "backend_inst_from_mod", "(", "mod", ",", "encoding", ",", "encoding_errors", ",", "kwargs", ")", ":", "kw", "=", "dict", "(", "encoding", "=", "encoding", ",", "encoding_errors", "=", "encoding_errors", ",", "kwargs", "=", "kwargs", ")", "try", ":", "klass", "=", "getattr", "(", "mod", ",", "\"Backend\"", ")", "except", "AttributeError", ":", "raise", "AttributeError", "(", "\"%r mod does not define any backend class\"", "%", "mod", ")", "inst", "=", "klass", "(", "*", "*", "kw", ")", "try", ":", "inst", ".", "check", "(", "title", "=", "False", ")", "except", "Exception", "as", "err", ":", "bin_mod", "=", "\"fulltext.backends.__bin\"", "warn", "(", "\"can't use %r due to %r; use %r backend instead\"", "%", "(", "mod", ",", "str", "(", "err", ")", ",", "bin_mod", ")", ")", "inst", "=", "import_mod", "(", "bin_mod", ")", ".", "Backend", "(", "*", "*", "kw", ")", "inst", ".", "check", "(", "title", "=", "False", ")", "LOGGER", ".", "debug", "(", "\"using %r\"", "%", "inst", ")", "return", "inst" ]
Given a mod and a set of opts return an instantiated Backend class.
[ "Given", "a", "mod", "and", "a", "set", "of", "opts", "return", "an", "instantiated", "Backend", "class", "." ]
9234cc1e2099209430e20317649549026de283ce
https://github.com/btimby/fulltext/blob/9234cc1e2099209430e20317649549026de283ce/fulltext/__init__.py#L499-L519
train
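A sketch of backend_inst_from_mod(); the encoding values mirror the defaults mentioned in get()'s docstring and are passed explicitly here only for illustration.

mod = backend_from_fname('report.pdf')
inst = backend_inst_from_mod(mod, encoding='utf8',
                             encoding_errors='strict', kwargs={})
# inst.check() has already run; had it failed (e.g. a missing CLI tool), the
# generic fulltext.backends.__bin backend would have been substituted.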
btimby/fulltext
fulltext/__init__.py
get
def get(path_or_file, default=SENTINAL, mime=None, name=None, backend=None, encoding=None, encoding_errors=None, kwargs=None, _wtitle=False): """ Get document full text. Accepts a path or file-like object. * If given, `default` is returned instead of an error. * `backend` is either a module object or a string specifying which default backend to use (e.g. "doc"); take a look at backends directory to see a list of default backends. * `mime` and `name` should be passed if the information is available to caller, otherwise a best guess is made. If both are specified `mime` takes precedence. * `encoding` and `encoding_errors` are used to handle text encoding. They are taken into consideration mostly only by pure-python backends which do not rely on CLI tools. Default to "utf8" and "strict" respectively. * `kwargs` are passed to the underlying backend. """ try: text, title = _get( path_or_file, default=default, mime=mime, name=name, backend=backend, kwargs=kwargs, encoding=encoding, encoding_errors=encoding_errors, _wtitle=_wtitle) if _wtitle: return (text, title) else: return text except Exception as e: if default is not SENTINAL: LOGGER.exception(e) return default raise
python
def get(path_or_file, default=SENTINAL, mime=None, name=None, backend=None, encoding=None, encoding_errors=None, kwargs=None, _wtitle=False): """ Get document full text. Accepts a path or file-like object. * If given, `default` is returned instead of an error. * `backend` is either a module object or a string specifying which default backend to use (e.g. "doc"); take a look at backends directory to see a list of default backends. * `mime` and `name` should be passed if the information is available to caller, otherwise a best guess is made. If both are specified `mime` takes precedence. * `encoding` and `encoding_errors` are used to handle text encoding. They are taken into consideration mostly only by pure-python backends which do not rely on CLI tools. Default to "utf8" and "strict" respectively. * `kwargs` are passed to the underlying backend. """ try: text, title = _get( path_or_file, default=default, mime=mime, name=name, backend=backend, kwargs=kwargs, encoding=encoding, encoding_errors=encoding_errors, _wtitle=_wtitle) if _wtitle: return (text, title) else: return text except Exception as e: if default is not SENTINAL: LOGGER.exception(e) return default raise
[ "def", "get", "(", "path_or_file", ",", "default", "=", "SENTINAL", ",", "mime", "=", "None", ",", "name", "=", "None", ",", "backend", "=", "None", ",", "encoding", "=", "None", ",", "encoding_errors", "=", "None", ",", "kwargs", "=", "None", ",", "_wtitle", "=", "False", ")", ":", "try", ":", "text", ",", "title", "=", "_get", "(", "path_or_file", ",", "default", "=", "default", ",", "mime", "=", "mime", ",", "name", "=", "name", ",", "backend", "=", "backend", ",", "kwargs", "=", "kwargs", ",", "encoding", "=", "encoding", ",", "encoding_errors", "=", "encoding_errors", ",", "_wtitle", "=", "_wtitle", ")", "if", "_wtitle", ":", "return", "(", "text", ",", "title", ")", "else", ":", "return", "text", "except", "Exception", "as", "e", ":", "if", "default", "is", "not", "SENTINAL", ":", "LOGGER", ".", "exception", "(", "e", ")", "return", "default", "raise" ]
Get document full text. Accepts a path or file-like object. * If given, `default` is returned instead of an error. * `backend` is either a module object or a string specifying which default backend to use (e.g. "doc"); take a look at backends directory to see a list of default backends. * `mime` and `name` should be passed if the information is available to caller, otherwise a best guess is made. If both are specified `mime` takes precedence. * `encoding` and `encoding_errors` are used to handle text encoding. They are taken into consideration mostly only by pure-python backends which do not rely on CLI tools. Default to "utf8" and "strict" respectively. * `kwargs` are passed to the underlying backend.
[ "Get", "document", "full", "text", "." ]
9234cc1e2099209430e20317649549026de283ce
https://github.com/btimby/fulltext/blob/9234cc1e2099209430e20317649549026de283ce/fulltext/__init__.py#L585-L618
train
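Typical calls to the public get() entry point; the file names are placeholders.

import fulltext

text = fulltext.get('report.pdf')                     # path in, extracted text out
text = fulltext.get('maybe-missing.doc', default='')  # errors are logged, default returned

with open('report.pdf', 'rb') as f:                   # file-like objects must be binary
    text = fulltext.get(f, name='report.pdf')         # name/mime help pick the backend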
btimby/fulltext
fulltext/util.py
hilite
def hilite(s, ok=True, bold=False): """Return an highlighted version of 'string'.""" if not term_supports_colors(): return s attr = [] if ok is None: # no color pass elif ok: # green attr.append('32') else: # red attr.append('31') if bold: attr.append('1') return '\x1b[%sm%s\x1b[0m' % (';'.join(attr), s)
python
def hilite(s, ok=True, bold=False): """Return an highlighted version of 'string'.""" if not term_supports_colors(): return s attr = [] if ok is None: # no color pass elif ok: # green attr.append('32') else: # red attr.append('31') if bold: attr.append('1') return '\x1b[%sm%s\x1b[0m' % (';'.join(attr), s)
[ "def", "hilite", "(", "s", ",", "ok", "=", "True", ",", "bold", "=", "False", ")", ":", "if", "not", "term_supports_colors", "(", ")", ":", "return", "s", "attr", "=", "[", "]", "if", "ok", "is", "None", ":", "# no color", "pass", "elif", "ok", ":", "# green", "attr", ".", "append", "(", "'32'", ")", "else", ":", "# red", "attr", ".", "append", "(", "'31'", ")", "if", "bold", ":", "attr", ".", "append", "(", "'1'", ")", "return", "'\\x1b[%sm%s\\x1b[0m'", "%", "(", "';'", ".", "join", "(", "attr", ")", ",", "s", ")" ]
Return a highlighted version of 'string'.
[ "Return", "an", "highlighted", "version", "of", "string", "." ]
9234cc1e2099209430e20317649549026de283ce
https://github.com/btimby/fulltext/blob/9234cc1e2099209430e20317649549026de283ce/fulltext/util.py#L254-L267
train
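A quick sketch of hilite(), assuming it is imported from fulltext.util; when the terminal does not support colors the string is returned unchanged.

print(hilite('OK'))                           # green
print(hilite('FAILED', ok=False, bold=True))  # bold red
print(hilite('skipped', ok=None))             # no color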
btimby/fulltext
fulltext/util.py
fobj_to_tempfile
def fobj_to_tempfile(f, suffix=''): """Context manager which copies a file object to disk and return its name. When done the file is deleted. """ with tempfile.NamedTemporaryFile( dir=TEMPDIR, suffix=suffix, delete=False) as t: shutil.copyfileobj(f, t) try: yield t.name finally: os.remove(t.name)
python
def fobj_to_tempfile(f, suffix=''): """Context manager which copies a file object to disk and return its name. When done the file is deleted. """ with tempfile.NamedTemporaryFile( dir=TEMPDIR, suffix=suffix, delete=False) as t: shutil.copyfileobj(f, t) try: yield t.name finally: os.remove(t.name)
[ "def", "fobj_to_tempfile", "(", "f", ",", "suffix", "=", "''", ")", ":", "with", "tempfile", ".", "NamedTemporaryFile", "(", "dir", "=", "TEMPDIR", ",", "suffix", "=", "suffix", ",", "delete", "=", "False", ")", "as", "t", ":", "shutil", ".", "copyfileobj", "(", "f", ",", "t", ")", "try", ":", "yield", "t", ".", "name", "finally", ":", "os", ".", "remove", "(", "t", ".", "name", ")" ]
Context manager which copies a file object to disk and returns its name. When done, the file is deleted.
[ "Context", "manager", "which", "copies", "a", "file", "object", "to", "disk", "and", "return", "its", "name", ".", "When", "done", "the", "file", "is", "deleted", "." ]
9234cc1e2099209430e20317649549026de283ce
https://github.com/btimby/fulltext/blob/9234cc1e2099209430e20317649549026de283ce/fulltext/util.py#L308-L318
train
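A sketch of fobj_to_tempfile() with an in-memory stream; the suffix is optional and only matters to backends that key off the file extension.

import io

buf = io.BytesIO(b'hello world')
with fobj_to_tempfile(buf, suffix='.txt') as fname:
    with open(fname, 'rb') as f:
        assert f.read() == b'hello world'
# the temporary file has been deleted at this point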