repository_name: stringlengths 7-55
func_path_in_repository: stringlengths 4-223
func_name: stringlengths 1-134
whole_func_string: stringlengths 75-104k
language: stringclasses, 1 value
func_code_string: stringlengths 75-104k
func_code_tokens: sequencelengths 19-28.4k
func_documentation_string: stringlengths 1-46.9k
func_documentation_tokens: sequencelengths 1-1.97k
split_name: stringclasses, 1 value
func_code_url: stringlengths 87-315
gamechanger/schemer
schemer/validators.py
lte
def lte(max_value):
    """
    Validates that a field value is less than or equal to the value
    given to this validator.
    """
    def validate(value):
        if value > max_value:
            return e("{} is not less than or equal to {}", value, max_value)
    return validate
python
def lte(max_value):
    """
    Validates that a field value is less than or equal to the value
    given to this validator.
    """
    def validate(value):
        if value > max_value:
            return e("{} is not less than or equal to {}", value, max_value)
    return validate
[ "def", "lte", "(", "max_value", ")", ":", "def", "validate", "(", "value", ")", ":", "if", "value", ">", "max_value", ":", "return", "e", "(", "\"{} is not less than or equal to {}\"", ",", "value", ",", "max_value", ")", "return", "validate" ]
Validates that a field value is less than or equal to the value given to this validator.
[ "Validates", "that", "a", "field", "value", "is", "less", "than", "or", "equal", "to", "the", "value", "given", "to", "this", "validator", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L35-L43
gamechanger/schemer
schemer/validators.py
gt
def gt(gt_value):
    """
    Validates that a field value is greater than the value
    given to this validator.
    """
    def validate(value):
        if value <= gt_value:
            return e("{} is not greater than {}", value, gt_value)
    return validate
python
def gt(gt_value):
    """
    Validates that a field value is greater than the value
    given to this validator.
    """
    def validate(value):
        if value <= gt_value:
            return e("{} is not greater than {}", value, gt_value)
    return validate
[ "def", "gt", "(", "gt_value", ")", ":", "def", "validate", "(", "value", ")", ":", "if", "value", "<=", "gt_value", ":", "return", "e", "(", "\"{} is not greater than {}\"", ",", "value", ",", "gt_value", ")", "return", "validate" ]
Validates that a field value is greater than the value given to this validator.
[ "Validates", "that", "a", "field", "value", "is", "greater", "than", "the", "value", "given", "to", "this", "validator", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L46-L54
gamechanger/schemer
schemer/validators.py
lt
def lt(lt_value):
    """
    Validates that a field value is less than the value
    given to this validator.
    """
    def validate(value):
        if value >= lt_value:
            return e("{} is not less than {}", value, lt_value)
    return validate
python
def lt(lt_value):
    """
    Validates that a field value is less than the value
    given to this validator.
    """
    def validate(value):
        if value >= lt_value:
            return e("{} is not less than {}", value, lt_value)
    return validate
[ "def", "lt", "(", "lt_value", ")", ":", "def", "validate", "(", "value", ")", ":", "if", "value", ">=", "lt_value", ":", "return", "e", "(", "\"{} is not less than {}\"", ",", "value", ",", "lt_value", ")", "return", "validate" ]
Validates that a field value is less than the value given to this validator.
[ "Validates", "that", "a", "field", "value", "is", "less", "than", "the", "value", "given", "to", "this", "validator", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L57-L65
gamechanger/schemer
schemer/validators.py
between
def between(min_value, max_value):
    """
    Validates that a field value is between the two values
    given to this validator.
    """
    def validate(value):
        if value < min_value:
            return e("{} is not greater than or equal to {}", value, min_value)

        if value > max_value:
            return e("{} is not less than or equal to {}", value, max_value)

    return validate
python
def between(min_value, max_value):
    """
    Validates that a field value is between the two values
    given to this validator.
    """
    def validate(value):
        if value < min_value:
            return e("{} is not greater than or equal to {}", value, min_value)

        if value > max_value:
            return e("{} is not less than or equal to {}", value, max_value)

    return validate
[ "def", "between", "(", "min_value", ",", "max_value", ")", ":", "def", "validate", "(", "value", ")", ":", "if", "value", "<", "min_value", ":", "return", "e", "(", "\"{} is not greater than or equal to {}\"", ",", "value", ",", "min_value", ")", "if", "value", ">", "max_value", ":", "return", "e", "(", "\"{} is not less than or equal to {}\"", ",", "value", ",", "max_value", ")", "return", "validate" ]
Validates that a field value is between the two values given to this validator.
[ "Validates", "that", "a", "field", "value", "is", "between", "the", "two", "values", "given", "to", "this", "validator", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L68-L80
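The comparison validators in the records above (lte, gt, lt, between) all share one closure pattern: the factory captures its bound(s) and returns a one-argument validate function that returns an error string on failure and falls through to None on success. A minimal standalone sketch, assuming schemer is installed and that its e() helper simply formats the message it is given:

from schemer.validators import between

check = between(1, 10)
print(check(5))    # None: 5 lies within [1, 10], so no error is produced
print(check(42))   # an error string along the lines of "42 is not less than or equal to 10"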
gamechanger/schemer
schemer/validators.py
length
def length(min=None, max=None):
    """
    Validates that a field value's length is between the bounds given to
    this validator.
    """
    def validate(value):
        if min and len(value) < min:
            return e("{} does not have a length of at least {}", value, min)

        if max and len(value) > max:
            return e("{} does not have a length of at most {}", value, max)

    return validate
python
def length(min=None, max=None):
    """
    Validates that a field value's length is between the bounds given to
    this validator.
    """
    def validate(value):
        if min and len(value) < min:
            return e("{} does not have a length of at least {}", value, min)

        if max and len(value) > max:
            return e("{} does not have a length of at most {}", value, max)

    return validate
[ "def", "length", "(", "min", "=", "None", ",", "max", "=", "None", ")", ":", "def", "validate", "(", "value", ")", ":", "if", "min", "and", "len", "(", "value", ")", "<", "min", ":", "return", "e", "(", "\"{} does not have a length of at least {}\"", ",", "value", ",", "min", ")", "if", "max", "and", "len", "(", "value", ")", ">", "max", ":", "return", "e", "(", "\"{} does not have a length of at most {}\"", ",", "value", ",", "max", ")", "return", "validate" ]
Validates that a field value's length is between the bounds given to this validator.
[ "Validates", "that", "a", "field", "value", "s", "length", "is", "between", "the", "bounds", "given", "to", "this", "validator", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L83-L94
gamechanger/schemer
schemer/validators.py
match
def match(pattern):
    """
    Validates that a field value matches the regex given to this validator.
    """
    regex = re.compile(pattern)

    def validate(value):
        if not regex.match(value):
            return e("{} does not match the pattern {}", value, pattern)
    return validate
python
def match(pattern):
    """
    Validates that a field value matches the regex given to this validator.
    """
    regex = re.compile(pattern)

    def validate(value):
        if not regex.match(value):
            return e("{} does not match the pattern {}", value, pattern)
    return validate
[ "def", "match", "(", "pattern", ")", ":", "regex", "=", "re", ".", "compile", "(", "pattern", ")", "def", "validate", "(", "value", ")", ":", "if", "not", "regex", ".", "match", "(", "value", ")", ":", "return", "e", "(", "\"{} does not match the pattern {}\"", ",", "value", ",", "pattern", ")", "return", "validate" ]
Validates that a field value matches the regex given to this validator.
[ "Validates", "that", "a", "field", "value", "matches", "the", "regex", "given", "to", "this", "validator", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L97-L106
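Note that match compiles its pattern once, outside the returned closure, so repeated validations reuse the compiled regex. A short sketch with a purely illustrative pattern:

from schemer.validators import match

slug_check = match(r"^[a-z0-9-]+$")
print(slug_check("release-notes"))   # None: the value matches the pattern
print(slug_check("Release Notes"))   # error string naming both the value and the pattern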
gamechanger/schemer
schemer/validators.py
is_email
def is_email():
    """
    Validates that a fields value is a valid email address.
    """
    email = (
        ur'(?!^\.)'       # No dot at start
        ur'(?!.*\.@)'     # No dot before at sign
        ur'(?!.*@\.)'     # No dot after at sign
        ur'(?!.*\.$)'     # No dot at the end
        ur'(?!.*\.\.)'    # No double dots anywhere
        ur'^\S+'          # Starts with one or more non-whitespace characters
        ur'@'             # Contains an at sign
        ur'\S+$'          # Ends with one or more non-whitespace characters
    )
    regex = re.compile(email, re.IGNORECASE | re.UNICODE)

    def validate(value):
        if not regex.match(value):
            return e("{} is not a valid email address", value)

    return validate
python
def is_email():
    """
    Validates that a fields value is a valid email address.
    """
    email = (
        ur'(?!^\.)'       # No dot at start
        ur'(?!.*\.@)'     # No dot before at sign
        ur'(?!.*@\.)'     # No dot after at sign
        ur'(?!.*\.$)'     # No dot at the end
        ur'(?!.*\.\.)'    # No double dots anywhere
        ur'^\S+'          # Starts with one or more non-whitespace characters
        ur'@'             # Contains an at sign
        ur'\S+$'          # Ends with one or more non-whitespace characters
    )
    regex = re.compile(email, re.IGNORECASE | re.UNICODE)

    def validate(value):
        if not regex.match(value):
            return e("{} is not a valid email address", value)

    return validate
[ "def", "is_email", "(", ")", ":", "email", "=", "(", "ur'(?!^\\.)'", "# No dot at start", "ur'(?!.*\\.@)'", "# No dot before at sign", "ur'(?!.*@\\.)'", "# No dot after at sign", "ur'(?!.*\\.$)'", "# No dot at the end", "ur'(?!.*\\.\\.)'", "# No double dots anywhere", "ur'^\\S+'", "# Starts with one or more non-whitespace characters", "ur'@'", "# Contains an at sign", "ur'\\S+$'", "# Ends with one or more non-whitespace characters", ")", "regex", "=", "re", ".", "compile", "(", "email", ",", "re", ".", "IGNORECASE", "|", "re", ".", "UNICODE", ")", "def", "validate", "(", "value", ")", ":", "if", "not", "regex", ".", "match", "(", "value", ")", ":", "return", "e", "(", "\"{} is not a valid email address\"", ",", "value", ")", "return", "validate" ]
Validates that a fields value is a valid email address.
[ "Validates", "that", "a", "fields", "value", "is", "a", "valid", "email", "address", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L108-L129
gamechanger/schemer
schemer/validators.py
is_url
def is_url():
    """
    Validates that a fields value is a valid URL.
    """
    # Stolen from Django
    regex = re.compile(
        r'^(?:http|ftp)s?://'  # http:// or https://
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  #domain...
        r'localhost|'  #localhost...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)

    def validate(value):
        if not regex.match(value):
            return e("{} is not a valid URL", value)

    return validate
python
def is_url():
    """
    Validates that a fields value is a valid URL.
    """
    # Stolen from Django
    regex = re.compile(
        r'^(?:http|ftp)s?://'  # http:// or https://
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  #domain...
        r'localhost|'  #localhost...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)

    def validate(value):
        if not regex.match(value):
            return e("{} is not a valid URL", value)

    return validate
[ "def", "is_url", "(", ")", ":", "# Stolen from Django", "regex", "=", "re", ".", "compile", "(", "r'^(?:http|ftp)s?://'", "# http:// or https://", "r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\\.)+(?:[A-Z]{2,6}\\.?|[A-Z0-9-]{2,}\\.?)|'", "#domain...", "r'localhost|'", "#localhost...", "r'\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3})'", "# ...or ip", "r'(?::\\d+)?'", "# optional port", "r'(?:/?|[/?]\\S+)$'", ",", "re", ".", "IGNORECASE", ")", "def", "validate", "(", "value", ")", ":", "if", "not", "regex", ".", "match", "(", "value", ")", ":", "return", "e", "(", "\"{} is not a valid URL\"", ",", "value", ")", "return", "validate" ]
Validates that a fields value is a valid URL.
[ "Validates", "that", "a", "fields", "value", "is", "a", "valid", "URL", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L131-L147
gamechanger/schemer
schemer/validators.py
each_item
def each_item(*validators):
    """
    A wrapper which applies the given validators to each item in a field
    value of type `list`.

    Example usage in a Schema:

    "my_list_field": {"type": Array(int), "validates": each_item(lte(10))}
    """
    def validate(value):
        for item in value:
            for validator in validators:
                error = validator(item)
                if error:
                    return error
        return None
    return validate
python
def each_item(*validators):
    """
    A wrapper which applies the given validators to each item in a field
    value of type `list`.

    Example usage in a Schema:

    "my_list_field": {"type": Array(int), "validates": each_item(lte(10))}
    """
    def validate(value):
        for item in value:
            for validator in validators:
                error = validator(item)
                if error:
                    return error
        return None
    return validate
[ "def", "each_item", "(", "*", "validators", ")", ":", "def", "validate", "(", "value", ")", ":", "for", "item", "in", "value", ":", "for", "validator", "in", "validators", ":", "error", "=", "validator", "(", "item", ")", "if", "error", ":", "return", "error", "return", "None", "return", "validate" ]
A wrapper which applies the given validators to each item in a field value of type `list`. Example usage in a Schema: "my_list_field": {"type": Array(int), "validates": each_item(lte(10))}
[ "A", "wrapper", "which", "applies", "the", "given", "validators", "to", "each", "item", "in", "a", "field", "value", "of", "type", "list", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L150-L166
gamechanger/schemer
schemer/validators.py
distinct
def distinct():
    """
    Validates that all items in the given field list value are distinct,
    i.e. that the list contains no duplicates.
    """
    def validate(value):
        for i, item in enumerate(value):
            if item in value[i+1:]:
                return e("{} is not a distinct set of values", value)
    return validate
python
def distinct():
    """
    Validates that all items in the given field list value are distinct,
    i.e. that the list contains no duplicates.
    """
    def validate(value):
        for i, item in enumerate(value):
            if item in value[i+1:]:
                return e("{} is not a distinct set of values", value)
    return validate
[ "def", "distinct", "(", ")", ":", "def", "validate", "(", "value", ")", ":", "for", "i", ",", "item", "in", "enumerate", "(", "value", ")", ":", "if", "item", "in", "value", "[", "i", "+", "1", ":", "]", ":", "return", "e", "(", "\"{} is not a distinct set of values\"", ",", "value", ")", "return", "validate" ]
Validates that all items in the given field list value are distinct, i.e. that the list contains no duplicates.
[ "Validates", "that", "all", "items", "in", "the", "given", "field", "list", "value", "are", "distinct", "i", ".", "e", ".", "that", "the", "list", "contains", "no", "duplicates", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/validators.py#L169-L178
gamechanger/schemer
schemer/__init__.py
Schema.apply_defaults
def apply_defaults(self, instance):
    """Applies the defaults described by the this schema to the given
    document instance as appropriate. Defaults are only applied to
    fields which are currently unset."""
    for field, spec in self.doc_spec.iteritems():
        field_type = spec['type']

        if field not in instance:
            if 'default' in spec:
                default = spec['default']
                if callable(default):
                    instance[field] = default()
                else:
                    instance[field] = copy.deepcopy(default)

        # Determine if a value already exists for the field
        if field in instance:
            value = instance[field]

            # recurse into nested docs
            if isinstance(field_type, Schema) and isinstance(value, dict):
                field_type.apply_defaults(value)
            elif isinstance(field_type, Array) and isinstance(field_type.contained_type, Schema) and isinstance(value, list):
                for item in value:
                    field_type.contained_type.apply_defaults(item)
python
def apply_defaults(self, instance):
    """Applies the defaults described by the this schema to the given
    document instance as appropriate. Defaults are only applied to
    fields which are currently unset."""
    for field, spec in self.doc_spec.iteritems():
        field_type = spec['type']

        if field not in instance:
            if 'default' in spec:
                default = spec['default']
                if callable(default):
                    instance[field] = default()
                else:
                    instance[field] = copy.deepcopy(default)

        # Determine if a value already exists for the field
        if field in instance:
            value = instance[field]

            # recurse into nested docs
            if isinstance(field_type, Schema) and isinstance(value, dict):
                field_type.apply_defaults(value)
            elif isinstance(field_type, Array) and isinstance(field_type.contained_type, Schema) and isinstance(value, list):
                for item in value:
                    field_type.contained_type.apply_defaults(item)
[ "def", "apply_defaults", "(", "self", ",", "instance", ")", ":", "for", "field", ",", "spec", "in", "self", ".", "doc_spec", ".", "iteritems", "(", ")", ":", "field_type", "=", "spec", "[", "'type'", "]", "if", "field", "not", "in", "instance", ":", "if", "'default'", "in", "spec", ":", "default", "=", "spec", "[", "'default'", "]", "if", "callable", "(", "default", ")", ":", "instance", "[", "field", "]", "=", "default", "(", ")", "else", ":", "instance", "[", "field", "]", "=", "copy", ".", "deepcopy", "(", "default", ")", "# Determine if a value already exists for the field", "if", "field", "in", "instance", ":", "value", "=", "instance", "[", "field", "]", "# recurse into nested docs", "if", "isinstance", "(", "field_type", ",", "Schema", ")", "and", "isinstance", "(", "value", ",", "dict", ")", ":", "field_type", ".", "apply_defaults", "(", "value", ")", "elif", "isinstance", "(", "field_type", ",", "Array", ")", "and", "isinstance", "(", "field_type", ".", "contained_type", ",", "Schema", ")", "and", "isinstance", "(", "value", ",", "list", ")", ":", "for", "item", "in", "value", ":", "field_type", ".", "contained_type", ".", "apply_defaults", "(", "item", ")" ]
Applies the defaults described by the this schema to the given document instance as appropriate. Defaults are only applied to fields which are currently unset.
[ "Applies", "the", "defaults", "described", "by", "the", "this", "schema", "to", "the", "given", "document", "instance", "as", "appropriate", ".", "Defaults", "are", "only", "applied", "to", "fields", "which", "are", "currently", "unset", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L26-L49
gamechanger/schemer
schemer/__init__.py
Schema.validate
def validate(self, instance):
    """Validates the given document against this schema. Raises a
    ValidationException if there are any failures."""
    errors = {}
    self._validate_instance(instance, errors)

    if len(errors) > 0:
        raise ValidationException(errors)
python
def validate(self, instance):
    """Validates the given document against this schema. Raises a
    ValidationException if there are any failures."""
    errors = {}
    self._validate_instance(instance, errors)

    if len(errors) > 0:
        raise ValidationException(errors)
[ "def", "validate", "(", "self", ",", "instance", ")", ":", "errors", "=", "{", "}", "self", ".", "_validate_instance", "(", "instance", ",", "errors", ")", "if", "len", "(", "errors", ")", ">", "0", ":", "raise", "ValidationException", "(", "errors", ")" ]
Validates the given document against this schema. Raises a ValidationException if there are any failures.
[ "Validates", "the", "given", "document", "against", "this", "schema", ".", "Raises", "a", "ValidationException", "if", "there", "are", "any", "failures", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L51-L58
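Read together, apply_defaults and validate suggest the usage sketch below. The Schema(...) constructor shape (a dict doc spec keyed by field name) and the import locations are assumed from the doc_spec handling in these records rather than confirmed by them:

from schemer import Schema, Array, ValidationException   # import paths assumed
from schemer.validators import length, each_item, lte

# Assumed constructor shape: a dict doc spec keyed by field name.
card_schema = Schema({
    "title": {"type": str, "required": True, "validates": length(min=1, max=80)},
    "scores": {"type": Array(int), "validates": each_item(lte(10)), "default": []},
})

doc = {"title": "Season opener"}
card_schema.apply_defaults(doc)      # fills in the missing "scores": [] default
try:
    card_schema.validate(doc)        # raises ValidationException when errors exist
except ValidationException as exc:
    print(exc)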
gamechanger/schemer
schemer/__init__.py
Schema._verify
def _verify(self, path_prefix=None):
    """Verifies that this schema's doc spec is valid and makes sense."""
    for field, spec in self.doc_spec.iteritems():
        path = self._append_path(path_prefix, field)

        # Standard dict-based spec
        if isinstance(spec, dict):
            self._verify_field_spec(spec, path)
        else:
            raise SchemaFormatException("Invalid field definition for {}", path)
python
def _verify(self, path_prefix=None):
    """Verifies that this schema's doc spec is valid and makes sense."""
    for field, spec in self.doc_spec.iteritems():
        path = self._append_path(path_prefix, field)

        # Standard dict-based spec
        if isinstance(spec, dict):
            self._verify_field_spec(spec, path)
        else:
            raise SchemaFormatException("Invalid field definition for {}", path)
[ "def", "_verify", "(", "self", ",", "path_prefix", "=", "None", ")", ":", "for", "field", ",", "spec", "in", "self", ".", "doc_spec", ".", "iteritems", "(", ")", ":", "path", "=", "self", ".", "_append_path", "(", "path_prefix", ",", "field", ")", "# Standard dict-based spec", "if", "isinstance", "(", "spec", ",", "dict", ")", ":", "self", ".", "_verify_field_spec", "(", "spec", ",", "path", ")", "else", ":", "raise", "SchemaFormatException", "(", "\"Invalid field definition for {}\"", ",", "path", ")" ]
Verifies that this schema's doc spec is valid and makes sense.
[ "Verifies", "that", "this", "schema", "s", "doc", "spec", "is", "valid", "and", "makes", "sense", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L67-L76
gamechanger/schemer
schemer/__init__.py
Schema._verify_field_spec
def _verify_field_spec(self, spec, path):
    """Verifies a given field specification is valid, recursing into
    nested schemas if required."""

    # Required should be a boolean
    if 'required' in spec and not isinstance(spec['required'], bool):
        raise SchemaFormatException("{} required declaration should be True or False", path)

    # Required should be a boolean
    if 'nullable' in spec and not isinstance(spec['nullable'], bool):
        raise SchemaFormatException("{} nullable declaration should be True or False", path)

    # Must have a type specified
    if 'type' not in spec:
        raise SchemaFormatException("{} has no type declared.", path)

    self._verify_type(spec, path)

    # Validations should be either a single function or array of functions
    if 'validates' in spec:
        self._verify_validates(spec, path)

    # Defaults must be of the correct type or a function
    if 'default' in spec:
        self._verify_default(spec, path)

    # Only expected spec keys are supported
    if not set(spec.keys()).issubset(set(['type', 'required', 'validates', 'default', 'nullable'])):
        raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
python
def _verify_field_spec(self, spec, path):
    """Verifies a given field specification is valid, recursing into
    nested schemas if required."""

    # Required should be a boolean
    if 'required' in spec and not isinstance(spec['required'], bool):
        raise SchemaFormatException("{} required declaration should be True or False", path)

    # Required should be a boolean
    if 'nullable' in spec and not isinstance(spec['nullable'], bool):
        raise SchemaFormatException("{} nullable declaration should be True or False", path)

    # Must have a type specified
    if 'type' not in spec:
        raise SchemaFormatException("{} has no type declared.", path)

    self._verify_type(spec, path)

    # Validations should be either a single function or array of functions
    if 'validates' in spec:
        self._verify_validates(spec, path)

    # Defaults must be of the correct type or a function
    if 'default' in spec:
        self._verify_default(spec, path)

    # Only expected spec keys are supported
    if not set(spec.keys()).issubset(set(['type', 'required', 'validates', 'default', 'nullable'])):
        raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
[ "def", "_verify_field_spec", "(", "self", ",", "spec", ",", "path", ")", ":", "# Required should be a boolean", "if", "'required'", "in", "spec", "and", "not", "isinstance", "(", "spec", "[", "'required'", "]", ",", "bool", ")", ":", "raise", "SchemaFormatException", "(", "\"{} required declaration should be True or False\"", ",", "path", ")", "# Required should be a boolean", "if", "'nullable'", "in", "spec", "and", "not", "isinstance", "(", "spec", "[", "'nullable'", "]", ",", "bool", ")", ":", "raise", "SchemaFormatException", "(", "\"{} nullable declaration should be True or False\"", ",", "path", ")", "# Must have a type specified", "if", "'type'", "not", "in", "spec", ":", "raise", "SchemaFormatException", "(", "\"{} has no type declared.\"", ",", "path", ")", "self", ".", "_verify_type", "(", "spec", ",", "path", ")", "# Validations should be either a single function or array of functions", "if", "'validates'", "in", "spec", ":", "self", ".", "_verify_validates", "(", "spec", ",", "path", ")", "# Defaults must be of the correct type or a function", "if", "'default'", "in", "spec", ":", "self", ".", "_verify_default", "(", "spec", ",", "path", ")", "# Only expected spec keys are supported", "if", "not", "set", "(", "spec", ".", "keys", "(", ")", ")", ".", "issubset", "(", "set", "(", "[", "'type'", ",", "'required'", ",", "'validates'", ",", "'default'", ",", "'nullable'", "]", ")", ")", ":", "raise", "SchemaFormatException", "(", "\"Unsupported field spec item at {}. Items: \"", "+", "repr", "(", "spec", ".", "keys", "(", ")", ")", ",", "path", ")" ]
Verifies a given field specification is valid, recursing into nested schemas if required.
[ "Verifies", "a", "given", "field", "specification", "is", "valid", "recursing", "into", "nested", "schemas", "if", "required", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L79-L106
gamechanger/schemer
schemer/__init__.py
Schema._verify_type
def _verify_type(self, spec, path):
    """Verify that the 'type' in the spec is valid"""
    field_type = spec['type']

    if isinstance(field_type, Schema):
        # Nested documents cannot have validation
        if not set(spec.keys()).issubset(set(['type', 'required', 'nullable', 'default'])):
            raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
        return

    elif isinstance(field_type, Array):
        if not isinstance(field_type.contained_type, (type, Schema, Array, types.FunctionType)):
            raise SchemaFormatException("Unsupported field type contained by Array at {}.", path)

    elif not isinstance(field_type, type) and not isinstance(field_type, types.FunctionType):
        raise SchemaFormatException("Unsupported field type at {}. Type must be a type, a function, an Array or another Schema", path)
python
def _verify_type(self, spec, path):
    """Verify that the 'type' in the spec is valid"""
    field_type = spec['type']

    if isinstance(field_type, Schema):
        # Nested documents cannot have validation
        if not set(spec.keys()).issubset(set(['type', 'required', 'nullable', 'default'])):
            raise SchemaFormatException("Unsupported field spec item at {}. Items: "+repr(spec.keys()), path)
        return

    elif isinstance(field_type, Array):
        if not isinstance(field_type.contained_type, (type, Schema, Array, types.FunctionType)):
            raise SchemaFormatException("Unsupported field type contained by Array at {}.", path)

    elif not isinstance(field_type, type) and not isinstance(field_type, types.FunctionType):
        raise SchemaFormatException("Unsupported field type at {}. Type must be a type, a function, an Array or another Schema", path)
[ "def", "_verify_type", "(", "self", ",", "spec", ",", "path", ")", ":", "field_type", "=", "spec", "[", "'type'", "]", "if", "isinstance", "(", "field_type", ",", "Schema", ")", ":", "# Nested documents cannot have validation", "if", "not", "set", "(", "spec", ".", "keys", "(", ")", ")", ".", "issubset", "(", "set", "(", "[", "'type'", ",", "'required'", ",", "'nullable'", ",", "'default'", "]", ")", ")", ":", "raise", "SchemaFormatException", "(", "\"Unsupported field spec item at {}. Items: \"", "+", "repr", "(", "spec", ".", "keys", "(", ")", ")", ",", "path", ")", "return", "elif", "isinstance", "(", "field_type", ",", "Array", ")", ":", "if", "not", "isinstance", "(", "field_type", ".", "contained_type", ",", "(", "type", ",", "Schema", ",", "Array", ",", "types", ".", "FunctionType", ")", ")", ":", "raise", "SchemaFormatException", "(", "\"Unsupported field type contained by Array at {}.\"", ",", "path", ")", "elif", "not", "isinstance", "(", "field_type", ",", "type", ")", "and", "not", "isinstance", "(", "field_type", ",", "types", ".", "FunctionType", ")", ":", "raise", "SchemaFormatException", "(", "\"Unsupported field type at {}. Type must be a type, a function, an Array or another Schema\"", ",", "path", ")" ]
Verify that the 'type' in the spec is valid
[ "Verify", "that", "the", "type", "in", "the", "spec", "is", "valid" ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L108-L123
gamechanger/schemer
schemer/__init__.py
Schema._verify_default
def _verify_default(self, spec, path):
    """Verifies that the default specified in the given spec is valid."""
    field_type = spec['type']
    default = spec['default']

    # If it's a function there's nothing we can really do except assume its valid
    if callable(default):
        return

    if isinstance(field_type, Array):
        # Verify we'd got a list as our default
        if not isinstance(default, list):
            raise SchemaFormatException("Default value for Array at {} is not a list of values.", path)

        # Ensure the contents are of the correct type
        for i, item in enumerate(default):
            if isinstance(field_type.contained_type, Schema):
                if not self._valid_schema_default(item):
                    raise SchemaFormatException("Default value for Schema is not valid.", path)
            elif not isinstance(item, field_type.contained_type):
                raise SchemaFormatException("Not all items in the default list for the Array field at {} are of the correct type.", path)

    elif isinstance(field_type, Schema):
        if not self._valid_schema_default(default):
            raise SchemaFormatException("Default value for Schema is not valid.", path)

    else:
        if not isinstance(default, field_type):
            raise SchemaFormatException("Default value for {} is not of the nominated type.", path)
python
def _verify_default(self, spec, path):
    """Verifies that the default specified in the given spec is valid."""
    field_type = spec['type']
    default = spec['default']

    # If it's a function there's nothing we can really do except assume its valid
    if callable(default):
        return

    if isinstance(field_type, Array):
        # Verify we'd got a list as our default
        if not isinstance(default, list):
            raise SchemaFormatException("Default value for Array at {} is not a list of values.", path)

        # Ensure the contents are of the correct type
        for i, item in enumerate(default):
            if isinstance(field_type.contained_type, Schema):
                if not self._valid_schema_default(item):
                    raise SchemaFormatException("Default value for Schema is not valid.", path)
            elif not isinstance(item, field_type.contained_type):
                raise SchemaFormatException("Not all items in the default list for the Array field at {} are of the correct type.", path)

    elif isinstance(field_type, Schema):
        if not self._valid_schema_default(default):
            raise SchemaFormatException("Default value for Schema is not valid.", path)

    else:
        if not isinstance(default, field_type):
            raise SchemaFormatException("Default value for {} is not of the nominated type.", path)
[ "def", "_verify_default", "(", "self", ",", "spec", ",", "path", ")", ":", "field_type", "=", "spec", "[", "'type'", "]", "default", "=", "spec", "[", "'default'", "]", "# If it's a function there's nothing we can really do except assume its valid", "if", "callable", "(", "default", ")", ":", "return", "if", "isinstance", "(", "field_type", ",", "Array", ")", ":", "# Verify we'd got a list as our default", "if", "not", "isinstance", "(", "default", ",", "list", ")", ":", "raise", "SchemaFormatException", "(", "\"Default value for Array at {} is not a list of values.\"", ",", "path", ")", "# Ensure the contents are of the correct type", "for", "i", ",", "item", "in", "enumerate", "(", "default", ")", ":", "if", "isinstance", "(", "field_type", ".", "contained_type", ",", "Schema", ")", ":", "if", "not", "self", ".", "_valid_schema_default", "(", "item", ")", ":", "raise", "SchemaFormatException", "(", "\"Default value for Schema is not valid.\"", ",", "path", ")", "elif", "not", "isinstance", "(", "item", ",", "field_type", ".", "contained_type", ")", ":", "raise", "SchemaFormatException", "(", "\"Not all items in the default list for the Array field at {} are of the correct type.\"", ",", "path", ")", "elif", "isinstance", "(", "field_type", ",", "Schema", ")", ":", "if", "not", "self", ".", "_valid_schema_default", "(", "default", ")", ":", "raise", "SchemaFormatException", "(", "\"Default value for Schema is not valid.\"", ",", "path", ")", "else", ":", "if", "not", "isinstance", "(", "default", ",", "field_type", ")", ":", "raise", "SchemaFormatException", "(", "\"Default value for {} is not of the nominated type.\"", ",", "path", ")" ]
Verifies that the default specified in the given spec is valid.
[ "Verifies", "that", "the", "default", "specified", "in", "the", "given", "spec", "is", "valid", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L128-L156
gamechanger/schemer
schemer/__init__.py
Schema._verify_validates
def _verify_validates(self, spec, path):
    """Verify thats the 'validates' argument is valid."""
    validates = spec['validates']

    if isinstance(validates, list):
        for validator in validates:
            self._verify_validator(validator, path)
    else:
        self._verify_validator(validates, path)
python
def _verify_validates(self, spec, path):
    """Verify thats the 'validates' argument is valid."""
    validates = spec['validates']

    if isinstance(validates, list):
        for validator in validates:
            self._verify_validator(validator, path)
    else:
        self._verify_validator(validates, path)
[ "def", "_verify_validates", "(", "self", ",", "spec", ",", "path", ")", ":", "validates", "=", "spec", "[", "'validates'", "]", "if", "isinstance", "(", "validates", ",", "list", ")", ":", "for", "validator", "in", "validates", ":", "self", ".", "_verify_validator", "(", "validator", ",", "path", ")", "else", ":", "self", ".", "_verify_validator", "(", "validates", ",", "path", ")" ]
Verify thats the 'validates' argument is valid.
[ "Verify", "thats", "the", "validates", "argument", "is", "valid", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L159-L167
gamechanger/schemer
schemer/__init__.py
Schema._verify_validator
def _verify_validator(self, validator, path):
    """Verifies that a given validator associated with the field at the
    given path is legitimate."""

    # Validator should be a function
    if not callable(validator):
        raise SchemaFormatException("Invalid validations for {}", path)

    # Validator should accept a single argument
    (args, varargs, keywords, defaults) = getargspec(validator)
    if len(args) != 1:
        raise SchemaFormatException("Invalid validations for {}", path)
python
def _verify_validator(self, validator, path):
    """Verifies that a given validator associated with the field at the
    given path is legitimate."""

    # Validator should be a function
    if not callable(validator):
        raise SchemaFormatException("Invalid validations for {}", path)

    # Validator should accept a single argument
    (args, varargs, keywords, defaults) = getargspec(validator)
    if len(args) != 1:
        raise SchemaFormatException("Invalid validations for {}", path)
[ "def", "_verify_validator", "(", "self", ",", "validator", ",", "path", ")", ":", "# Validator should be a function", "if", "not", "callable", "(", "validator", ")", ":", "raise", "SchemaFormatException", "(", "\"Invalid validations for {}\"", ",", "path", ")", "# Validator should accept a single argument", "(", "args", ",", "varargs", ",", "keywords", ",", "defaults", ")", "=", "getargspec", "(", "validator", ")", "if", "len", "(", "args", ")", "!=", "1", ":", "raise", "SchemaFormatException", "(", "\"Invalid validations for {}\"", ",", "path", ")" ]
Verifies that a given validator associated with the field at the given path is legitimate.
[ "Verifies", "that", "a", "given", "validator", "associated", "with", "the", "field", "at", "the", "given", "path", "is", "legitimate", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L170-L180
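_verify_validator pins down the contract a custom validator must satisfy: it must be callable and accept exactly one argument, and the stock validators above establish the return convention of an error string on failure and an implicit None on success. A hypothetical custom validator fitting that contract:

def is_positive(value):
    """Returns an error string for non-positive numbers, None otherwise."""
    if value <= 0:
        return "{} is not a positive number".format(value)

# e.g. in a doc spec: "score": {"type": int, "validates": is_positive}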
gamechanger/schemer
schemer/__init__.py
Schema._validate_instance
def _validate_instance(self, instance, errors, path_prefix=''):
    """Validates that the given instance of a document conforms to the given
    schema's structure and validations. Any validation errors are added to the
    given errors collection. The caller should assume the instance is considered
    valid if the errors collection is empty when this method returns."""

    if not isinstance(instance, dict):
        errors[path_prefix] = "Expected instance of dict to validate against schema."
        return

    # validate against the schema level validators
    self._apply_validations(errors, path_prefix, self._validates, instance)

    # Loop over each field in the schema and check the instance value conforms
    # to its spec
    for field, spec in self.doc_spec.iteritems():
        path = self._append_path(path_prefix, field)

        # If the field is present, validate it's value.
        if field in instance:
            self._validate_value(instance[field], spec, path, errors)
        else:
            # If not, add an error if it was a required key.
            if spec.get('required', False):
                errors[path] = "{} is required.".format(path)

    # Now loop over each field in the given instance and make sure we don't
    # have any fields not declared in the schema, unless strict mode has been
    # explicitly disabled.
    if self._strict:
        for field in instance:
            if field not in self.doc_spec:
                errors[self._append_path(path_prefix, field)] = "Unexpected document field not present in schema"
python
def _validate_instance(self, instance, errors, path_prefix=''):
    """Validates that the given instance of a document conforms to the given
    schema's structure and validations. Any validation errors are added to the
    given errors collection. The caller should assume the instance is considered
    valid if the errors collection is empty when this method returns."""

    if not isinstance(instance, dict):
        errors[path_prefix] = "Expected instance of dict to validate against schema."
        return

    # validate against the schema level validators
    self._apply_validations(errors, path_prefix, self._validates, instance)

    # Loop over each field in the schema and check the instance value conforms
    # to its spec
    for field, spec in self.doc_spec.iteritems():
        path = self._append_path(path_prefix, field)

        # If the field is present, validate it's value.
        if field in instance:
            self._validate_value(instance[field], spec, path, errors)
        else:
            # If not, add an error if it was a required key.
            if spec.get('required', False):
                errors[path] = "{} is required.".format(path)

    # Now loop over each field in the given instance and make sure we don't
    # have any fields not declared in the schema, unless strict mode has been
    # explicitly disabled.
    if self._strict:
        for field in instance:
            if field not in self.doc_spec:
                errors[self._append_path(path_prefix, field)] = "Unexpected document field not present in schema"
[ "def", "_validate_instance", "(", "self", ",", "instance", ",", "errors", ",", "path_prefix", "=", "''", ")", ":", "if", "not", "isinstance", "(", "instance", ",", "dict", ")", ":", "errors", "[", "path_prefix", "]", "=", "\"Expected instance of dict to validate against schema.\"", "return", "# validate against the schema level validators", "self", ".", "_apply_validations", "(", "errors", ",", "path_prefix", ",", "self", ".", "_validates", ",", "instance", ")", "# Loop over each field in the schema and check the instance value conforms", "# to its spec", "for", "field", ",", "spec", "in", "self", ".", "doc_spec", ".", "iteritems", "(", ")", ":", "path", "=", "self", ".", "_append_path", "(", "path_prefix", ",", "field", ")", "# If the field is present, validate it's value.", "if", "field", "in", "instance", ":", "self", ".", "_validate_value", "(", "instance", "[", "field", "]", ",", "spec", ",", "path", ",", "errors", ")", "else", ":", "# If not, add an error if it was a required key.", "if", "spec", ".", "get", "(", "'required'", ",", "False", ")", ":", "errors", "[", "path", "]", "=", "\"{} is required.\"", ".", "format", "(", "path", ")", "# Now loop over each field in the given instance and make sure we don't", "# have any fields not declared in the schema, unless strict mode has been", "# explicitly disabled.", "if", "self", ".", "_strict", ":", "for", "field", "in", "instance", ":", "if", "field", "not", "in", "self", ".", "doc_spec", ":", "errors", "[", "self", ".", "_append_path", "(", "path_prefix", ",", "field", ")", "]", "=", "\"Unexpected document field not present in schema\"" ]
Validates that the given instance of a document conforms to the given schema's structure and validations. Any validation errors are added to the given errors collection. The caller should assume the instance is considered valid if the errors collection is empty when this method returns.
[ "Validates", "that", "the", "given", "instance", "of", "a", "document", "conforms", "to", "the", "given", "schema", "s", "structure", "and", "validations", ".", "Any", "validation", "errors", "are", "added", "to", "the", "given", "errors", "collection", ".", "The", "caller", "should", "assume", "the", "instance", "is", "considered", "valid", "if", "the", "errors", "collection", "is", "empty", "when", "this", "method", "returns", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L183-L215
gamechanger/schemer
schemer/__init__.py
Schema._validate_value
def _validate_value(self, value, field_spec, path, errors):
    """Validates that the given field value is valid given the associated
    field spec and path. Any validation failures are added to the given
    errors collection."""

    # Check if the value is None and add an error if the field is not nullable.
    # Note that for backward compatibility reasons, the default value of 'nullable'
    # is the inverse of 'required' (which use to mean both that the key be present
    # and not set to None).
    if value is None:
        if not field_spec.get('nullable', not field_spec.get('required', False)):
            errors[path] = "{} is not nullable.".format(path)
        return

    # All fields should have a type
    field_type = field_spec['type']

    if isinstance(field_type, types.FunctionType):
        try:
            field_type = field_type(value)
        except Exception as e:
            raise SchemaFormatException("Dynamic schema function raised exception: {}".format(str(e)), path)

        if not isinstance(field_type, (type, Schema, Array)):
            raise SchemaFormatException("Dynamic schema function did not return a type at path {}", path)

    # If our field is an embedded document, recurse into it
    if isinstance(field_type, Schema):
        if isinstance(value, dict):
            field_type._validate_instance(value, errors, path)
        else:
            errors[path] = "{} should be an embedded document".format(path)
        return

    elif isinstance(field_type, Array):
        if isinstance(value, list):
            is_dynamic = isinstance(field_type.contained_type, types.FunctionType)
            for i, item in enumerate(value):
                contained_type = field_type.contained_type
                if is_dynamic:
                    contained_type = contained_type(item)

                instance_path = self._append_path(path, i)
                if isinstance(contained_type, Schema):
                    contained_type._validate_instance(item, errors, instance_path)
                elif not isinstance(item, contained_type):
                    errors[instance_path] = "Array item at {} is of incorrect type".format(instance_path)
                    continue
        else:
            errors[path] = "{} should be an embedded array".format(path)
            return

    elif not isinstance(value, field_type):
        errors[path] = "Field should be of type {}".format(field_type)
        return

    validations = field_spec.get('validates', None)
    if validations is None:
        return

    self._apply_validations(errors, path, validations, value)
python
def _validate_value(self, value, field_spec, path, errors):
    """Validates that the given field value is valid given the associated
    field spec and path. Any validation failures are added to the given
    errors collection."""

    # Check if the value is None and add an error if the field is not nullable.
    # Note that for backward compatibility reasons, the default value of 'nullable'
    # is the inverse of 'required' (which use to mean both that the key be present
    # and not set to None).
    if value is None:
        if not field_spec.get('nullable', not field_spec.get('required', False)):
            errors[path] = "{} is not nullable.".format(path)
        return

    # All fields should have a type
    field_type = field_spec['type']

    if isinstance(field_type, types.FunctionType):
        try:
            field_type = field_type(value)
        except Exception as e:
            raise SchemaFormatException("Dynamic schema function raised exception: {}".format(str(e)), path)

        if not isinstance(field_type, (type, Schema, Array)):
            raise SchemaFormatException("Dynamic schema function did not return a type at path {}", path)

    # If our field is an embedded document, recurse into it
    if isinstance(field_type, Schema):
        if isinstance(value, dict):
            field_type._validate_instance(value, errors, path)
        else:
            errors[path] = "{} should be an embedded document".format(path)
        return

    elif isinstance(field_type, Array):
        if isinstance(value, list):
            is_dynamic = isinstance(field_type.contained_type, types.FunctionType)
            for i, item in enumerate(value):
                contained_type = field_type.contained_type
                if is_dynamic:
                    contained_type = contained_type(item)

                instance_path = self._append_path(path, i)
                if isinstance(contained_type, Schema):
                    contained_type._validate_instance(item, errors, instance_path)
                elif not isinstance(item, contained_type):
                    errors[instance_path] = "Array item at {} is of incorrect type".format(instance_path)
                    continue
        else:
            errors[path] = "{} should be an embedded array".format(path)
            return

    elif not isinstance(value, field_type):
        errors[path] = "Field should be of type {}".format(field_type)
        return

    validations = field_spec.get('validates', None)
    if validations is None:
        return

    self._apply_validations(errors, path, validations, value)
[ "def", "_validate_value", "(", "self", ",", "value", ",", "field_spec", ",", "path", ",", "errors", ")", ":", "# Check if the value is None and add an error if the field is not nullable.", "# Note that for backward compatibility reasons, the default value of 'nullable'", "# is the inverse of 'required' (which use to mean both that the key be present", "# and not set to None).", "if", "value", "is", "None", ":", "if", "not", "field_spec", ".", "get", "(", "'nullable'", ",", "not", "field_spec", ".", "get", "(", "'required'", ",", "False", ")", ")", ":", "errors", "[", "path", "]", "=", "\"{} is not nullable.\"", ".", "format", "(", "path", ")", "return", "# All fields should have a type", "field_type", "=", "field_spec", "[", "'type'", "]", "if", "isinstance", "(", "field_type", ",", "types", ".", "FunctionType", ")", ":", "try", ":", "field_type", "=", "field_type", "(", "value", ")", "except", "Exception", "as", "e", ":", "raise", "SchemaFormatException", "(", "\"Dynamic schema function raised exception: {}\"", ".", "format", "(", "str", "(", "e", ")", ")", ",", "path", ")", "if", "not", "isinstance", "(", "field_type", ",", "(", "type", ",", "Schema", ",", "Array", ")", ")", ":", "raise", "SchemaFormatException", "(", "\"Dynamic schema function did not return a type at path {}\"", ",", "path", ")", "# If our field is an embedded document, recurse into it", "if", "isinstance", "(", "field_type", ",", "Schema", ")", ":", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "field_type", ".", "_validate_instance", "(", "value", ",", "errors", ",", "path", ")", "else", ":", "errors", "[", "path", "]", "=", "\"{} should be an embedded document\"", ".", "format", "(", "path", ")", "return", "elif", "isinstance", "(", "field_type", ",", "Array", ")", ":", "if", "isinstance", "(", "value", ",", "list", ")", ":", "is_dynamic", "=", "isinstance", "(", "field_type", ".", "contained_type", ",", "types", ".", "FunctionType", ")", "for", "i", ",", "item", "in", "enumerate", "(", "value", ")", ":", "contained_type", "=", "field_type", ".", "contained_type", "if", "is_dynamic", ":", "contained_type", "=", "contained_type", "(", "item", ")", "instance_path", "=", "self", ".", "_append_path", "(", "path", ",", "i", ")", "if", "isinstance", "(", "contained_type", ",", "Schema", ")", ":", "contained_type", ".", "_validate_instance", "(", "item", ",", "errors", ",", "instance_path", ")", "elif", "not", "isinstance", "(", "item", ",", "contained_type", ")", ":", "errors", "[", "instance_path", "]", "=", "\"Array item at {} is of incorrect type\"", ".", "format", "(", "instance_path", ")", "continue", "else", ":", "errors", "[", "path", "]", "=", "\"{} should be an embedded array\"", ".", "format", "(", "path", ")", "return", "elif", "not", "isinstance", "(", "value", ",", "field_type", ")", ":", "errors", "[", "path", "]", "=", "\"Field should be of type {}\"", ".", "format", "(", "field_type", ")", "return", "validations", "=", "field_spec", ".", "get", "(", "'validates'", ",", "None", ")", "if", "validations", "is", "None", ":", "return", "self", ".", "_apply_validations", "(", "errors", ",", "path", ",", "validations", ",", "value", ")" ]
Validates that the given field value is valid given the associated field spec and path. Any validation failures are added to the given errors collection.
[ "Validates", "that", "the", "given", "field", "value", "is", "valid", "given", "the", "associated", "field", "spec", "and", "path", ".", "Any", "validation", "failures", "are", "added", "to", "the", "given", "errors", "collection", "." ]
train
https://github.com/gamechanger/schemer/blob/1d1dd7da433d3b84ce5a80ded5a84ab4a65825ee/schemer/__init__.py#L217-L274
GaretJax/lancet
lancet/settings.py
load_config
def load_config(path=None, defaults=None):
    """
    Loads and parses an INI style configuration file using Python's built-in
    configparser module. If path is specified, load it.

    If ``defaults`` (a list of strings) is given, try to load each entry as a
    file, without throwing any error if the operation fails.

    If ``defaults`` is not given, the following locations listed in the
    DEFAULT_FILES constant are tried.

    To completely disable defaults loading, pass in an empty list or ``False``.

    Returns the SafeConfigParser instance used to load and parse the files.
    """
    if defaults is None:
        defaults = DEFAULT_FILES

    config = ConfigParser(allow_no_value=True)

    if defaults:
        config.read(defaults)

    if path:
        with open(path) as fh:
            config.read_file(fh)

    return config
python
def load_config(path=None, defaults=None):
    """
    Loads and parses an INI style configuration file using Python's built-in
    configparser module. If path is specified, load it.

    If ``defaults`` (a list of strings) is given, try to load each entry as a
    file, without throwing any error if the operation fails.

    If ``defaults`` is not given, the following locations listed in the
    DEFAULT_FILES constant are tried.

    To completely disable defaults loading, pass in an empty list or ``False``.

    Returns the SafeConfigParser instance used to load and parse the files.
    """
    if defaults is None:
        defaults = DEFAULT_FILES

    config = ConfigParser(allow_no_value=True)

    if defaults:
        config.read(defaults)

    if path:
        with open(path) as fh:
            config.read_file(fh)

    return config
[ "def", "load_config", "(", "path", "=", "None", ",", "defaults", "=", "None", ")", ":", "if", "defaults", "is", "None", ":", "defaults", "=", "DEFAULT_FILES", "config", "=", "ConfigParser", "(", "allow_no_value", "=", "True", ")", "if", "defaults", ":", "config", ".", "read", "(", "defaults", ")", "if", "path", ":", "with", "open", "(", "path", ")", "as", "fh", ":", "config", ".", "read_file", "(", "fh", ")", "return", "config" ]
Loads and parses an INI style configuration file using Python's built-in configparser module. If path is specified, load it. If ``defaults`` (a list of strings) is given, try to load each entry as a file, without throwing any error if the operation fails. If ``defaults`` is not given, the following locations listed in the DEFAULT_FILES constant are tried. To completely disable defaults loading, pass in an empty list or ``False``. Returns the SafeConfigParser instance used to load and parse the files.
[ "Loads", "and", "parses", "an", "INI", "style", "configuration", "file", "using", "Python", "s", "built", "-", "in", "configparser", "module", ".", "If", "path", "is", "specified", "load", "it", ".", "If", "defaults", "(", "a", "list", "of", "strings", ")", "is", "given", "try", "to", "load", "each", "entry", "as", "a", "file", "without", "throwing", "any", "error", "if", "the", "operation", "fails", ".", "If", "defaults", "is", "not", "given", "the", "following", "locations", "listed", "in", "the", "DEFAULT_FILES", "constant", "are", "tried", ".", "To", "completely", "disable", "defaults", "loading", "pass", "in", "an", "empty", "list", "or", "False", ".", "Returns", "the", "SafeConfigParser", "instance", "used", "to", "load", "and", "parse", "the", "files", "." ]
train
https://github.com/GaretJax/lancet/blob/cf438c5c6166b18ee0dc5ffce55220793019bb95/lancet/settings.py#L37-L61
GaretJax/lancet
lancet/settings.py
as_dict
def as_dict(config):
    """
    Converts a ConfigParser object into a dictionary.

    The resulting dictionary has sections as keys which point to a dict of the
    sections options as key => value pairs.
    """
    settings = defaultdict(lambda: {})
    for section in config.sections():
        for key, val in config.items(section):
            settings[section][key] = val
    return settings
python
def as_dict(config):
    """
    Converts a ConfigParser object into a dictionary.

    The resulting dictionary has sections as keys which point to a dict of the
    sections options as key => value pairs.
    """
    settings = defaultdict(lambda: {})
    for section in config.sections():
        for key, val in config.items(section):
            settings[section][key] = val
    return settings
[ "def", "as_dict", "(", "config", ")", ":", "settings", "=", "defaultdict", "(", "lambda", ":", "{", "}", ")", "for", "section", "in", "config", ".", "sections", "(", ")", ":", "for", "key", ",", "val", "in", "config", ".", "items", "(", "section", ")", ":", "settings", "[", "section", "]", "[", "key", "]", "=", "val", "return", "settings" ]
Converts a ConfigParser object into a dictionary. The resulting dictionary has sections as keys which point to a dict of the sections options as key => value pairs.
[ "Converts", "a", "ConfigParser", "object", "into", "a", "dictionary", "." ]
train
https://github.com/GaretJax/lancet/blob/cf438c5c6166b18ee0dc5ffce55220793019bb95/lancet/settings.py#L85-L96
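load_config and as_dict chain naturally: parse the default locations (plus an optional explicit file), then flatten the parser into plain nested dicts. A sketch assuming only that lancet is installed; the section and option names printed depend entirely on whatever configuration files exist:

from lancet.settings import load_config, as_dict

config = load_config()                 # no path given: only the DEFAULT_FILES locations are tried
settings = as_dict(config)
for section, options in settings.items():
    print(section, options)            # each section maps to its key => value pairs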
walkr/nanoservice
nanoservice/core.py
Endpoint.initialize
def initialize(self, timeouts):
    """ Bind or connect the nanomsg socket to some address """

    # Bind or connect to address
    if self.bind is True:
        self.socket.bind(self.address)
    else:
        self.socket.connect(self.address)

    # Set send and recv timeouts
    self._set_timeouts(timeouts)
python
def initialize(self, timeouts):
    """ Bind or connect the nanomsg socket to some address """

    # Bind or connect to address
    if self.bind is True:
        self.socket.bind(self.address)
    else:
        self.socket.connect(self.address)

    # Set send and recv timeouts
    self._set_timeouts(timeouts)
[ "def", "initialize", "(", "self", ",", "timeouts", ")", ":", "# Bind or connect to address", "if", "self", ".", "bind", "is", "True", ":", "self", ".", "socket", ".", "bind", "(", "self", ".", "address", ")", "else", ":", "self", ".", "socket", ".", "connect", "(", "self", ".", "address", ")", "# Set send and recv timeouts", "self", ".", "_set_timeouts", "(", "timeouts", ")" ]
Bind or connect the nanomsg socket to some address
[ "Bind", "or", "connect", "the", "nanomsg", "socket", "to", "some", "address" ]
train
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/nanoservice/core.py#L67-L77
walkr/nanoservice
nanoservice/core.py
Endpoint._set_timeouts
def _set_timeouts(self, timeouts):
    """ Set socket timeouts for send and receive respectively """

    (send_timeout, recv_timeout) = (None, None)

    try:
        (send_timeout, recv_timeout) = timeouts
    except TypeError:
        raise EndpointError(
            '`timeouts` must be a pair of numbers (2, 3) which represent '
            'the timeout values for send and receive respectively')

    if send_timeout is not None:
        self.socket.set_int_option(
            nanomsg.SOL_SOCKET, nanomsg.SNDTIMEO, send_timeout)

    if recv_timeout is not None:
        self.socket.set_int_option(
            nanomsg.SOL_SOCKET, nanomsg.RCVTIMEO, recv_timeout)
python
def _set_timeouts(self, timeouts):
    """ Set socket timeouts for send and receive respectively """

    (send_timeout, recv_timeout) = (None, None)

    try:
        (send_timeout, recv_timeout) = timeouts
    except TypeError:
        raise EndpointError(
            '`timeouts` must be a pair of numbers (2, 3) which represent '
            'the timeout values for send and receive respectively')

    if send_timeout is not None:
        self.socket.set_int_option(
            nanomsg.SOL_SOCKET, nanomsg.SNDTIMEO, send_timeout)

    if recv_timeout is not None:
        self.socket.set_int_option(
            nanomsg.SOL_SOCKET, nanomsg.RCVTIMEO, recv_timeout)
[ "def", "_set_timeouts", "(", "self", ",", "timeouts", ")", ":", "(", "send_timeout", ",", "recv_timeout", ")", "=", "(", "None", ",", "None", ")", "try", ":", "(", "send_timeout", ",", "recv_timeout", ")", "=", "timeouts", "except", "TypeError", ":", "raise", "EndpointError", "(", "'`timeouts` must be a pair of numbers (2, 3) which represent '", "'the timeout values for send and receive respectively'", ")", "if", "send_timeout", "is", "not", "None", ":", "self", ".", "socket", ".", "set_int_option", "(", "nanomsg", ".", "SOL_SOCKET", ",", "nanomsg", ".", "SNDTIMEO", ",", "send_timeout", ")", "if", "recv_timeout", "is", "not", "None", ":", "self", ".", "socket", ".", "set_int_option", "(", "nanomsg", ".", "SOL_SOCKET", ",", "nanomsg", ".", "RCVTIMEO", ",", "recv_timeout", ")" ]
Set socket timeouts for send and receive respectively
[ "Set", "socket", "timeouts", "for", "send", "and", "receive", "respectively" ]
train
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/nanoservice/core.py#L79-L97
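The `timeouts` argument above is expected to be a two-item sequence (send timeout, receive timeout). A minimal standalone sketch of the same unpack-and-validate pattern, outside of nanoservice (the function name here is illustrative, not part of the library):

def split_timeouts(timeouts):
    """Return (send_timeout, recv_timeout), accepting any two-item sequence."""
    try:
        send_timeout, recv_timeout = timeouts
    except TypeError:
        # Mirrors the EndpointError raised above when a non-pair is passed
        raise ValueError('timeouts must be a pair such as (2, 3)')
    return send_timeout, recv_timeout

print(split_timeouts((2, 3)))      # (2, 3)
print(split_timeouts([None, 5]))   # (None, 5) -- only a receive timeout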
walkr/nanoservice
nanoservice/core.py
Endpoint.send
def send(self, payload):
    """ Encode and sign (optional) the send through socket """
    payload = self.encode(payload)
    payload = self.sign(payload)
    self.socket.send(payload)
python
def send(self, payload):
    """ Encode and sign (optional) the send through socket """
    payload = self.encode(payload)
    payload = self.sign(payload)
    self.socket.send(payload)
[ "def", "send", "(", "self", ",", "payload", ")", ":", "payload", "=", "self", ".", "encode", "(", "payload", ")", "payload", "=", "self", ".", "sign", "(", "payload", ")", "self", ".", "socket", ".", "send", "(", "payload", ")" ]
Encode and sign (optional) the send through socket
[ "Encode", "and", "sign", "(", "optional", ")", "the", "send", "through", "socket" ]
train
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/nanoservice/core.py#L99-L103
walkr/nanoservice
nanoservice/core.py
Endpoint.receive
def receive(self, decode=True):
    """ Receive from socket, authenticate and decode payload """
    payload = self.socket.recv()
    payload = self.verify(payload)
    if decode:
        payload = self.decode(payload)
    return payload
python
def receive(self, decode=True):
    """ Receive from socket, authenticate and decode payload """
    payload = self.socket.recv()
    payload = self.verify(payload)
    if decode:
        payload = self.decode(payload)
    return payload
[ "def", "receive", "(", "self", ",", "decode", "=", "True", ")", ":", "payload", "=", "self", ".", "socket", ".", "recv", "(", ")", "payload", "=", "self", ".", "verify", "(", "payload", ")", "if", "decode", ":", "payload", "=", "self", ".", "decode", "(", "payload", ")", "return", "payload" ]
Receive from socket, authenticate and decode payload
[ "Receive", "from", "socket", "authenticate", "and", "decode", "payload" ]
train
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/nanoservice/core.py#L105-L111
walkr/nanoservice
nanoservice/core.py
Endpoint.sign
def sign(self, payload):
    """ Sign payload using the supplied authenticator """
    if self.authenticator:
        return self.authenticator.signed(payload)
    return payload
python
def sign(self, payload):
    """ Sign payload using the supplied authenticator """
    if self.authenticator:
        return self.authenticator.signed(payload)
    return payload
[ "def", "sign", "(", "self", ",", "payload", ")", ":", "if", "self", ".", "authenticator", ":", "return", "self", ".", "authenticator", ".", "signed", "(", "payload", ")", "return", "payload" ]
Sign payload using the supplied authenticator
[ "Sign", "payload", "using", "the", "supplied", "authenticator" ]
train
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/nanoservice/core.py#L113-L117
walkr/nanoservice
nanoservice/core.py
Endpoint.verify
def verify(self, payload):
    """ Verify payload authenticity via the supplied authenticator """
    if not self.authenticator:
        return payload
    try:
        self.authenticator.auth(payload)
        return self.authenticator.unsigned(payload)
    except AuthenticatorInvalidSignature:
        raise
    except Exception as exception:
        raise AuthenticateError(str(exception))
python
def verify(self, payload):
    """ Verify payload authenticity via the supplied authenticator """
    if not self.authenticator:
        return payload
    try:
        self.authenticator.auth(payload)
        return self.authenticator.unsigned(payload)
    except AuthenticatorInvalidSignature:
        raise
    except Exception as exception:
        raise AuthenticateError(str(exception))
[ "def", "verify", "(", "self", ",", "payload", ")", ":", "if", "not", "self", ".", "authenticator", ":", "return", "payload", "try", ":", "self", ".", "authenticator", ".", "auth", "(", "payload", ")", "return", "self", ".", "authenticator", ".", "unsigned", "(", "payload", ")", "except", "AuthenticatorInvalidSignature", ":", "raise", "except", "Exception", "as", "exception", ":", "raise", "AuthenticateError", "(", "str", "(", "exception", ")", ")" ]
Verify payload authenticity via the supplied authenticator
[ "Verify", "payload", "authenticity", "via", "the", "supplied", "authenticator" ]
train
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/nanoservice/core.py#L119-L129
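The Authenticator object referenced by sign/verify is not included in this excerpt. As a rough conceptual stand-in only (not the nanoservice API), an HMAC-based sign-then-verify step could look like this, with an invented shared secret:

import hashlib
import hmac

SECRET = b'example-shared-secret'          # illustrative only
DIGEST_SIZE = hashlib.sha256().digest_size

def signed(payload):
    # Append an HMAC digest to the payload
    return payload + hmac.new(SECRET, payload, hashlib.sha256).digest()

def verify_and_strip(blob):
    payload, digest = blob[:-DIGEST_SIZE], blob[-DIGEST_SIZE:]
    expected = hmac.new(SECRET, payload, hashlib.sha256).digest()
    if not hmac.compare_digest(digest, expected):
        raise ValueError('invalid signature')
    return payload

print(verify_and_strip(signed(b'hello')))  # b'hello'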
walkr/nanoservice
nanoservice/core.py
Endpoint.decode
def decode(self, payload):
    """ Decode payload """
    try:
        return self.encoder.decode(payload)
    except Exception as exception:
        raise DecodeError(str(exception))
python
def decode(self, payload):
    """ Decode payload """
    try:
        return self.encoder.decode(payload)
    except Exception as exception:
        raise DecodeError(str(exception))
[ "def", "decode", "(", "self", ",", "payload", ")", ":", "try", ":", "return", "self", ".", "encoder", ".", "decode", "(", "payload", ")", "except", "Exception", "as", "exception", ":", "raise", "DecodeError", "(", "str", "(", "exception", ")", ")" ]
Decode payload
[ "Decode", "payload" ]
train
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/nanoservice/core.py#L131-L136
walkr/nanoservice
nanoservice/core.py
Endpoint.encode
def encode(self, payload):
    """ Encode payload """
    try:
        return self.encoder.encode(payload)
    except Exception as exception:
        raise EncodeError(str(exception))
python
def encode(self, payload):
    """ Encode payload """
    try:
        return self.encoder.encode(payload)
    except Exception as exception:
        raise EncodeError(str(exception))
[ "def", "encode", "(", "self", ",", "payload", ")", ":", "try", ":", "return", "self", ".", "encoder", ".", "encode", "(", "payload", ")", "except", "Exception", "as", "exception", ":", "raise", "EncodeError", "(", "str", "(", "exception", ")", ")" ]
Encode payload
[ "Encode", "payload" ]
train
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/nanoservice/core.py#L138-L143
walkr/nanoservice
nanoservice/core.py
Process.start
def start(self):
    """ Start and listen for calls """

    if threading.current_thread().name == 'MainThread':
        signal.signal(signal.SIGINT, self.stop)

    logging.info('Started on {}'.format(self.address))

    while True:
        self.process()
python
def start(self):
    """ Start and listen for calls """

    if threading.current_thread().name == 'MainThread':
        signal.signal(signal.SIGINT, self.stop)

    logging.info('Started on {}'.format(self.address))

    while True:
        self.process()
[ "def", "start", "(", "self", ")", ":", "if", "threading", ".", "current_thread", "(", ")", ".", "name", "==", "'MainThread'", ":", "signal", ".", "signal", "(", "signal", ".", "SIGINT", ",", "self", ".", "stop", ")", "logging", ".", "info", "(", "'Started on {}'", ".", "format", "(", "self", ".", "address", ")", ")", "while", "True", ":", "self", ".", "process", "(", ")" ]
Start and listen for calls
[ "Start", "and", "listen", "for", "calls" ]
train
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/nanoservice/core.py#L149-L158
walkr/nanoservice
nanoservice/core.py
Process.stop
def stop(self, dummy_signum=None, dummy_frame=None):
    """ Shutdown process (this method is also a signal handler) """
    logging.info('Shutting down ...')
    self.socket.close()
    sys.exit(0)
python
def stop(self, dummy_signum=None, dummy_frame=None):
    """ Shutdown process (this method is also a signal handler) """
    logging.info('Shutting down ...')
    self.socket.close()
    sys.exit(0)
[ "def", "stop", "(", "self", ",", "dummy_signum", "=", "None", ",", "dummy_frame", "=", "None", ")", ":", "logging", ".", "info", "(", "'Shutting down ...'", ")", "self", ".", "socket", ".", "close", "(", ")", "sys", ".", "exit", "(", "0", ")" ]
Shutdown process (this method is also a signal handler)
[ "Shutdown", "process", "(", "this", "method", "is", "also", "a", "signal", "handler", ")" ]
train
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/nanoservice/core.py#L160-L164
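Taken together, `start` and `stop` implement a run-until-SIGINT loop. A self-contained sketch of the same pattern, with a sleep standing in for the socket work done by `process()`:

import logging
import signal
import sys
import time

logging.basicConfig(level=logging.INFO)

def stop(signum=None, frame=None):
    logging.info('Shutting down ...')
    sys.exit(0)

def start():
    signal.signal(signal.SIGINT, stop)   # same handler registration as above
    logging.info('Started')
    while True:
        time.sleep(1.0)                  # stand-in for self.process()

if __name__ == '__main__':
    start()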
RockFeng0/rtsf
rtsf/p_report.py
HtmlReporter.get_summary
def get_summary(list_all=[], **kwargs):
    ''' summarize the report data
    @param list_all: a list which save the report data
    @param kwargs: such as
        show_all: True/False report show all status cases
        proj_name: project name
        home_page: home page url
    '''
    all_summary = []
    for module in list_all:
        summary = {
            "module_name": module['Name'],
            "show_all": kwargs.get("show_all",True),
            "project_name": kwargs.get("proj_name","TestProject"),
            "home_page": kwargs.get("home_page",__about__.HOME_PAGE),
            "start_time": "",
            "end_time": "",
            "duration_seconds": "",
            "total_case_num": len(module["TestCases"]),
            "pass_cases_num": 0,
            "fail_cases_num": 0,
            "details": []
        }

        for case in module["TestCases"]:
            case_detail = {}
            case_detail["linkurl"] = "./caselogs/%s_%s.log" %(case["case_name"],case["exec_date"])

            if case["status"].lower() == "pass":
                summary["pass_cases_num"] += 1
                case_detail["c_style"] = "tr_pass"
            else:
                summary["fail_cases_num"] += 1
                case_detail["c_style"] = "tr_fail"

            case_detail.update(case)
            summary["details"].append(case_detail)

        try:
            st = module["TestCases"][0].get("start_at")
            et = module["TestCases"][-1].get("end_at")
            summary["start_time"] = time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(st))
            summary["end_time"] = time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(et))
            summary["duration_seconds"] = float("%.2f" %(et - st))
        except Exception as _:
            logger.log_warning("Will set 'start_at' and 'end_at' to 'None'")
            (summary["start_time"], summary["end_time"], summary["duration_seconds"]) = (None,None,None)

        if summary["fail_cases_num"] > 0:
            summary["dict_report"] = {"result":0,"message":"failure","pass":summary["pass_cases_num"],"fail":summary["fail_cases_num"]}
        else:
            summary["dict_report"] = {"result":1,"message":"success","pass":summary["pass_cases_num"],"fail":summary["fail_cases_num"]}

        all_summary.append(summary)

    return all_summary
python
def get_summary(list_all=[], **kwargs):
    ''' summarize the report data
    @param list_all: a list which save the report data
    @param kwargs: such as
        show_all: True/False report show all status cases
        proj_name: project name
        home_page: home page url
    '''
    all_summary = []
    for module in list_all:
        summary = {
            "module_name": module['Name'],
            "show_all": kwargs.get("show_all",True),
            "project_name": kwargs.get("proj_name","TestProject"),
            "home_page": kwargs.get("home_page",__about__.HOME_PAGE),
            "start_time": "",
            "end_time": "",
            "duration_seconds": "",
            "total_case_num": len(module["TestCases"]),
            "pass_cases_num": 0,
            "fail_cases_num": 0,
            "details": []
        }

        for case in module["TestCases"]:
            case_detail = {}
            case_detail["linkurl"] = "./caselogs/%s_%s.log" %(case["case_name"],case["exec_date"])

            if case["status"].lower() == "pass":
                summary["pass_cases_num"] += 1
                case_detail["c_style"] = "tr_pass"
            else:
                summary["fail_cases_num"] += 1
                case_detail["c_style"] = "tr_fail"

            case_detail.update(case)
            summary["details"].append(case_detail)

        try:
            st = module["TestCases"][0].get("start_at")
            et = module["TestCases"][-1].get("end_at")
            summary["start_time"] = time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(st))
            summary["end_time"] = time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(et))
            summary["duration_seconds"] = float("%.2f" %(et - st))
        except Exception as _:
            logger.log_warning("Will set 'start_at' and 'end_at' to 'None'")
            (summary["start_time"], summary["end_time"], summary["duration_seconds"]) = (None,None,None)

        if summary["fail_cases_num"] > 0:
            summary["dict_report"] = {"result":0,"message":"failure","pass":summary["pass_cases_num"],"fail":summary["fail_cases_num"]}
        else:
            summary["dict_report"] = {"result":1,"message":"success","pass":summary["pass_cases_num"],"fail":summary["fail_cases_num"]}

        all_summary.append(summary)

    return all_summary
[ "def", "get_summary", "(", "list_all", "=", "[", "]", ",", "*", "*", "kwargs", ")", ":", "all_summary", "=", "[", "]", "for", "module", "in", "list_all", ":", "summary", "=", "{", "\"module_name\"", ":", "module", "[", "'Name'", "]", ",", "\"show_all\"", ":", "kwargs", ".", "get", "(", "\"show_all\"", ",", "True", ")", ",", "\"project_name\"", ":", "kwargs", ".", "get", "(", "\"proj_name\"", ",", "\"TestProject\"", ")", ",", "\"home_page\"", ":", "kwargs", ".", "get", "(", "\"home_page\"", ",", "__about__", ".", "HOME_PAGE", ")", ",", "\"start_time\"", ":", "\"\"", ",", "\"end_time\"", ":", "\"\"", ",", "\"duration_seconds\"", ":", "\"\"", ",", "\"total_case_num\"", ":", "len", "(", "module", "[", "\"TestCases\"", "]", ")", ",", "\"pass_cases_num\"", ":", "0", ",", "\"fail_cases_num\"", ":", "0", ",", "\"details\"", ":", "[", "]", "}", "for", "case", "in", "module", "[", "\"TestCases\"", "]", ":", "case_detail", "=", "{", "}", "case_detail", "[", "\"linkurl\"", "]", "=", "\"./caselogs/%s_%s.log\"", "%", "(", "case", "[", "\"case_name\"", "]", ",", "case", "[", "\"exec_date\"", "]", ")", "if", "case", "[", "\"status\"", "]", ".", "lower", "(", ")", "==", "\"pass\"", ":", "summary", "[", "\"pass_cases_num\"", "]", "+=", "1", "case_detail", "[", "\"c_style\"", "]", "=", "\"tr_pass\"", "else", ":", "summary", "[", "\"fail_cases_num\"", "]", "+=", "1", "case_detail", "[", "\"c_style\"", "]", "=", "\"tr_fail\"", "case_detail", ".", "update", "(", "case", ")", "summary", "[", "\"details\"", "]", ".", "append", "(", "case_detail", ")", "try", ":", "st", "=", "module", "[", "\"TestCases\"", "]", "[", "0", "]", ".", "get", "(", "\"start_at\"", ")", "et", "=", "module", "[", "\"TestCases\"", "]", "[", "-", "1", "]", ".", "get", "(", "\"end_at\"", ")", "summary", "[", "\"start_time\"", "]", "=", "time", ".", "strftime", "(", "\"%Y-%m-%d %H:%M:%S\"", ",", "time", ".", "localtime", "(", "st", ")", ")", "summary", "[", "\"end_time\"", "]", "=", "time", ".", "strftime", "(", "\"%Y-%m-%d %H:%M:%S\"", ",", "time", ".", "localtime", "(", "et", ")", ")", "summary", "[", "\"duration_seconds\"", "]", "=", "float", "(", "\"%.2f\"", "%", "(", "et", "-", "st", ")", ")", "except", "Exception", "as", "_", ":", "logger", ".", "log_warning", "(", "\"Will set 'start_at' and 'end_at' to 'None'\"", ")", "(", "summary", "[", "\"start_time\"", "]", ",", "summary", "[", "\"end_time\"", "]", ",", "summary", "[", "\"duration_seconds\"", "]", ")", "=", "(", "None", ",", "None", ",", "None", ")", "if", "summary", "[", "\"fail_cases_num\"", "]", ">", "0", ":", "summary", "[", "\"dict_report\"", "]", "=", "{", "\"result\"", ":", "0", ",", "\"message\"", ":", "\"failure\"", ",", "\"pass\"", ":", "summary", "[", "\"pass_cases_num\"", "]", ",", "\"fail\"", ":", "summary", "[", "\"fail_cases_num\"", "]", "}", "else", ":", "summary", "[", "\"dict_report\"", "]", "=", "{", "\"result\"", ":", "1", ",", "\"message\"", ":", "\"success\"", ",", "\"pass\"", ":", "summary", "[", "\"pass_cases_num\"", "]", ",", "\"fail\"", ":", "summary", "[", "\"fail_cases_num\"", "]", "}", "all_summary", ".", "append", "(", "summary", ")", "return", "all_summary" ]
summarize the report data @param list_all: a list which save the report data @param kwargs: such as show_all: True/False report show all status cases proj_name: project name home_page: home page url
[ "summarize", "the", "report", "data" ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_report.py#L134-L193
RockFeng0/rtsf
rtsf/p_report.py
HtmlReporter.add_report_data
def add_report_data(list_all=[], module_name="TestModule", **kwargs):
    ''' add report data to a list
    @param list_all: a list which save the report data
    @param module_name: test set name or test module name
    @param kwargs: such as
        case_name: testcase name
        status: test result, Pass or Fail
        resp_tester: responsible tester who write this case
        tester: tester who execute the test
        start_at: tester run this case at time
        end_at: tester stop this case at time
    '''
    start_at = kwargs.get("start_at")
    case_name = kwargs.get("case_name","TestCase")
    raw_case_name = kwargs.get("raw_case_name","TestCase")
    exec_date_time = time.localtime(start_at)
    execdate = time.strftime("%Y-%m-%d",exec_date_time)
    exectime = time.strftime("%H:%M:%S",exec_date_time)

    _case_report = {
        'resp_tester': kwargs.get("resp_tester","administrator"),
        'tester': kwargs.get("tester","administrator"),
        'case_name': case_name,
        'raw_case_name': raw_case_name,
        'status': kwargs.get("status","Pass"),
        'exec_date': execdate,
        'exec_time': exectime,
        'start_at': start_at,
        'end_at': kwargs.get("end_at"),
    }

    for module in list_all:
        if module_name != module["Name"]:
            continue
        for case in module["TestCases"]:
            if raw_case_name == case["raw_case_name"]:
                case.update(_case_report)
                return list_all
        module["TestCases"].append(_case_report)
        return list_all

    list_all.append({"Name": module_name, "TestCases": [_case_report]})
    return list_all
python
def add_report_data(list_all=[], module_name="TestModule", **kwargs):
    ''' add report data to a list
    @param list_all: a list which save the report data
    @param module_name: test set name or test module name
    @param kwargs: such as
        case_name: testcase name
        status: test result, Pass or Fail
        resp_tester: responsible tester who write this case
        tester: tester who execute the test
        start_at: tester run this case at time
        end_at: tester stop this case at time
    '''
    start_at = kwargs.get("start_at")
    case_name = kwargs.get("case_name","TestCase")
    raw_case_name = kwargs.get("raw_case_name","TestCase")
    exec_date_time = time.localtime(start_at)
    execdate = time.strftime("%Y-%m-%d",exec_date_time)
    exectime = time.strftime("%H:%M:%S",exec_date_time)

    _case_report = {
        'resp_tester': kwargs.get("resp_tester","administrator"),
        'tester': kwargs.get("tester","administrator"),
        'case_name': case_name,
        'raw_case_name': raw_case_name,
        'status': kwargs.get("status","Pass"),
        'exec_date': execdate,
        'exec_time': exectime,
        'start_at': start_at,
        'end_at': kwargs.get("end_at"),
    }

    for module in list_all:
        if module_name != module["Name"]:
            continue
        for case in module["TestCases"]:
            if raw_case_name == case["raw_case_name"]:
                case.update(_case_report)
                return list_all
        module["TestCases"].append(_case_report)
        return list_all

    list_all.append({"Name": module_name, "TestCases": [_case_report]})
    return list_all
[ "def", "add_report_data", "(", "list_all", "=", "[", "]", ",", "module_name", "=", "\"TestModule\"", ",", "*", "*", "kwargs", ")", ":", "start_at", "=", "kwargs", ".", "get", "(", "\"start_at\"", ")", "case_name", "=", "kwargs", ".", "get", "(", "\"case_name\"", ",", "\"TestCase\"", ")", "raw_case_name", "=", "kwargs", ".", "get", "(", "\"raw_case_name\"", ",", "\"TestCase\"", ")", "exec_date_time", "=", "time", ".", "localtime", "(", "start_at", ")", "execdate", "=", "time", ".", "strftime", "(", "\"%Y-%m-%d\"", ",", "exec_date_time", ")", "exectime", "=", "time", ".", "strftime", "(", "\"%H:%M:%S\"", ",", "exec_date_time", ")", "_case_report", "=", "{", "'resp_tester'", ":", "kwargs", ".", "get", "(", "\"resp_tester\"", ",", "\"administrator\"", ")", ",", "'tester'", ":", "kwargs", ".", "get", "(", "\"tester\"", ",", "\"administrator\"", ")", ",", "'case_name'", ":", "case_name", ",", "'raw_case_name'", ":", "raw_case_name", ",", "'status'", ":", "kwargs", ".", "get", "(", "\"status\"", ",", "\"Pass\"", ")", ",", "'exec_date'", ":", "execdate", ",", "'exec_time'", ":", "exectime", ",", "'start_at'", ":", "start_at", ",", "'end_at'", ":", "kwargs", ".", "get", "(", "\"end_at\"", ")", ",", "}", "for", "module", "in", "list_all", ":", "if", "module_name", "!=", "module", "[", "\"Name\"", "]", ":", "continue", "for", "case", "in", "module", "[", "\"TestCases\"", "]", ":", "if", "raw_case_name", "==", "case", "[", "\"raw_case_name\"", "]", ":", "case", ".", "update", "(", "_case_report", ")", "return", "list_all", "module", "[", "\"TestCases\"", "]", ".", "append", "(", "_case_report", ")", "return", "list_all", "list_all", ".", "append", "(", "{", "\"Name\"", ":", "module_name", ",", "\"TestCases\"", ":", "[", "_case_report", "]", "}", ")", "return", "list_all" ]
add report data to a list @param list_all: a list which save the report data @param module_name: test set name or test module name @param kwargs: such as case_name: testcase name status: test result, Pass or Fail resp_tester: responsible tester who write this case tester: tester who execute the test start_at: tester run this case at time end_at: tester stop this case at time
[ "add", "report", "data", "to", "a", "list" ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_report.py#L196-L241
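A hedged usage sketch of the two rtsf reporting helpers above, assuming they are exposed as static methods on HtmlReporter (their signatures take no self, which suggests this); the module and case names are invented for illustration:

import time

start = time.time()
reports = HtmlReporter.add_report_data(
    [], module_name="LoginModule",
    case_name="test_login_ok", raw_case_name="test_login_ok",
    status="Pass", tester="qa", resp_tester="qa",
    start_at=start, end_at=start + 1.5)

summaries = HtmlReporter.get_summary(
    reports, proj_name="DemoProject", show_all=True)
print(summaries[0]["total_case_num"], summaries[0]["dict_report"]["message"])
# Expected, given the code above: 1 success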
walkr/nanoservice
nanoservice/pubsub.py
Subscriber.parse
def parse(self, subscription):
    """ Fetch the function registered for a certain subscription """
    for name in self.methods:
        tag = bytes(name.encode('utf-8'))
        if subscription.startswith(tag):
            fun = self.methods.get(name)
            message = subscription[len(tag):]
            return tag, message, fun
    return None, None, None
python
def parse(self, subscription):
    """ Fetch the function registered for a certain subscription """
    for name in self.methods:
        tag = bytes(name.encode('utf-8'))
        if subscription.startswith(tag):
            fun = self.methods.get(name)
            message = subscription[len(tag):]
            return tag, message, fun
    return None, None, None
[ "def", "parse", "(", "self", ",", "subscription", ")", ":", "for", "name", "in", "self", ".", "methods", ":", "tag", "=", "bytes", "(", "name", ".", "encode", "(", "'utf-8'", ")", ")", "if", "subscription", ".", "startswith", "(", "tag", ")", ":", "fun", "=", "self", ".", "methods", ".", "get", "(", "name", ")", "message", "=", "subscription", "[", "len", "(", "tag", ")", ":", "]", "return", "tag", ",", "message", ",", "fun", "return", "None", ",", "None", ",", "None" ]
Fetch the function registered for a certain subscription
[ "Fetch", "the", "function", "registered", "for", "a", "certain", "subscription" ]
train
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/nanoservice/pubsub.py#L57-L66
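The lookup above relies on the subscription payload starting with the byte-encoded tag it was published under. A standalone illustration of that prefix dispatch (the handler and payload here are invented for the example):

methods = {'temperature': lambda body: print('got', body)}

payload = b'temperature{"celsius": 21}'
for name in methods:
    tag = name.encode('utf-8')
    if payload.startswith(tag):
        handler = methods[name]
        handler(payload[len(tag):])   # prints: got b'{"celsius": 21}'
        break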
walkr/nanoservice
nanoservice/pubsub.py
Subscriber.subscribe
def subscribe(self, tag, fun, description=None):
    """ Subscribe to something and register a function """
    self.methods[tag] = fun
    self.descriptions[tag] = description
    self.socket.set_string_option(nanomsg.SUB, nanomsg.SUB_SUBSCRIBE, tag)
python
def subscribe(self, tag, fun, description=None):
    """ Subscribe to something and register a function """
    self.methods[tag] = fun
    self.descriptions[tag] = description
    self.socket.set_string_option(nanomsg.SUB, nanomsg.SUB_SUBSCRIBE, tag)
[ "def", "subscribe", "(", "self", ",", "tag", ",", "fun", ",", "description", "=", "None", ")", ":", "self", ".", "methods", "[", "tag", "]", "=", "fun", "self", ".", "descriptions", "[", "tag", "]", "=", "description", "self", ".", "socket", ".", "set_string_option", "(", "nanomsg", ".", "SUB", ",", "nanomsg", ".", "SUB_SUBSCRIBE", ",", "tag", ")" ]
Subscribe to something and register a function
[ "Subscribe", "to", "something", "and", "register", "a", "function" ]
train
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/nanoservice/pubsub.py#L72-L76
walkr/nanoservice
nanoservice/pubsub.py
Subscriber.process
def process(self):
    """ Receive a subscription from the socket and process it """

    subscription = None
    result = None

    try:
        subscription = self.socket.recv()
    except AuthenticateError as exception:
        logging.error(
            'Subscriber error while authenticating request: {}'
            .format(exception), exc_info=1)

    except AuthenticatorInvalidSignature as exception:
        logging.error(
            'Subscriber error while authenticating request: {}'
            .format(exception), exc_info=1)

    except DecodeError as exception:
        logging.error(
            'Subscriber error while decoding request: {}'
            .format(exception), exc_info=1)

    except RequestParseError as exception:
        logging.error(
            'Subscriber error while parsing request: {}'
            .format(exception), exc_info=1)

    else:
        logging.debug(
            'Subscriber received payload: {}'
            .format(subscription))

    _tag, message, fun = self.parse(subscription)
    message = self.verify(message)
    message = self.decode(message)

    try:
        result = fun(message)
    except Exception as exception:
        logging.error(exception, exc_info=1)

    # Return result to check successful execution of `fun` when testing
    return result
python
def process(self):
    """ Receive a subscription from the socket and process it """

    subscription = None
    result = None

    try:
        subscription = self.socket.recv()
    except AuthenticateError as exception:
        logging.error(
            'Subscriber error while authenticating request: {}'
            .format(exception), exc_info=1)

    except AuthenticatorInvalidSignature as exception:
        logging.error(
            'Subscriber error while authenticating request: {}'
            .format(exception), exc_info=1)

    except DecodeError as exception:
        logging.error(
            'Subscriber error while decoding request: {}'
            .format(exception), exc_info=1)

    except RequestParseError as exception:
        logging.error(
            'Subscriber error while parsing request: {}'
            .format(exception), exc_info=1)

    else:
        logging.debug(
            'Subscriber received payload: {}'
            .format(subscription))

    _tag, message, fun = self.parse(subscription)
    message = self.verify(message)
    message = self.decode(message)

    try:
        result = fun(message)
    except Exception as exception:
        logging.error(exception, exc_info=1)

    # Return result to check successful execution of `fun` when testing
    return result
[ "def", "process", "(", "self", ")", ":", "subscription", "=", "None", "result", "=", "None", "try", ":", "subscription", "=", "self", ".", "socket", ".", "recv", "(", ")", "except", "AuthenticateError", "as", "exception", ":", "logging", ".", "error", "(", "'Subscriber error while authenticating request: {}'", ".", "format", "(", "exception", ")", ",", "exc_info", "=", "1", ")", "except", "AuthenticatorInvalidSignature", "as", "exception", ":", "logging", ".", "error", "(", "'Subscriber error while authenticating request: {}'", ".", "format", "(", "exception", ")", ",", "exc_info", "=", "1", ")", "except", "DecodeError", "as", "exception", ":", "logging", ".", "error", "(", "'Subscriber error while decoding request: {}'", ".", "format", "(", "exception", ")", ",", "exc_info", "=", "1", ")", "except", "RequestParseError", "as", "exception", ":", "logging", ".", "error", "(", "'Subscriber error while parsing request: {}'", ".", "format", "(", "exception", ")", ",", "exc_info", "=", "1", ")", "else", ":", "logging", ".", "debug", "(", "'Subscriber received payload: {}'", ".", "format", "(", "subscription", ")", ")", "_tag", ",", "message", ",", "fun", "=", "self", ".", "parse", "(", "subscription", ")", "message", "=", "self", ".", "verify", "(", "message", ")", "message", "=", "self", ".", "decode", "(", "message", ")", "try", ":", "result", "=", "fun", "(", "message", ")", "except", "Exception", "as", "exception", ":", "logging", ".", "error", "(", "exception", ",", "exc_info", "=", "1", ")", "# Return result to check successful execution of `fun` when testing", "return", "result" ]
Receive a subscription from the socket and process it
[ "Receive", "a", "subscription", "from", "the", "socket", "and", "process", "it" ]
train
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/nanoservice/pubsub.py#L80-L124
walkr/nanoservice
nanoservice/pubsub.py
Publisher.build_payload
def build_payload(self, tag, message):
    """ Encode, sign payload(optional) and attach subscription tag """
    message = self.encode(message)
    message = self.sign(message)
    payload = bytes(tag.encode('utf-8')) + message
    return payload
python
def build_payload(self, tag, message):
    """ Encode, sign payload(optional) and attach subscription tag """
    message = self.encode(message)
    message = self.sign(message)
    payload = bytes(tag.encode('utf-8')) + message
    return payload
[ "def", "build_payload", "(", "self", ",", "tag", ",", "message", ")", ":", "message", "=", "self", ".", "encode", "(", "message", ")", "message", "=", "self", ".", "sign", "(", "message", ")", "payload", "=", "bytes", "(", "tag", ".", "encode", "(", "'utf-8'", ")", ")", "+", "message", "return", "payload" ]
Encode, sign payload(optional) and attach subscription tag
[ "Encode", "sign", "payload", "(", "optional", ")", "and", "attach", "subscription", "tag" ]
train
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/nanoservice/pubsub.py#L142-L147
walkr/nanoservice
nanoservice/pubsub.py
Publisher.publish
def publish(self, tag, message):
    """ Publish a message down the socket """
    payload = self.build_payload(tag, message)
    self.socket.send(payload)
python
def publish(self, tag, message):
    """ Publish a message down the socket """
    payload = self.build_payload(tag, message)
    self.socket.send(payload)
[ "def", "publish", "(", "self", ",", "tag", ",", "message", ")", ":", "payload", "=", "self", ".", "build_payload", "(", "tag", ",", "message", ")", "self", ".", "socket", ".", "send", "(", "payload", ")" ]
Publish a message down the socket
[ "Publish", "a", "message", "down", "the", "socket" ]
train
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/nanoservice/pubsub.py#L149-L152
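A hedged end-to-end sketch pairing the Subscriber and Publisher methods shown here, modelled on the benchmark code in the next two records; it assumes `sub` and `pub` are a Subscriber and Publisher already created against the same address (construction details beyond what the benchmark shows are not asserted here):

# `sub` and `pub` are assumed to exist, bound/connected to one address,
# as in start_service() and bench() below.
sub.subscribe('test', lambda msg: print('received:', msg))

pub.publish('test', {'n': 1})   # encoded, optionally signed, tagged with b'test'

sub.process()                   # receives, verifies, decodes, calls the handler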
walkr/nanoservice
benchmarks/bench_pub_sub_auth.py
start_service
def start_service(addr, n, authenticator):
    """ Start a service """

    s = Subscriber(addr, authenticator=authenticator)

    def do_something(line):
        pass

    s.subscribe('test', do_something)

    started = time.time()
    for _ in range(n):
        s.process()
    s.socket.close()
    duration = time.time() - started

    print('Subscriber service stats:')
    util.print_stats(n, duration)
    return
python
def start_service(addr, n, authenticator):
    """ Start a service """

    s = Subscriber(addr, authenticator=authenticator)

    def do_something(line):
        pass

    s.subscribe('test', do_something)

    started = time.time()
    for _ in range(n):
        s.process()
    s.socket.close()
    duration = time.time() - started

    print('Subscriber service stats:')
    util.print_stats(n, duration)
    return
[ "def", "start_service", "(", "addr", ",", "n", ",", "authenticator", ")", ":", "s", "=", "Subscriber", "(", "addr", ",", "authenticator", "=", "authenticator", ")", "def", "do_something", "(", "line", ")", ":", "pass", "s", ".", "subscribe", "(", "'test'", ",", "do_something", ")", "started", "=", "time", ".", "time", "(", ")", "for", "_", "in", "range", "(", "n", ")", ":", "s", ".", "process", "(", ")", "s", ".", "socket", ".", "close", "(", ")", "duration", "=", "time", ".", "time", "(", ")", "-", "started", "print", "(", "'Subscriber service stats:'", ")", "util", ".", "print_stats", "(", "n", ",", "duration", ")", "return" ]
Start a service
[ "Start", "a", "service" ]
train
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/benchmarks/bench_pub_sub_auth.py#L13-L31
walkr/nanoservice
benchmarks/bench_pub_sub_auth.py
bench
def bench(client, n):
    """ Benchmark n requests """

    items = list(range(n))

    # Time client publish operations
    # ------------------------------
    started = time.time()
    for i in items:
        client.publish('test', i)
    duration = time.time() - started
    print('Publisher client stats:')
    util.print_stats(n, duration)
python
def bench(client, n):
    """ Benchmark n requests """

    items = list(range(n))

    # Time client publish operations
    # ------------------------------
    started = time.time()
    for i in items:
        client.publish('test', i)
    duration = time.time() - started
    print('Publisher client stats:')
    util.print_stats(n, duration)
[ "def", "bench", "(", "client", ",", "n", ")", ":", "items", "=", "list", "(", "range", "(", "n", ")", ")", "# Time client publish operations", "# ------------------------------", "started", "=", "time", ".", "time", "(", ")", "for", "i", "in", "items", ":", "client", ".", "publish", "(", "'test'", ",", "i", ")", "duration", "=", "time", ".", "time", "(", ")", "-", "started", "print", "(", "'Publisher client stats:'", ")", "util", ".", "print_stats", "(", "n", ",", "duration", ")" ]
Benchmark n requests
[ "Benchmark", "n", "requests" ]
train
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/benchmarks/bench_pub_sub_auth.py#L34-L46
stevearc/pyramid_webpack
pyramid_webpack/__init__.py
get_webpack
def get_webpack(request, name='DEFAULT'):
    """
    Get the Webpack object for a given webpack config.

    Called at most once per request per config name.

    """
    if not hasattr(request, '_webpack_map'):
        request._webpack_map = {}
    wp = request._webpack_map.get(name)
    if wp is None:
        wp = request._webpack_map[name] = Webpack(request, name)
    return wp
python
def get_webpack(request, name='DEFAULT'):
    """
    Get the Webpack object for a given webpack config.

    Called at most once per request per config name.

    """
    if not hasattr(request, '_webpack_map'):
        request._webpack_map = {}
    wp = request._webpack_map.get(name)
    if wp is None:
        wp = request._webpack_map[name] = Webpack(request, name)
    return wp
[ "def", "get_webpack", "(", "request", ",", "name", "=", "'DEFAULT'", ")", ":", "if", "not", "hasattr", "(", "request", ",", "'_webpack_map'", ")", ":", "request", ".", "_webpack_map", "=", "{", "}", "wp", "=", "request", ".", "_webpack_map", ".", "get", "(", "name", ")", "if", "wp", "is", "None", ":", "wp", "=", "request", ".", "_webpack_map", "[", "name", "]", "=", "Webpack", "(", "request", ",", "name", ")", "return", "wp" ]
Get the Webpack object for a given webpack config. Called at most once per request per config name.
[ "Get", "the", "Webpack", "object", "for", "a", "given", "webpack", "config", "." ]
train
https://github.com/stevearc/pyramid_webpack/blob/4fcad26271fd6e8c270e19c7943240fea6d8c484/pyramid_webpack/__init__.py#L212-L223
stevearc/pyramid_webpack
pyramid_webpack/__init__.py
includeme
def includeme(config):
    """ Add pyramid_webpack methods and config to the app """
    settings = config.registry.settings
    root_package_name = config.root_package.__name__
    config.registry.webpack = {
        'DEFAULT': WebpackState(settings, root_package_name)
    }
    for extra_config in aslist(settings.get('webpack.configs', [])):
        state = WebpackState(settings, root_package_name, name=extra_config)
        config.registry.webpack[extra_config] = state

    # Set up any static views
    for state in six.itervalues(config.registry.webpack):
        if state.static_view:
            config.add_static_view(name=state.static_view_name,
                                   path=state.static_view_path,
                                   cache_max_age=state.cache_max_age)

    config.add_request_method(get_webpack, 'webpack')
python
def includeme(config):
    """ Add pyramid_webpack methods and config to the app """
    settings = config.registry.settings
    root_package_name = config.root_package.__name__
    config.registry.webpack = {
        'DEFAULT': WebpackState(settings, root_package_name)
    }
    for extra_config in aslist(settings.get('webpack.configs', [])):
        state = WebpackState(settings, root_package_name, name=extra_config)
        config.registry.webpack[extra_config] = state

    # Set up any static views
    for state in six.itervalues(config.registry.webpack):
        if state.static_view:
            config.add_static_view(name=state.static_view_name,
                                   path=state.static_view_path,
                                   cache_max_age=state.cache_max_age)

    config.add_request_method(get_webpack, 'webpack')
[ "def", "includeme", "(", "config", ")", ":", "settings", "=", "config", ".", "registry", ".", "settings", "root_package_name", "=", "config", ".", "root_package", ".", "__name__", "config", ".", "registry", ".", "webpack", "=", "{", "'DEFAULT'", ":", "WebpackState", "(", "settings", ",", "root_package_name", ")", "}", "for", "extra_config", "in", "aslist", "(", "settings", ".", "get", "(", "'webpack.configs'", ",", "[", "]", ")", ")", ":", "state", "=", "WebpackState", "(", "settings", ",", "root_package_name", ",", "name", "=", "extra_config", ")", "config", ".", "registry", ".", "webpack", "[", "extra_config", "]", "=", "state", "# Set up any static views", "for", "state", "in", "six", ".", "itervalues", "(", "config", ".", "registry", ".", "webpack", ")", ":", "if", "state", ".", "static_view", ":", "config", ".", "add_static_view", "(", "name", "=", "state", ".", "static_view_name", ",", "path", "=", "state", ".", "static_view_path", ",", "cache_max_age", "=", "state", ".", "cache_max_age", ")", "config", ".", "add_request_method", "(", "get_webpack", ",", "'webpack'", ")" ]
Add pyramid_webpack methods and config to the app
[ "Add", "pyramid_webpack", "methods", "and", "config", "to", "the", "app" ]
train
https://github.com/stevearc/pyramid_webpack/blob/4fcad26271fd6e8c270e19c7943240fea6d8c484/pyramid_webpack/__init__.py#L226-L244
stevearc/pyramid_webpack
pyramid_webpack/__init__.py
WebpackState._get_setting
def _get_setting(self, setting, default=None, name=None, inherit=True):
    """ Helper function to fetch settings, inheriting from the base """
    if name is None:
        name = self.name
    if name == 'DEFAULT':
        return self._settings.get('webpack.{0}'.format(setting), default)
    else:
        val = self._settings.get('webpack.{0}.{1}'.format(name, setting),
                                 SENTINEL)
        if val is SENTINEL:
            if inherit:
                return self._get_setting(setting, default, 'DEFAULT')
            else:
                return default
        else:
            return val
python
def _get_setting(self, setting, default=None, name=None, inherit=True):
    """ Helper function to fetch settings, inheriting from the base """
    if name is None:
        name = self.name
    if name == 'DEFAULT':
        return self._settings.get('webpack.{0}'.format(setting), default)
    else:
        val = self._settings.get('webpack.{0}.{1}'.format(name, setting),
                                 SENTINEL)
        if val is SENTINEL:
            if inherit:
                return self._get_setting(setting, default, 'DEFAULT')
            else:
                return default
        else:
            return val
[ "def", "_get_setting", "(", "self", ",", "setting", ",", "default", "=", "None", ",", "name", "=", "None", ",", "inherit", "=", "True", ")", ":", "if", "name", "is", "None", ":", "name", "=", "self", ".", "name", "if", "name", "==", "'DEFAULT'", ":", "return", "self", ".", "_settings", ".", "get", "(", "'webpack.{0}'", ".", "format", "(", "setting", ")", ",", "default", ")", "else", ":", "val", "=", "self", ".", "_settings", ".", "get", "(", "'webpack.{0}.{1}'", ".", "format", "(", "name", ",", "setting", ")", ",", "SENTINEL", ")", "if", "val", "is", "SENTINEL", ":", "if", "inherit", ":", "return", "self", ".", "_get_setting", "(", "setting", ",", "default", ",", "'DEFAULT'", ")", "else", ":", "return", "default", "else", ":", "return", "val" ]
Helper function to fetch settings, inheriting from the base
[ "Helper", "function", "to", "fetch", "settings", "inheriting", "from", "the", "base" ]
train
https://github.com/stevearc/pyramid_webpack/blob/4fcad26271fd6e8c270e19c7943240fea6d8c484/pyramid_webpack/__init__.py#L93-L108
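The lookup above means a named config falls back to the unprefixed `webpack.*` key when its own `webpack.<name>.*` key is missing. A small illustration of the two key shapes involved, using a plain dict in place of the Pyramid settings; the specific setting names and values here are illustrative only:

settings = {
    'webpack.debug': 'false',                        # base value, inherited
    'webpack.other.stats_file': 'other-stats.json',  # override for config 'other'
}

# name='other', setting='stats_file' -> the prefixed key exists, so it wins
print(settings.get('webpack.other.stats_file'))      # other-stats.json

# name='other', setting='debug' -> missing, so it falls back to 'webpack.debug'
print(settings.get('webpack.other.debug', settings.get('webpack.debug')))  # false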
stevearc/pyramid_webpack
pyramid_webpack/__init__.py
WebpackState.load_stats
def load_stats(self, cache=None, wait=None):
    """ Load and cache the webpack-stats file """
    if cache is None:
        cache = not self.debug
    if wait is None:
        wait = self.debug
    if not cache or self._stats is None:
        self._stats = self._load_stats()
        start = time.time()
        while wait and self._stats.get('status') == 'compiling':
            if self.timeout and (time.time() - start > self.timeout):
                raise RuntimeError("Webpack {0!r} timed out while compiling"
                                   .format(self.stats_file.path))
            time.sleep(0.1)
            self._stats = self._load_stats()
    return self._stats
python
def load_stats(self, cache=None, wait=None):
    """ Load and cache the webpack-stats file """
    if cache is None:
        cache = not self.debug
    if wait is None:
        wait = self.debug
    if not cache or self._stats is None:
        self._stats = self._load_stats()
        start = time.time()
        while wait and self._stats.get('status') == 'compiling':
            if self.timeout and (time.time() - start > self.timeout):
                raise RuntimeError("Webpack {0!r} timed out while compiling"
                                   .format(self.stats_file.path))
            time.sleep(0.1)
            self._stats = self._load_stats()
    return self._stats
[ "def", "load_stats", "(", "self", ",", "cache", "=", "None", ",", "wait", "=", "None", ")", ":", "if", "cache", "is", "None", ":", "cache", "=", "not", "self", ".", "debug", "if", "wait", "is", "None", ":", "wait", "=", "self", ".", "debug", "if", "not", "cache", "or", "self", ".", "_stats", "is", "None", ":", "self", ".", "_stats", "=", "self", ".", "_load_stats", "(", ")", "start", "=", "time", ".", "time", "(", ")", "while", "wait", "and", "self", ".", "_stats", ".", "get", "(", "'status'", ")", "==", "'compiling'", ":", "if", "self", ".", "timeout", "and", "(", "time", ".", "time", "(", ")", "-", "start", ">", "self", ".", "timeout", ")", ":", "raise", "RuntimeError", "(", "\"Webpack {0!r} timed out while compiling\"", ".", "format", "(", "self", ".", "stats_file", ".", "path", ")", ")", "time", ".", "sleep", "(", "0.1", ")", "self", ".", "_stats", "=", "self", ".", "_load_stats", "(", ")", "return", "self", ".", "_stats" ]
Load and cache the webpack-stats file
[ "Load", "and", "cache", "the", "webpack", "-", "stats", "file" ]
train
https://github.com/stevearc/pyramid_webpack/blob/4fcad26271fd6e8c270e19c7943240fea6d8c484/pyramid_webpack/__init__.py#L110-L125
stevearc/pyramid_webpack
pyramid_webpack/__init__.py
WebpackState._load_stats
def _load_stats(self):
    """ Load the webpack-stats file """
    for attempt in range(0, 3):
        try:
            with self.stats_file.open() as f:
                return json.load(f)
        except ValueError:
            # If we failed to parse the JSON, it's possible that the
            # webpack process is writing to it concurrently and it's in a
            # bad state. Sleep and retry.
            if attempt < 2:
                time.sleep(attempt * 0.2)
            else:
                raise
        except IOError:
            raise IOError(
                "Could not read stats file {0}. Make sure you are using the "
                "webpack-bundle-tracker plugin"
                .format(self.stats_file))
python
def _load_stats(self):
    """ Load the webpack-stats file """
    for attempt in range(0, 3):
        try:
            with self.stats_file.open() as f:
                return json.load(f)
        except ValueError:
            # If we failed to parse the JSON, it's possible that the
            # webpack process is writing to it concurrently and it's in a
            # bad state. Sleep and retry.
            if attempt < 2:
                time.sleep(attempt * 0.2)
            else:
                raise
        except IOError:
            raise IOError(
                "Could not read stats file {0}. Make sure you are using the "
                "webpack-bundle-tracker plugin"
                .format(self.stats_file))
[ "def", "_load_stats", "(", "self", ")", ":", "for", "attempt", "in", "range", "(", "0", ",", "3", ")", ":", "try", ":", "with", "self", ".", "stats_file", ".", "open", "(", ")", "as", "f", ":", "return", "json", ".", "load", "(", "f", ")", "except", "ValueError", ":", "# If we failed to parse the JSON, it's possible that the", "# webpack process is writing to it concurrently and it's in a", "# bad state. Sleep and retry.", "if", "attempt", "<", "2", ":", "time", ".", "sleep", "(", "attempt", "*", "0.2", ")", "else", ":", "raise", "except", "IOError", ":", "raise", "IOError", "(", "\"Could not read stats file {0}. Make sure you are using the \"", "\"webpack-bundle-tracker plugin\"", ".", "format", "(", "self", ".", "stats_file", ")", ")" ]
Load the webpack-stats file
[ "Load", "the", "webpack", "-", "stats", "file" ]
train
https://github.com/stevearc/pyramid_webpack/blob/4fcad26271fd6e8c270e19c7943240fea6d8c484/pyramid_webpack/__init__.py#L127-L144
stevearc/pyramid_webpack
pyramid_webpack/__init__.py
Webpack._chunk_filter
def _chunk_filter(self, extensions):
    """ Create a filter from the extensions and ignore files """
    if isinstance(extensions, six.string_types):
        extensions = extensions.split()

    def _filter(chunk):
        """ Exclusion filter """
        name = chunk['name']
        if extensions is not None:
            if not any(name.endswith(e) for e in extensions):
                return False
        for pattern in self.state.ignore_re:
            if pattern.match(name):
                return False
        for pattern in self.state.ignore:
            if fnmatch.fnmatchcase(name, pattern):
                return False
        return True
    return _filter
python
def _chunk_filter(self, extensions):
    """ Create a filter from the extensions and ignore files """
    if isinstance(extensions, six.string_types):
        extensions = extensions.split()

    def _filter(chunk):
        """ Exclusion filter """
        name = chunk['name']
        if extensions is not None:
            if not any(name.endswith(e) for e in extensions):
                return False
        for pattern in self.state.ignore_re:
            if pattern.match(name):
                return False
        for pattern in self.state.ignore:
            if fnmatch.fnmatchcase(name, pattern):
                return False
        return True
    return _filter
[ "def", "_chunk_filter", "(", "self", ",", "extensions", ")", ":", "if", "isinstance", "(", "extensions", ",", "six", ".", "string_types", ")", ":", "extensions", "=", "extensions", ".", "split", "(", ")", "def", "_filter", "(", "chunk", ")", ":", "\"\"\" Exclusion filter \"\"\"", "name", "=", "chunk", "[", "'name'", "]", "if", "extensions", "is", "not", "None", ":", "if", "not", "any", "(", "name", ".", "endswith", "(", "e", ")", "for", "e", "in", "extensions", ")", ":", "return", "False", "for", "pattern", "in", "self", ".", "state", ".", "ignore_re", ":", "if", "pattern", ".", "match", "(", "name", ")", ":", "return", "False", "for", "pattern", "in", "self", ".", "state", ".", "ignore", ":", "if", "fnmatch", ".", "fnmatchcase", "(", "name", ",", "pattern", ")", ":", "return", "False", "return", "True", "return", "_filter" ]
Create a filter from the extensions and ignore files
[ "Create", "a", "filter", "from", "the", "extensions", "and", "ignore", "files" ]
train
https://github.com/stevearc/pyramid_webpack/blob/4fcad26271fd6e8c270e19c7943240fea6d8c484/pyramid_webpack/__init__.py#L163-L181
stevearc/pyramid_webpack
pyramid_webpack/__init__.py
Webpack._add_url
def _add_url(self, chunk):
    """ Add a 'url' property to a chunk and return it """
    if 'url' in chunk:
        return chunk
    public_path = chunk.get('publicPath')
    if public_path:
        chunk['url'] = public_path
    else:
        fullpath = posixpath.join(self.state.static_view_path, chunk['name'])
        chunk['url'] = self._request.static_url(fullpath)
    return chunk
python
def _add_url(self, chunk):
    """ Add a 'url' property to a chunk and return it """
    if 'url' in chunk:
        return chunk
    public_path = chunk.get('publicPath')
    if public_path:
        chunk['url'] = public_path
    else:
        fullpath = posixpath.join(self.state.static_view_path, chunk['name'])
        chunk['url'] = self._request.static_url(fullpath)
    return chunk
[ "def", "_add_url", "(", "self", ",", "chunk", ")", ":", "if", "'url'", "in", "chunk", ":", "return", "chunk", "public_path", "=", "chunk", ".", "get", "(", "'publicPath'", ")", "if", "public_path", ":", "chunk", "[", "'url'", "]", "=", "public_path", "else", ":", "fullpath", "=", "posixpath", ".", "join", "(", "self", ".", "state", ".", "static_view_path", ",", "chunk", "[", "'name'", "]", ")", "chunk", "[", "'url'", "]", "=", "self", ".", "_request", ".", "static_url", "(", "fullpath", ")", "return", "chunk" ]
Add a 'url' property to a chunk and return it
[ "Add", "a", "url", "property", "to", "a", "chunk", "and", "return", "it" ]
train
https://github.com/stevearc/pyramid_webpack/blob/4fcad26271fd6e8c270e19c7943240fea6d8c484/pyramid_webpack/__init__.py#L183-L194
stevearc/pyramid_webpack
pyramid_webpack/__init__.py
Webpack.get_bundle
def get_bundle(self, bundle_name, extensions=None):
    """ Get all the chunks contained in a bundle """
    if self.stats.get('status') == 'done':
        bundle = self.stats.get('chunks', {}).get(bundle_name, None)
        if bundle is None:
            raise KeyError('No such bundle {0!r}.'.format(bundle_name))
        test = self._chunk_filter(extensions)
        return [self._add_url(c) for c in bundle if test(c)]
    elif self.stats.get('status') == 'error':
        raise RuntimeError("{error}: {message}".format(**self.stats))
    else:
        raise RuntimeError(
            "Bad webpack stats file {0} status: {1!r}"
            .format(self.state.stats_file, self.stats.get('status')))
python
def get_bundle(self, bundle_name, extensions=None):
    """ Get all the chunks contained in a bundle """
    if self.stats.get('status') == 'done':
        bundle = self.stats.get('chunks', {}).get(bundle_name, None)
        if bundle is None:
            raise KeyError('No such bundle {0!r}.'.format(bundle_name))
        test = self._chunk_filter(extensions)
        return [self._add_url(c) for c in bundle if test(c)]
    elif self.stats.get('status') == 'error':
        raise RuntimeError("{error}: {message}".format(**self.stats))
    else:
        raise RuntimeError(
            "Bad webpack stats file {0} status: {1!r}"
            .format(self.state.stats_file, self.stats.get('status')))
[ "def", "get_bundle", "(", "self", ",", "bundle_name", ",", "extensions", "=", "None", ")", ":", "if", "self", ".", "stats", ".", "get", "(", "'status'", ")", "==", "'done'", ":", "bundle", "=", "self", ".", "stats", ".", "get", "(", "'chunks'", ",", "{", "}", ")", ".", "get", "(", "bundle_name", ",", "None", ")", "if", "bundle", "is", "None", ":", "raise", "KeyError", "(", "'No such bundle {0!r}.'", ".", "format", "(", "bundle_name", ")", ")", "test", "=", "self", ".", "_chunk_filter", "(", "extensions", ")", "return", "[", "self", ".", "_add_url", "(", "c", ")", "for", "c", "in", "bundle", "if", "test", "(", "c", ")", "]", "elif", "self", ".", "stats", ".", "get", "(", "'status'", ")", "==", "'error'", ":", "raise", "RuntimeError", "(", "\"{error}: {message}\"", ".", "format", "(", "*", "*", "self", ".", "stats", ")", ")", "else", ":", "raise", "RuntimeError", "(", "\"Bad webpack stats file {0} status: {1!r}\"", ".", "format", "(", "self", ".", "state", ".", "stats_file", ",", "self", ".", "stats", ".", "get", "(", "'status'", ")", ")", ")" ]
Get all the chunks contained in a bundle
[ "Get", "all", "the", "chunks", "contained", "in", "a", "bundle" ]
train
https://github.com/stevearc/pyramid_webpack/blob/4fcad26271fd6e8c270e19c7943240fea6d8c484/pyramid_webpack/__init__.py#L196-L209
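A hedged sketch of how the pieces in these records might be used from a Pyramid view once `includeme` has run; the bundle name 'main' and the '.js' extension filter are illustrative:

def my_view(request):
    webpack = get_webpack(request)                        # 'DEFAULT' config
    scripts = webpack.get_bundle('main', extensions='.js')
    tags = ['<script src="{0}"></script>'.format(chunk['url'])
            for chunk in scripts]                         # 'url' added by _add_url
    return {'webpack_scripts': tags}

Since `includeme` registers `get_webpack` as a request method named 'webpack', the same object should also be reachable as `request.webpack()` or `request.webpack('some_config')`.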
VisTrails/tej
tej/submission.py
_unique_names
def _unique_names():
    """Generates unique sequences of bytes.
    """
    characters = ("abcdefghijklmnopqrstuvwxyz"
                  "0123456789")
    characters = [characters[i:i + 1] for i in irange(len(characters))]
    rng = random.Random()
    while True:
        letters = [rng.choice(characters) for i in irange(10)]
        yield ''.join(letters)
python
def _unique_names():
    """Generates unique sequences of bytes.
    """
    characters = ("abcdefghijklmnopqrstuvwxyz"
                  "0123456789")
    characters = [characters[i:i + 1] for i in irange(len(characters))]
    rng = random.Random()
    while True:
        letters = [rng.choice(characters) for i in irange(10)]
        yield ''.join(letters)
[ "def", "_unique_names", "(", ")", ":", "characters", "=", "(", "\"abcdefghijklmnopqrstuvwxyz\"", "\"0123456789\"", ")", "characters", "=", "[", "characters", "[", "i", ":", "i", "+", "1", "]", "for", "i", "in", "irange", "(", "len", "(", "characters", ")", ")", "]", "rng", "=", "random", ".", "Random", "(", ")", "while", "True", ":", "letters", "=", "[", "rng", ".", "choice", "(", "characters", ")", "for", "i", "in", "irange", "(", "10", ")", "]", "yield", "''", ".", "join", "(", "letters", ")" ]
Generates unique sequences of bytes.
[ "Generates", "unique", "sequences", "of", "bytes", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L31-L40
VisTrails/tej
tej/submission.py
escape_queue
def escape_queue(s):
    """Escapes the path to a queue, e.g. preserves ~ at the begining.
    """
    if isinstance(s, PosixPath):
        s = unicode_(s)
    elif isinstance(s, bytes):
        s = s.decode('utf-8')
    if s.startswith('~/'):
        return '~/' + shell_escape(s[2:])
    else:
        return shell_escape(s)
python
def escape_queue(s):
    """Escapes the path to a queue, e.g. preserves ~ at the begining.
    """
    if isinstance(s, PosixPath):
        s = unicode_(s)
    elif isinstance(s, bytes):
        s = s.decode('utf-8')
    if s.startswith('~/'):
        return '~/' + shell_escape(s[2:])
    else:
        return shell_escape(s)
[ "def", "escape_queue", "(", "s", ")", ":", "if", "isinstance", "(", "s", ",", "PosixPath", ")", ":", "s", "=", "unicode_", "(", "s", ")", "elif", "isinstance", "(", "s", ",", "bytes", ")", ":", "s", "=", "s", ".", "decode", "(", "'utf-8'", ")", "if", "s", ".", "startswith", "(", "'~/'", ")", ":", "return", "'~/'", "+", "shell_escape", "(", "s", "[", "2", ":", "]", ")", "else", ":", "return", "shell_escape", "(", "s", ")" ]
Escapes the path to a queue, e.g. preserves ~ at the begining.
[ "Escapes", "the", "path", "to", "a", "queue", "e", ".", "g", ".", "preserves", "~", "at", "the", "begining", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L52-L62
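The point of the special case above is that quoting must not swallow the leading `~/`, or the remote shell would stop expanding it. A standalone sketch of the same idea using `shlex.quote` as a stand-in for tej's `shell_escape` (which is not shown in this excerpt); the function name is illustrative:

from shlex import quote

def escape_queue_path(s):
    if s.startswith('~/'):
        return '~/' + quote(s[2:])     # keep ~/ outside the quotes
    return quote(s)

print(escape_queue_path('~/my queue'))     # ~/'my queue'
print(escape_queue_path('/tmp/my queue'))  # '/tmp/my queue'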
VisTrails/tej
tej/submission.py
parse_ssh_destination
def parse_ssh_destination(destination):
    """Parses the SSH destination argument.
    """
    match = _re_ssh.match(destination)
    if not match:
        raise InvalidDestination("Invalid destination: %s" % destination)
    user, password, host, port = match.groups()
    info = {}
    if user:
        info['username'] = user
    else:
        info['username'] = getpass.getuser()
    if password:
        info['password'] = password
    if port:
        info['port'] = int(port)
    info['hostname'] = host

    return info
python
def parse_ssh_destination(destination):
    """Parses the SSH destination argument.
    """
    match = _re_ssh.match(destination)
    if not match:
        raise InvalidDestination("Invalid destination: %s" % destination)
    user, password, host, port = match.groups()
    info = {}
    if user:
        info['username'] = user
    else:
        info['username'] = getpass.getuser()
    if password:
        info['password'] = password
    if port:
        info['port'] = int(port)
    info['hostname'] = host

    return info
[ "def", "parse_ssh_destination", "(", "destination", ")", ":", "match", "=", "_re_ssh", ".", "match", "(", "destination", ")", "if", "not", "match", ":", "raise", "InvalidDestination", "(", "\"Invalid destination: %s\"", "%", "destination", ")", "user", ",", "password", ",", "host", ",", "port", "=", "match", ".", "groups", "(", ")", "info", "=", "{", "}", "if", "user", ":", "info", "[", "'username'", "]", "=", "user", "else", ":", "info", "[", "'username'", "]", "=", "getpass", ".", "getuser", "(", ")", "if", "password", ":", "info", "[", "'password'", "]", "=", "password", "if", "port", ":", "info", "[", "'port'", "]", "=", "int", "(", "port", ")", "info", "[", "'hostname'", "]", "=", "host", "return", "info" ]
Parses the SSH destination argument.
[ "Parses", "the", "SSH", "destination", "argument", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L75-L93
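Judging by the match groups (user, password, host, port), the destination string follows the usual `user[:password]@host[:port]` shape; the regex itself is not shown here, so this example is hedged on that reading. The host and credentials below are invented:

info = parse_ssh_destination('alice:secret@build.example.org:2222')
# Expected, given the code above:
# {'username': 'alice', 'password': 'secret',
#  'port': 2222, 'hostname': 'build.example.org'}
print(info['username'], info['hostname'], info['port'])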
VisTrails/tej
tej/submission.py
RemoteQueue._ssh_client
def _ssh_client(self):
    """Gets an SSH client to connect with.
    """
    ssh = paramiko.SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(paramiko.RejectPolicy())
    return ssh
python
def _ssh_client(self):
    """Gets an SSH client to connect with.
    """
    ssh = paramiko.SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(paramiko.RejectPolicy())
    return ssh
[ "def", "_ssh_client", "(", "self", ")", ":", "ssh", "=", "paramiko", ".", "SSHClient", "(", ")", "ssh", ".", "load_system_host_keys", "(", ")", "ssh", ".", "set_missing_host_key_policy", "(", "paramiko", ".", "RejectPolicy", "(", ")", ")", "return", "ssh" ]
Gets an SSH client to connect with.
[ "Gets", "an", "SSH", "client", "to", "connect", "with", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L201-L207
VisTrails/tej
tej/submission.py
RemoteQueue._connect
def _connect(self):
    """Connects via SSH.
    """
    ssh = self._ssh_client()
    logger.debug("Connecting with %s",
                 ', '.join('%s=%r' % (k, v if k != "password" else "***")
                           for k, v in iteritems(self.destination)))
    ssh.connect(**self.destination)
    logger.debug("Connected to %s", self.destination['hostname'])
    self._ssh = ssh
python
def _connect(self):
    """Connects via SSH.
    """
    ssh = self._ssh_client()
    logger.debug("Connecting with %s",
                 ', '.join('%s=%r' % (k, v if k != "password" else "***")
                           for k, v in iteritems(self.destination)))
    ssh.connect(**self.destination)
    logger.debug("Connected to %s", self.destination['hostname'])
    self._ssh = ssh
[ "def", "_connect", "(", "self", ")", ":", "ssh", "=", "self", ".", "_ssh_client", "(", ")", "logger", ".", "debug", "(", "\"Connecting with %s\"", ",", "', '", ".", "join", "(", "'%s=%r'", "%", "(", "k", ",", "v", "if", "k", "!=", "\"password\"", "else", "\"***\"", ")", "for", "k", ",", "v", "in", "iteritems", "(", "self", ".", "destination", ")", ")", ")", "ssh", ".", "connect", "(", "*", "*", "self", ".", "destination", ")", "logger", ".", "debug", "(", "\"Connected to %s\"", ",", "self", ".", "destination", "[", "'hostname'", "]", ")", "self", ".", "_ssh", "=", "ssh" ]
Connects via SSH.
[ "Connects", "via", "SSH", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L209-L218
VisTrails/tej
tej/submission.py
RemoteQueue.get_client
def get_client(self):
    """Gets the SSH client.

    This will check that the connection is still alive first, and reconnect
    if necessary.
    """
    if self._ssh is None:
        self._connect()
        return self._ssh
    else:
        try:
            chan = self._ssh.get_transport().open_session()
        except (socket.error, paramiko.SSHException):
            logger.warning("Lost connection, reconnecting...")
            self._ssh.close()
            self._connect()
        else:
            chan.close()
        return self._ssh
python
def get_client(self):
    """Gets the SSH client.

    This will check that the connection is still alive first, and reconnect
    if necessary.
    """
    if self._ssh is None:
        self._connect()
        return self._ssh
    else:
        try:
            chan = self._ssh.get_transport().open_session()
        except (socket.error, paramiko.SSHException):
            logger.warning("Lost connection, reconnecting...")
            self._ssh.close()
            self._connect()
        else:
            chan.close()
        return self._ssh
[ "def", "get_client", "(", "self", ")", ":", "if", "self", ".", "_ssh", "is", "None", ":", "self", ".", "_connect", "(", ")", "return", "self", ".", "_ssh", "else", ":", "try", ":", "chan", "=", "self", ".", "_ssh", ".", "get_transport", "(", ")", ".", "open_session", "(", ")", "except", "(", "socket", ".", "error", ",", "paramiko", ".", "SSHException", ")", ":", "logger", ".", "warning", "(", "\"Lost connection, reconnecting...\"", ")", "self", ".", "_ssh", ".", "close", "(", ")", "self", ".", "_connect", "(", ")", "else", ":", "chan", ".", "close", "(", ")", "return", "self", ".", "_ssh" ]
Gets the SSH client. This will check that the connection is still alive first, and reconnect if necessary.
[ "Gets", "the", "SSH", "client", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L220-L238
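The get_client record above probes a cached connection by opening and closing a throwaway session channel. A small sketch distilling that check into a helper; the function name is mine, not tej's.

import socket
import paramiko

def connection_alive(ssh):
    # Same probe as RemoteQueue.get_client: opening a session channel on a
    # dead transport fails fast with socket.error / paramiko.SSHException.
    try:
        chan = ssh.get_transport().open_session()
    except (socket.error, paramiko.SSHException):
        return False
    chan.close()
    return True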
VisTrails/tej
tej/submission.py
RemoteQueue._call
def _call(self, cmd, get_output): """Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned. """ server_err = self.server_logger() chan = self.get_client().get_transport().open_session() try: logger.debug("Invoking %r%s", cmd, " (stdout)" if get_output else "") chan.exec_command('/bin/sh -c %s' % shell_escape(cmd)) output = b'' while True: r, w, e = select.select([chan], [], []) if chan not in r: continue # pragma: no cover recvd = False while chan.recv_stderr_ready(): data = chan.recv_stderr(1024) server_err.append(data) recvd = True while chan.recv_ready(): data = chan.recv(1024) if get_output: output += data recvd = True if not recvd and chan.exit_status_ready(): break output = output.rstrip(b'\r\n') return chan.recv_exit_status(), output finally: server_err.done() chan.close()
python
def _call(self, cmd, get_output): """Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned. """ server_err = self.server_logger() chan = self.get_client().get_transport().open_session() try: logger.debug("Invoking %r%s", cmd, " (stdout)" if get_output else "") chan.exec_command('/bin/sh -c %s' % shell_escape(cmd)) output = b'' while True: r, w, e = select.select([chan], [], []) if chan not in r: continue # pragma: no cover recvd = False while chan.recv_stderr_ready(): data = chan.recv_stderr(1024) server_err.append(data) recvd = True while chan.recv_ready(): data = chan.recv(1024) if get_output: output += data recvd = True if not recvd and chan.exit_status_ready(): break output = output.rstrip(b'\r\n') return chan.recv_exit_status(), output finally: server_err.done() chan.close()
[ "def", "_call", "(", "self", ",", "cmd", ",", "get_output", ")", ":", "server_err", "=", "self", ".", "server_logger", "(", ")", "chan", "=", "self", ".", "get_client", "(", ")", ".", "get_transport", "(", ")", ".", "open_session", "(", ")", "try", ":", "logger", ".", "debug", "(", "\"Invoking %r%s\"", ",", "cmd", ",", "\" (stdout)\"", "if", "get_output", "else", "\"\"", ")", "chan", ".", "exec_command", "(", "'/bin/sh -c %s'", "%", "shell_escape", "(", "cmd", ")", ")", "output", "=", "b''", "while", "True", ":", "r", ",", "w", ",", "e", "=", "select", ".", "select", "(", "[", "chan", "]", ",", "[", "]", ",", "[", "]", ")", "if", "chan", "not", "in", "r", ":", "continue", "# pragma: no cover", "recvd", "=", "False", "while", "chan", ".", "recv_stderr_ready", "(", ")", ":", "data", "=", "chan", ".", "recv_stderr", "(", "1024", ")", "server_err", ".", "append", "(", "data", ")", "recvd", "=", "True", "while", "chan", ".", "recv_ready", "(", ")", ":", "data", "=", "chan", ".", "recv", "(", "1024", ")", "if", "get_output", ":", "output", "+=", "data", "recvd", "=", "True", "if", "not", "recvd", "and", "chan", ".", "exit_status_ready", "(", ")", ":", "break", "output", "=", "output", ".", "rstrip", "(", "b'\\r\\n'", ")", "return", "chan", ".", "recv_exit_status", "(", ")", ",", "output", "finally", ":", "server_err", ".", "done", "(", ")", "chan", ".", "close", "(", ")" ]
Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned.
[ "Calls", "a", "command", "through", "the", "SSH", "connection", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L243-L277
VisTrails/tej
tej/submission.py
RemoteQueue.check_call
def check_call(self, cmd): """Calls a command through SSH. """ ret, _ = self._call(cmd, False) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret)
python
def check_call(self, cmd): """Calls a command through SSH. """ ret, _ = self._call(cmd, False) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret)
[ "def", "check_call", "(", "self", ",", "cmd", ")", ":", "ret", ",", "_", "=", "self", ".", "_call", "(", "cmd", ",", "False", ")", "if", "ret", "!=", "0", ":", "# pragma: no cover", "raise", "RemoteCommandFailure", "(", "command", "=", "cmd", ",", "ret", "=", "ret", ")" ]
Calls a command through SSH.
[ "Calls", "a", "command", "through", "SSH", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L279-L284
VisTrails/tej
tej/submission.py
RemoteQueue.check_output
def check_output(self, cmd): """Calls a command through SSH and returns its output. """ ret, output = self._call(cmd, True) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) logger.debug("Output: %r", output) return output
python
def check_output(self, cmd): """Calls a command through SSH and returns its output. """ ret, output = self._call(cmd, True) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) logger.debug("Output: %r", output) return output
[ "def", "check_output", "(", "self", ",", "cmd", ")", ":", "ret", ",", "output", "=", "self", ".", "_call", "(", "cmd", ",", "True", ")", "if", "ret", "!=", "0", ":", "# pragma: no cover", "raise", "RemoteCommandFailure", "(", "command", "=", "cmd", ",", "ret", "=", "ret", ")", "logger", ".", "debug", "(", "\"Output: %r\"", ",", "output", ")", "return", "output" ]
Calls a command through SSH and returns its output.
[ "Calls", "a", "command", "through", "SSH", "and", "returns", "its", "output", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L286-L293
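A hedged usage sketch for check_output: per the _call and check_output records, output comes back as bytes with the trailing newline already stripped, and a nonzero exit raises RemoteCommandFailure. Here `queue` is assumed to be an already-constructed RemoteQueue; its constructor is not shown in these records.

def remote_hostname(queue):
    # queue: an existing RemoteQueue instance (construction not shown here).
    # check_output() returns bytes, already rstripped of b'\r\n'.
    return queue.check_output("hostname").decode("utf-8", "replace")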
VisTrails/tej
tej/submission.py
RemoteQueue._resolve_queue
def _resolve_queue(self, queue, depth=0, links=None): """Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a link to another path (a file containing the string ``tejdir:``, a space, and a new pathname, relative to this link's location). """ if depth == 0: logger.debug("resolve_queue(%s)", queue) answer = self.check_output( 'if [ -d %(queue)s ]; then ' ' cd %(queue)s; echo "dir"; cat version; pwd; ' 'elif [ -f %(queue)s ]; then ' ' cat %(queue)s; ' 'else ' ' echo no; ' 'fi' % { 'queue': escape_queue(queue)}) if answer == b'no': if depth > 0: logger.debug("Broken link at depth=%d", depth) else: logger.debug("Path doesn't exist") return None, depth elif answer.startswith(b'dir\n'): version, runtime, path = answer[4:].split(b'\n', 2) try: version = tuple(int(e) for e in version.decode('ascii', 'ignore') .split('.')) except ValueError: version = 0, 0 if version[:2] != self.PROTOCOL_VERSION: raise QueueExists( msg="Queue exists and is using incompatible protocol " "version %s" % '.'.join('%s' % e for e in version)) path = PosixPath(path) runtime = runtime.decode('ascii', 'replace') if self.need_runtime is not None: if (self.need_runtime is not None and runtime not in self.need_runtime): raise QueueExists( msg="Queue exists and is using explicitely disallowed " "runtime %s" % runtime) logger.debug("Found directory at %s, depth=%d, runtime=%s", path, depth, runtime) return path, depth elif answer.startswith(b'tejdir: '): new = queue.parent / answer[8:] logger.debug("Found link to %s, recursing", new) if links is not None: links.append(queue) return self._resolve_queue(new, depth + 1) else: # pragma: no cover logger.debug("Server returned %r", answer) raise RemoteCommandFailure(msg="Queue resolution command failed " "in unexpected way")
python
def _resolve_queue(self, queue, depth=0, links=None): """Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a link to another path (a file containing the string ``tejdir:``, a space, and a new pathname, relative to this link's location). """ if depth == 0: logger.debug("resolve_queue(%s)", queue) answer = self.check_output( 'if [ -d %(queue)s ]; then ' ' cd %(queue)s; echo "dir"; cat version; pwd; ' 'elif [ -f %(queue)s ]; then ' ' cat %(queue)s; ' 'else ' ' echo no; ' 'fi' % { 'queue': escape_queue(queue)}) if answer == b'no': if depth > 0: logger.debug("Broken link at depth=%d", depth) else: logger.debug("Path doesn't exist") return None, depth elif answer.startswith(b'dir\n'): version, runtime, path = answer[4:].split(b'\n', 2) try: version = tuple(int(e) for e in version.decode('ascii', 'ignore') .split('.')) except ValueError: version = 0, 0 if version[:2] != self.PROTOCOL_VERSION: raise QueueExists( msg="Queue exists and is using incompatible protocol " "version %s" % '.'.join('%s' % e for e in version)) path = PosixPath(path) runtime = runtime.decode('ascii', 'replace') if self.need_runtime is not None: if (self.need_runtime is not None and runtime not in self.need_runtime): raise QueueExists( msg="Queue exists and is using explicitely disallowed " "runtime %s" % runtime) logger.debug("Found directory at %s, depth=%d, runtime=%s", path, depth, runtime) return path, depth elif answer.startswith(b'tejdir: '): new = queue.parent / answer[8:] logger.debug("Found link to %s, recursing", new) if links is not None: links.append(queue) return self._resolve_queue(new, depth + 1) else: # pragma: no cover logger.debug("Server returned %r", answer) raise RemoteCommandFailure(msg="Queue resolution command failed " "in unexpected way")
[ "def", "_resolve_queue", "(", "self", ",", "queue", ",", "depth", "=", "0", ",", "links", "=", "None", ")", ":", "if", "depth", "==", "0", ":", "logger", ".", "debug", "(", "\"resolve_queue(%s)\"", ",", "queue", ")", "answer", "=", "self", ".", "check_output", "(", "'if [ -d %(queue)s ]; then '", "' cd %(queue)s; echo \"dir\"; cat version; pwd; '", "'elif [ -f %(queue)s ]; then '", "' cat %(queue)s; '", "'else '", "' echo no; '", "'fi'", "%", "{", "'queue'", ":", "escape_queue", "(", "queue", ")", "}", ")", "if", "answer", "==", "b'no'", ":", "if", "depth", ">", "0", ":", "logger", ".", "debug", "(", "\"Broken link at depth=%d\"", ",", "depth", ")", "else", ":", "logger", ".", "debug", "(", "\"Path doesn't exist\"", ")", "return", "None", ",", "depth", "elif", "answer", ".", "startswith", "(", "b'dir\\n'", ")", ":", "version", ",", "runtime", ",", "path", "=", "answer", "[", "4", ":", "]", ".", "split", "(", "b'\\n'", ",", "2", ")", "try", ":", "version", "=", "tuple", "(", "int", "(", "e", ")", "for", "e", "in", "version", ".", "decode", "(", "'ascii'", ",", "'ignore'", ")", ".", "split", "(", "'.'", ")", ")", "except", "ValueError", ":", "version", "=", "0", ",", "0", "if", "version", "[", ":", "2", "]", "!=", "self", ".", "PROTOCOL_VERSION", ":", "raise", "QueueExists", "(", "msg", "=", "\"Queue exists and is using incompatible protocol \"", "\"version %s\"", "%", "'.'", ".", "join", "(", "'%s'", "%", "e", "for", "e", "in", "version", ")", ")", "path", "=", "PosixPath", "(", "path", ")", "runtime", "=", "runtime", ".", "decode", "(", "'ascii'", ",", "'replace'", ")", "if", "self", ".", "need_runtime", "is", "not", "None", ":", "if", "(", "self", ".", "need_runtime", "is", "not", "None", "and", "runtime", "not", "in", "self", ".", "need_runtime", ")", ":", "raise", "QueueExists", "(", "msg", "=", "\"Queue exists and is using explicitely disallowed \"", "\"runtime %s\"", "%", "runtime", ")", "logger", ".", "debug", "(", "\"Found directory at %s, depth=%d, runtime=%s\"", ",", "path", ",", "depth", ",", "runtime", ")", "return", "path", ",", "depth", "elif", "answer", ".", "startswith", "(", "b'tejdir: '", ")", ":", "new", "=", "queue", ".", "parent", "/", "answer", "[", "8", ":", "]", "logger", ".", "debug", "(", "\"Found link to %s, recursing\"", ",", "new", ")", "if", "links", "is", "not", "None", ":", "links", ".", "append", "(", "queue", ")", "return", "self", ".", "_resolve_queue", "(", "new", ",", "depth", "+", "1", ")", "else", ":", "# pragma: no cover", "logger", ".", "debug", "(", "\"Server returned %r\"", ",", "answer", ")", "raise", "RemoteCommandFailure", "(", "msg", "=", "\"Queue resolution command failed \"", "\"in unexpected way\"", ")" ]
Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a link to another path (a file containing the string ``tejdir:``, a space, and a new pathname, relative to this link's location).
[ "Finds", "the", "location", "of", "tej", "s", "queue", "directory", "on", "the", "server", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L295-L353
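The _resolve_queue record distinguishes three remote answers: b'no' (missing path or broken link), a b'dir\n...' block (a real queue directory carrying version, runtime and path), and a b'tejdir: ' link to follow. A local, self-contained sketch of just that branching; the function name and return labels are mine.

def classify_queue_answer(answer):
    if answer == b'no':
        return 'missing', None                    # path absent or broken link
    if answer.startswith(b'dir\n'):
        version, runtime, path = answer[4:].split(b'\n', 2)
        return 'queue', (version, runtime, path)  # real queue directory
    if answer.startswith(b'tejdir: '):
        return 'link', answer[8:]                 # resolved relative to the link's parent
    return 'unexpected', answer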
VisTrails/tej
tej/submission.py
RemoteQueue._get_queue
def _get_queue(self): """Gets the actual location of the queue, or None. """ if self._queue is None: self._links = [] queue, depth = self._resolve_queue(self.queue, links=self._links) if queue is None and depth > 0: raise QueueLinkBroken self._queue = queue return self._queue
python
def _get_queue(self): """Gets the actual location of the queue, or None. """ if self._queue is None: self._links = [] queue, depth = self._resolve_queue(self.queue, links=self._links) if queue is None and depth > 0: raise QueueLinkBroken self._queue = queue return self._queue
[ "def", "_get_queue", "(", "self", ")", ":", "if", "self", ".", "_queue", "is", "None", ":", "self", ".", "_links", "=", "[", "]", "queue", ",", "depth", "=", "self", ".", "_resolve_queue", "(", "self", ".", "queue", ",", "links", "=", "self", ".", "_links", ")", "if", "queue", "is", "None", "and", "depth", ">", "0", ":", "raise", "QueueLinkBroken", "self", ".", "_queue", "=", "queue", "return", "self", ".", "_queue" ]
Gets the actual location of the queue, or None.
[ "Gets", "the", "actual", "location", "of", "the", "queue", "or", "None", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L355-L364
VisTrails/tej
tej/submission.py
RemoteQueue.setup
def setup(self, links=None, force=False, only_links=False): """Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. After installation, creates links to this installation at the specified locations. """ if not links: links = [] if only_links: logger.info("Only creating links") for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(self.queue), 'link': escape_queue(link)}) return queue, depth = self._resolve_queue(self.queue) if queue is not None or depth > 0: if force: if queue is None: logger.info("Replacing broken link") elif depth > 0: logger.info("Replacing link to %s...", queue) else: logger.info("Replacing existing queue...") self.check_call('rm -Rf %s' % escape_queue(self.queue)) else: if queue is not None and depth > 0: raise QueueExists("Queue already exists (links to %s)\n" "Use --force to replace" % queue) elif depth > 0: raise QueueExists("Broken link exists\n" "Use --force to replace") else: raise QueueExists("Queue already exists\n" "Use --force to replace") queue = self._setup() for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(queue), 'link': escape_queue(link)})
python
def setup(self, links=None, force=False, only_links=False): """Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. After installation, creates links to this installation at the specified locations. """ if not links: links = [] if only_links: logger.info("Only creating links") for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(self.queue), 'link': escape_queue(link)}) return queue, depth = self._resolve_queue(self.queue) if queue is not None or depth > 0: if force: if queue is None: logger.info("Replacing broken link") elif depth > 0: logger.info("Replacing link to %s...", queue) else: logger.info("Replacing existing queue...") self.check_call('rm -Rf %s' % escape_queue(self.queue)) else: if queue is not None and depth > 0: raise QueueExists("Queue already exists (links to %s)\n" "Use --force to replace" % queue) elif depth > 0: raise QueueExists("Broken link exists\n" "Use --force to replace") else: raise QueueExists("Queue already exists\n" "Use --force to replace") queue = self._setup() for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(queue), 'link': escape_queue(link)})
[ "def", "setup", "(", "self", ",", "links", "=", "None", ",", "force", "=", "False", ",", "only_links", "=", "False", ")", ":", "if", "not", "links", ":", "links", "=", "[", "]", "if", "only_links", ":", "logger", ".", "info", "(", "\"Only creating links\"", ")", "for", "link", "in", "links", ":", "self", ".", "check_call", "(", "'echo \"tejdir:\" %(queue)s > %(link)s'", "%", "{", "'queue'", ":", "escape_queue", "(", "self", ".", "queue", ")", ",", "'link'", ":", "escape_queue", "(", "link", ")", "}", ")", "return", "queue", ",", "depth", "=", "self", ".", "_resolve_queue", "(", "self", ".", "queue", ")", "if", "queue", "is", "not", "None", "or", "depth", ">", "0", ":", "if", "force", ":", "if", "queue", "is", "None", ":", "logger", ".", "info", "(", "\"Replacing broken link\"", ")", "elif", "depth", ">", "0", ":", "logger", ".", "info", "(", "\"Replacing link to %s...\"", ",", "queue", ")", "else", ":", "logger", ".", "info", "(", "\"Replacing existing queue...\"", ")", "self", ".", "check_call", "(", "'rm -Rf %s'", "%", "escape_queue", "(", "self", ".", "queue", ")", ")", "else", ":", "if", "queue", "is", "not", "None", "and", "depth", ">", "0", ":", "raise", "QueueExists", "(", "\"Queue already exists (links to %s)\\n\"", "\"Use --force to replace\"", "%", "queue", ")", "elif", "depth", ">", "0", ":", "raise", "QueueExists", "(", "\"Broken link exists\\n\"", "\"Use --force to replace\"", ")", "else", ":", "raise", "QueueExists", "(", "\"Queue already exists\\n\"", "\"Use --force to replace\"", ")", "queue", "=", "self", ".", "_setup", "(", ")", "for", "link", "in", "links", ":", "self", ".", "check_call", "(", "'echo \"tejdir:\" %(queue)s > %(link)s'", "%", "{", "'queue'", ":", "escape_queue", "(", "queue", ")", ",", "'link'", ":", "escape_queue", "(", "link", ")", "}", ")" ]
Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. After installation, creates links to this installation at the specified locations.
[ "Installs", "the", "runtime", "at", "the", "target", "location", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L366-L411
VisTrails/tej
tej/submission.py
RemoteQueue._setup
def _setup(self): """Actually installs the runtime. """ # Expands ~user in queue if self.queue.path[0:1] == b'/': queue = self.queue else: if self.queue.path[0:1] == b'~': output = self.check_output('echo %s' % escape_queue(self.queue)) queue = PosixPath(output.rstrip(b'\r\n')) else: output = self.check_output('pwd') queue = PosixPath(output.rstrip(b'\r\n')) / self.queue logger.debug("Resolved to %s", queue) # Select runtime if not self.setup_runtime: # Autoselect if self._call('which qsub', False)[0] == 0: logger.debug("qsub is available, using runtime 'pbs'") runtime = 'pbs' else: logger.debug("qsub not found, using runtime 'default'") runtime = 'default' else: runtime = self.setup_runtime if self.need_runtime is not None and runtime not in self.need_runtime: raise ValueError("About to setup runtime %s but that wouldn't " "match explicitely allowed runtimes" % runtime) logger.info("Installing runtime %s%s at %s", runtime, "" if self.setup_runtime else " (auto)", self.queue) # Uploads runtime scp_client = self.get_scp_client() filename = pkg_resources.resource_filename('tej', 'remotes/%s' % runtime) scp_client.put(filename, str(queue), recursive=True) logger.debug("Files uploaded") # Runs post-setup script self.check_call('/bin/sh %s' % shell_escape(queue / 'commands/setup')) logger.debug("Post-setup script done") self._queue = queue return queue
python
def _setup(self): """Actually installs the runtime. """ # Expands ~user in queue if self.queue.path[0:1] == b'/': queue = self.queue else: if self.queue.path[0:1] == b'~': output = self.check_output('echo %s' % escape_queue(self.queue)) queue = PosixPath(output.rstrip(b'\r\n')) else: output = self.check_output('pwd') queue = PosixPath(output.rstrip(b'\r\n')) / self.queue logger.debug("Resolved to %s", queue) # Select runtime if not self.setup_runtime: # Autoselect if self._call('which qsub', False)[0] == 0: logger.debug("qsub is available, using runtime 'pbs'") runtime = 'pbs' else: logger.debug("qsub not found, using runtime 'default'") runtime = 'default' else: runtime = self.setup_runtime if self.need_runtime is not None and runtime not in self.need_runtime: raise ValueError("About to setup runtime %s but that wouldn't " "match explicitely allowed runtimes" % runtime) logger.info("Installing runtime %s%s at %s", runtime, "" if self.setup_runtime else " (auto)", self.queue) # Uploads runtime scp_client = self.get_scp_client() filename = pkg_resources.resource_filename('tej', 'remotes/%s' % runtime) scp_client.put(filename, str(queue), recursive=True) logger.debug("Files uploaded") # Runs post-setup script self.check_call('/bin/sh %s' % shell_escape(queue / 'commands/setup')) logger.debug("Post-setup script done") self._queue = queue return queue
[ "def", "_setup", "(", "self", ")", ":", "# Expands ~user in queue", "if", "self", ".", "queue", ".", "path", "[", "0", ":", "1", "]", "==", "b'/'", ":", "queue", "=", "self", ".", "queue", "else", ":", "if", "self", ".", "queue", ".", "path", "[", "0", ":", "1", "]", "==", "b'~'", ":", "output", "=", "self", ".", "check_output", "(", "'echo %s'", "%", "escape_queue", "(", "self", ".", "queue", ")", ")", "queue", "=", "PosixPath", "(", "output", ".", "rstrip", "(", "b'\\r\\n'", ")", ")", "else", ":", "output", "=", "self", ".", "check_output", "(", "'pwd'", ")", "queue", "=", "PosixPath", "(", "output", ".", "rstrip", "(", "b'\\r\\n'", ")", ")", "/", "self", ".", "queue", "logger", ".", "debug", "(", "\"Resolved to %s\"", ",", "queue", ")", "# Select runtime", "if", "not", "self", ".", "setup_runtime", ":", "# Autoselect", "if", "self", ".", "_call", "(", "'which qsub'", ",", "False", ")", "[", "0", "]", "==", "0", ":", "logger", ".", "debug", "(", "\"qsub is available, using runtime 'pbs'\"", ")", "runtime", "=", "'pbs'", "else", ":", "logger", ".", "debug", "(", "\"qsub not found, using runtime 'default'\"", ")", "runtime", "=", "'default'", "else", ":", "runtime", "=", "self", ".", "setup_runtime", "if", "self", ".", "need_runtime", "is", "not", "None", "and", "runtime", "not", "in", "self", ".", "need_runtime", ":", "raise", "ValueError", "(", "\"About to setup runtime %s but that wouldn't \"", "\"match explicitely allowed runtimes\"", "%", "runtime", ")", "logger", ".", "info", "(", "\"Installing runtime %s%s at %s\"", ",", "runtime", ",", "\"\"", "if", "self", ".", "setup_runtime", "else", "\" (auto)\"", ",", "self", ".", "queue", ")", "# Uploads runtime", "scp_client", "=", "self", ".", "get_scp_client", "(", ")", "filename", "=", "pkg_resources", ".", "resource_filename", "(", "'tej'", ",", "'remotes/%s'", "%", "runtime", ")", "scp_client", ".", "put", "(", "filename", ",", "str", "(", "queue", ")", ",", "recursive", "=", "True", ")", "logger", ".", "debug", "(", "\"Files uploaded\"", ")", "# Runs post-setup script", "self", ".", "check_call", "(", "'/bin/sh %s'", "%", "shell_escape", "(", "queue", "/", "'commands/setup'", ")", ")", "logger", ".", "debug", "(", "\"Post-setup script done\"", ")", "self", ".", "_queue", "=", "queue", "return", "queue" ]
Actually installs the runtime.
[ "Actually", "installs", "the", "runtime", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L413-L462
VisTrails/tej
tej/submission.py
RemoteQueue.submit
def submit(self, job_id, directory, script=None): """Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error. """ if job_id is None: job_id = '%s_%s_%s' % (Path(directory).unicodename, self.destination['username'], make_unique_name()) else: check_jobid(job_id) queue = self._get_queue() if queue is None: queue = self._setup() if script is None: script = 'start.sh' # Create directory ret, target = self._call('%s %s' % ( shell_escape(queue / 'commands/new_job'), job_id), True) if ret == 4: raise JobAlreadyExists elif ret != 0: raise JobNotFound("Couldn't create job") target = PosixPath(target) logger.debug("Server created directory %s", target) # Upload to directory try: scp_client = self.get_scp_client() scp_client.put(str(Path(directory)), str(target), recursive=True) except BaseException as e: try: self.delete(job_id) except BaseException: raise e raise logger.debug("Files uploaded") # Submit job self.check_call('%s %s %s %s' % ( shell_escape(queue / 'commands/submit'), job_id, shell_escape(target), shell_escape(script))) logger.info("Submitted job %s", job_id) return job_id
python
def submit(self, job_id, directory, script=None): """Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error. """ if job_id is None: job_id = '%s_%s_%s' % (Path(directory).unicodename, self.destination['username'], make_unique_name()) else: check_jobid(job_id) queue = self._get_queue() if queue is None: queue = self._setup() if script is None: script = 'start.sh' # Create directory ret, target = self._call('%s %s' % ( shell_escape(queue / 'commands/new_job'), job_id), True) if ret == 4: raise JobAlreadyExists elif ret != 0: raise JobNotFound("Couldn't create job") target = PosixPath(target) logger.debug("Server created directory %s", target) # Upload to directory try: scp_client = self.get_scp_client() scp_client.put(str(Path(directory)), str(target), recursive=True) except BaseException as e: try: self.delete(job_id) except BaseException: raise e raise logger.debug("Files uploaded") # Submit job self.check_call('%s %s %s %s' % ( shell_escape(queue / 'commands/submit'), job_id, shell_escape(target), shell_escape(script))) logger.info("Submitted job %s", job_id) return job_id
[ "def", "submit", "(", "self", ",", "job_id", ",", "directory", ",", "script", "=", "None", ")", ":", "if", "job_id", "is", "None", ":", "job_id", "=", "'%s_%s_%s'", "%", "(", "Path", "(", "directory", ")", ".", "unicodename", ",", "self", ".", "destination", "[", "'username'", "]", ",", "make_unique_name", "(", ")", ")", "else", ":", "check_jobid", "(", "job_id", ")", "queue", "=", "self", ".", "_get_queue", "(", ")", "if", "queue", "is", "None", ":", "queue", "=", "self", ".", "_setup", "(", ")", "if", "script", "is", "None", ":", "script", "=", "'start.sh'", "# Create directory", "ret", ",", "target", "=", "self", ".", "_call", "(", "'%s %s'", "%", "(", "shell_escape", "(", "queue", "/", "'commands/new_job'", ")", ",", "job_id", ")", ",", "True", ")", "if", "ret", "==", "4", ":", "raise", "JobAlreadyExists", "elif", "ret", "!=", "0", ":", "raise", "JobNotFound", "(", "\"Couldn't create job\"", ")", "target", "=", "PosixPath", "(", "target", ")", "logger", ".", "debug", "(", "\"Server created directory %s\"", ",", "target", ")", "# Upload to directory", "try", ":", "scp_client", "=", "self", ".", "get_scp_client", "(", ")", "scp_client", ".", "put", "(", "str", "(", "Path", "(", "directory", ")", ")", ",", "str", "(", "target", ")", ",", "recursive", "=", "True", ")", "except", "BaseException", "as", "e", ":", "try", ":", "self", ".", "delete", "(", "job_id", ")", "except", "BaseException", ":", "raise", "e", "raise", "logger", ".", "debug", "(", "\"Files uploaded\"", ")", "# Submit job", "self", ".", "check_call", "(", "'%s %s %s %s'", "%", "(", "shell_escape", "(", "queue", "/", "'commands/submit'", ")", ",", "job_id", ",", "shell_escape", "(", "target", ")", ",", "shell_escape", "(", "script", ")", ")", ")", "logger", ".", "info", "(", "\"Submitted job %s\"", ",", "job_id", ")", "return", "job_id" ]
Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error.
[ "Submits", "a", "job", "to", "the", "queue", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L464-L516
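A hedged usage sketch for submit, based only on the signature and behaviour in the record above: with job_id=None the name is derived from the directory name, the remote username and make_unique_name(), while an existing ID raises JobAlreadyExists. `queue` is an already-constructed RemoteQueue and the directory name is a placeholder.

def submit_experiment(queue, directory="experiment/"):
    # Uploads `directory` to the server-side queue and runs its start.sh.
    job_id = queue.submit(None, directory, script="start.sh")
    return job_id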
VisTrails/tej
tej/submission.py
RemoteQueue.status
def status(self, job_id): """Gets the status of a previously-submitted job. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/status'), job_id), True) if ret == 0: directory, result = output.splitlines() result = result.decode('utf-8') return RemoteQueue.JOB_DONE, PosixPath(directory), result elif ret == 2: directory = output.splitlines()[0] return RemoteQueue.JOB_RUNNING, PosixPath(directory), None elif ret == 3: raise JobNotFound else: raise RemoteCommandFailure(command="commands/status", ret=ret)
python
def status(self, job_id): """Gets the status of a previously-submitted job. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/status'), job_id), True) if ret == 0: directory, result = output.splitlines() result = result.decode('utf-8') return RemoteQueue.JOB_DONE, PosixPath(directory), result elif ret == 2: directory = output.splitlines()[0] return RemoteQueue.JOB_RUNNING, PosixPath(directory), None elif ret == 3: raise JobNotFound else: raise RemoteCommandFailure(command="commands/status", ret=ret)
[ "def", "status", "(", "self", ",", "job_id", ")", ":", "check_jobid", "(", "job_id", ")", "queue", "=", "self", ".", "_get_queue", "(", ")", "if", "queue", "is", "None", ":", "raise", "QueueDoesntExist", "ret", ",", "output", "=", "self", ".", "_call", "(", "'%s %s'", "%", "(", "shell_escape", "(", "queue", "/", "'commands/status'", ")", ",", "job_id", ")", ",", "True", ")", "if", "ret", "==", "0", ":", "directory", ",", "result", "=", "output", ".", "splitlines", "(", ")", "result", "=", "result", ".", "decode", "(", "'utf-8'", ")", "return", "RemoteQueue", ".", "JOB_DONE", ",", "PosixPath", "(", "directory", ")", ",", "result", "elif", "ret", "==", "2", ":", "directory", "=", "output", ".", "splitlines", "(", ")", "[", "0", "]", "return", "RemoteQueue", ".", "JOB_RUNNING", ",", "PosixPath", "(", "directory", ")", ",", "None", "elif", "ret", "==", "3", ":", "raise", "JobNotFound", "else", ":", "raise", "RemoteCommandFailure", "(", "command", "=", "\"commands/status\"", ",", "ret", "=", "ret", ")" ]
Gets the status of a previously-submitted job.
[ "Gets", "the", "status", "of", "a", "previously", "-", "submitted", "job", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L518-L542
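Consuming status() as described in the record above: it returns a (state, directory, result) tuple, with RemoteQueue.JOB_DONE carrying a decoded result string and RemoteQueue.JOB_RUNNING a None result. The import path follows the func_code_url of these records; the helper itself is only a sketch.

from tej.submission import RemoteQueue

def describe(queue, job_id):
    state, directory, result = queue.status(job_id)
    if state == RemoteQueue.JOB_DONE:
        return "done in %s: %s" % (directory, result)
    if state == RemoteQueue.JOB_RUNNING:
        return "running in %s" % directory
    return "unexpected state %r" % (state,)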
VisTrails/tej
tej/submission.py
RemoteQueue.download
def download(self, job_id, files, **kwargs): """Downloads files from server. """ check_jobid(job_id) if not files: return if isinstance(files, string_types): files = [files] directory = False recursive = kwargs.pop('recursive', True) if 'destination' in kwargs and 'directory' in kwargs: raise TypeError("Only use one of 'destination' or 'directory'") elif 'destination' in kwargs: destination = Path(kwargs.pop('destination')) if len(files) != 1: raise ValueError("'destination' specified but multiple files " "given; did you mean to use 'directory'?") elif 'directory' in kwargs: destination = Path(kwargs.pop('directory')) directory = True if kwargs: raise TypeError("Got unexpected keyword arguments") # Might raise JobNotFound status, target, result = self.status(job_id) scp_client = self.get_scp_client() for filename in files: logger.info("Downloading %s", target / filename) if directory: scp_client.get(str(target / filename), str(destination / filename), recursive=recursive) else: scp_client.get(str(target / filename), str(destination), recursive=recursive)
python
def download(self, job_id, files, **kwargs): """Downloads files from server. """ check_jobid(job_id) if not files: return if isinstance(files, string_types): files = [files] directory = False recursive = kwargs.pop('recursive', True) if 'destination' in kwargs and 'directory' in kwargs: raise TypeError("Only use one of 'destination' or 'directory'") elif 'destination' in kwargs: destination = Path(kwargs.pop('destination')) if len(files) != 1: raise ValueError("'destination' specified but multiple files " "given; did you mean to use 'directory'?") elif 'directory' in kwargs: destination = Path(kwargs.pop('directory')) directory = True if kwargs: raise TypeError("Got unexpected keyword arguments") # Might raise JobNotFound status, target, result = self.status(job_id) scp_client = self.get_scp_client() for filename in files: logger.info("Downloading %s", target / filename) if directory: scp_client.get(str(target / filename), str(destination / filename), recursive=recursive) else: scp_client.get(str(target / filename), str(destination), recursive=recursive)
[ "def", "download", "(", "self", ",", "job_id", ",", "files", ",", "*", "*", "kwargs", ")", ":", "check_jobid", "(", "job_id", ")", "if", "not", "files", ":", "return", "if", "isinstance", "(", "files", ",", "string_types", ")", ":", "files", "=", "[", "files", "]", "directory", "=", "False", "recursive", "=", "kwargs", ".", "pop", "(", "'recursive'", ",", "True", ")", "if", "'destination'", "in", "kwargs", "and", "'directory'", "in", "kwargs", ":", "raise", "TypeError", "(", "\"Only use one of 'destination' or 'directory'\"", ")", "elif", "'destination'", "in", "kwargs", ":", "destination", "=", "Path", "(", "kwargs", ".", "pop", "(", "'destination'", ")", ")", "if", "len", "(", "files", ")", "!=", "1", ":", "raise", "ValueError", "(", "\"'destination' specified but multiple files \"", "\"given; did you mean to use 'directory'?\"", ")", "elif", "'directory'", "in", "kwargs", ":", "destination", "=", "Path", "(", "kwargs", ".", "pop", "(", "'directory'", ")", ")", "directory", "=", "True", "if", "kwargs", ":", "raise", "TypeError", "(", "\"Got unexpected keyword arguments\"", ")", "# Might raise JobNotFound", "status", ",", "target", ",", "result", "=", "self", ".", "status", "(", "job_id", ")", "scp_client", "=", "self", ".", "get_scp_client", "(", ")", "for", "filename", "in", "files", ":", "logger", ".", "info", "(", "\"Downloading %s\"", ",", "target", "/", "filename", ")", "if", "directory", ":", "scp_client", ".", "get", "(", "str", "(", "target", "/", "filename", ")", ",", "str", "(", "destination", "/", "filename", ")", ",", "recursive", "=", "recursive", ")", "else", ":", "scp_client", ".", "get", "(", "str", "(", "target", "/", "filename", ")", ",", "str", "(", "destination", ")", ",", "recursive", "=", "recursive", ")" ]
Downloads files from server.
[ "Downloads", "files", "from", "server", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L544-L582
VisTrails/tej
tej/submission.py
RemoteQueue.kill
def kill(self, job_id): """Kills a job on the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/kill'), job_id), False) if ret == 3: raise JobNotFound elif ret != 0: raise RemoteCommandFailure(command='commands/kill', ret=ret)
python
def kill(self, job_id): """Kills a job on the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/kill'), job_id), False) if ret == 3: raise JobNotFound elif ret != 0: raise RemoteCommandFailure(command='commands/kill', ret=ret)
[ "def", "kill", "(", "self", ",", "job_id", ")", ":", "check_jobid", "(", "job_id", ")", "queue", "=", "self", ".", "_get_queue", "(", ")", "if", "queue", "is", "None", ":", "raise", "QueueDoesntExist", "ret", ",", "output", "=", "self", ".", "_call", "(", "'%s %s'", "%", "(", "shell_escape", "(", "queue", "/", "'commands/kill'", ")", ",", "job_id", ")", ",", "False", ")", "if", "ret", "==", "3", ":", "raise", "JobNotFound", "elif", "ret", "!=", "0", ":", "raise", "RemoteCommandFailure", "(", "command", "=", "'commands/kill'", ",", "ret", "=", "ret", ")" ]
Kills a job on the server.
[ "Kills", "a", "job", "on", "the", "server", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L584-L601
VisTrails/tej
tej/submission.py
RemoteQueue.list
def list(self): """Lists the jobs on the server. """ queue = self._get_queue() if queue is None: raise QueueDoesntExist output = self.check_output('%s' % shell_escape(queue / 'commands/list')) job_id, info = None, None for line in output.splitlines(): line = line.decode('utf-8') if line.startswith(' '): key, value = line[4:].split(': ', 1) info[key] = value else: if job_id is not None: yield job_id, info job_id = line info = {} if job_id is not None: yield job_id, info
python
def list(self): """Lists the jobs on the server. """ queue = self._get_queue() if queue is None: raise QueueDoesntExist output = self.check_output('%s' % shell_escape(queue / 'commands/list')) job_id, info = None, None for line in output.splitlines(): line = line.decode('utf-8') if line.startswith(' '): key, value = line[4:].split(': ', 1) info[key] = value else: if job_id is not None: yield job_id, info job_id = line info = {} if job_id is not None: yield job_id, info
[ "def", "list", "(", "self", ")", ":", "queue", "=", "self", ".", "_get_queue", "(", ")", "if", "queue", "is", "None", ":", "raise", "QueueDoesntExist", "output", "=", "self", ".", "check_output", "(", "'%s'", "%", "shell_escape", "(", "queue", "/", "'commands/list'", ")", ")", "job_id", ",", "info", "=", "None", ",", "None", "for", "line", "in", "output", ".", "splitlines", "(", ")", ":", "line", "=", "line", ".", "decode", "(", "'utf-8'", ")", "if", "line", ".", "startswith", "(", "' '", ")", ":", "key", ",", "value", "=", "line", "[", "4", ":", "]", ".", "split", "(", "': '", ",", "1", ")", "info", "[", "key", "]", "=", "value", "else", ":", "if", "job_id", "is", "not", "None", ":", "yield", "job_id", ",", "info", "job_id", "=", "line", "info", "=", "{", "}", "if", "job_id", "is", "not", "None", ":", "yield", "job_id", ",", "info" ]
Lists the jobs on the server.
[ "Lists", "the", "jobs", "on", "the", "server", "." ]
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L624-L646
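list() in the record above is a generator of (job_id, info) pairs, where info is a dict parsed from indented "key: value" lines. A sketch that simply materializes it; no assumption is made about which keys appear in info, and `queue` is again an already-constructed RemoteQueue.

def print_jobs(queue):
    for job_id, info in queue.list():
        details = ", ".join("%s=%s" % item for item in sorted(info.items()))
        print(job_id, details)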
jaraco/jaraco.text
jaraco/text.py
multi_substitution
def multi_substitution(*substitutions): """ Take a sequence of pairs specifying substitutions, and create a function that performs those substitutions. >>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo') 'baz' """ substitutions = itertools.starmap(substitution, substitutions) # compose function applies last function first, so reverse the # substitutions to get the expected order. substitutions = reversed(tuple(substitutions)) return compose(*substitutions)
python
def multi_substitution(*substitutions): """ Take a sequence of pairs specifying substitutions, and create a function that performs those substitutions. >>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo') 'baz' """ substitutions = itertools.starmap(substitution, substitutions) # compose function applies last function first, so reverse the # substitutions to get the expected order. substitutions = reversed(tuple(substitutions)) return compose(*substitutions)
[ "def", "multi_substitution", "(", "*", "substitutions", ")", ":", "substitutions", "=", "itertools", ".", "starmap", "(", "substitution", ",", "substitutions", ")", "# compose function applies last function first, so reverse the", "# substitutions to get the expected order.", "substitutions", "=", "reversed", "(", "tuple", "(", "substitutions", ")", ")", "return", "compose", "(", "*", "substitutions", ")" ]
Take a sequence of pairs specifying substitutions, and create a function that performs those substitutions. >>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo') 'baz'
[ "Take", "a", "sequence", "of", "pairs", "specifying", "substitutions", "and", "create", "a", "function", "that", "performs", "those", "substitutions", "." ]
train
https://github.com/jaraco/jaraco.text/blob/0fe070e9241cb1fdb737516a3f57da94a2618376/jaraco/text.py#L20-L32
jaraco/jaraco.text
jaraco/text.py
simple_html_strip
def simple_html_strip(s): r""" Remove HTML from the string `s`. >>> str(simple_html_strip('')) '' >>> print(simple_html_strip('A <bold>stormy</bold> day in paradise')) A stormy day in paradise >>> print(simple_html_strip('Somebody <!-- do not --> tell the truth.')) Somebody tell the truth. >>> print(simple_html_strip('What about<br/>\nmultiple lines?')) What about multiple lines? """ html_stripper = re.compile('(<!--.*?-->)|(<[^>]*>)|([^<]+)', re.DOTALL) texts = ( match.group(3) or '' for match in html_stripper.finditer(s) ) return ''.join(texts)
python
def simple_html_strip(s): r""" Remove HTML from the string `s`. >>> str(simple_html_strip('')) '' >>> print(simple_html_strip('A <bold>stormy</bold> day in paradise')) A stormy day in paradise >>> print(simple_html_strip('Somebody <!-- do not --> tell the truth.')) Somebody tell the truth. >>> print(simple_html_strip('What about<br/>\nmultiple lines?')) What about multiple lines? """ html_stripper = re.compile('(<!--.*?-->)|(<[^>]*>)|([^<]+)', re.DOTALL) texts = ( match.group(3) or '' for match in html_stripper.finditer(s) ) return ''.join(texts)
[ "def", "simple_html_strip", "(", "s", ")", ":", "html_stripper", "=", "re", ".", "compile", "(", "'(<!--.*?-->)|(<[^>]*>)|([^<]+)'", ",", "re", ".", "DOTALL", ")", "texts", "=", "(", "match", ".", "group", "(", "3", ")", "or", "''", "for", "match", "in", "html_stripper", ".", "finditer", "(", "s", ")", ")", "return", "''", ".", "join", "(", "texts", ")" ]
r""" Remove HTML from the string `s`. >>> str(simple_html_strip('')) '' >>> print(simple_html_strip('A <bold>stormy</bold> day in paradise')) A stormy day in paradise >>> print(simple_html_strip('Somebody <!-- do not --> tell the truth.')) Somebody tell the truth. >>> print(simple_html_strip('What about<br/>\nmultiple lines?')) What about multiple lines?
[ "r", "Remove", "HTML", "from", "the", "string", "s", "." ]
train
https://github.com/jaraco/jaraco.text/blob/0fe070e9241cb1fdb737516a3f57da94a2618376/jaraco/text.py#L279-L302
jaraco/jaraco.text
jaraco/text.py
remove_prefix
def remove_prefix(text, prefix): """ Remove the prefix from the text if it exists. >>> remove_prefix('underwhelming performance', 'underwhelming ') 'performance' >>> remove_prefix('something special', 'sample') 'something special' """ null, prefix, rest = text.rpartition(prefix) return rest
python
def remove_prefix(text, prefix): """ Remove the prefix from the text if it exists. >>> remove_prefix('underwhelming performance', 'underwhelming ') 'performance' >>> remove_prefix('something special', 'sample') 'something special' """ null, prefix, rest = text.rpartition(prefix) return rest
[ "def", "remove_prefix", "(", "text", ",", "prefix", ")", ":", "null", ",", "prefix", ",", "rest", "=", "text", ".", "rpartition", "(", "prefix", ")", "return", "rest" ]
Remove the prefix from the text if it exists. >>> remove_prefix('underwhelming performance', 'underwhelming ') 'performance' >>> remove_prefix('something special', 'sample') 'something special'
[ "Remove", "the", "prefix", "from", "the", "text", "if", "it", "exists", "." ]
train
https://github.com/jaraco/jaraco.text/blob/0fe070e9241cb1fdb737516a3f57da94a2618376/jaraco/text.py#L378-L389
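One behavioural observation on the remove_prefix implementation shown above (not a documented contract): str.rpartition splits on the last occurrence, so an input made of repeated copies of the prefix loses more than its leading copy.

text = "abcabc"
null, prefix, rest = text.rpartition("abc")   # ('abc', 'abc', '')
assert rest == ""                             # a strict prefix-only strip would give 'abc'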
jaraco/jaraco.text
jaraco/text.py
remove_suffix
def remove_suffix(text, suffix): """ Remove the suffix from the text if it exists. >>> remove_suffix('name.git', '.git') 'name' >>> remove_suffix('something special', 'sample') 'something special' """ rest, suffix, null = text.partition(suffix) return rest
python
def remove_suffix(text, suffix): """ Remove the suffix from the text if it exists. >>> remove_suffix('name.git', '.git') 'name' >>> remove_suffix('something special', 'sample') 'something special' """ rest, suffix, null = text.partition(suffix) return rest
[ "def", "remove_suffix", "(", "text", ",", "suffix", ")", ":", "rest", ",", "suffix", ",", "null", "=", "text", ".", "partition", "(", "suffix", ")", "return", "rest" ]
Remove the suffix from the text if it exists. >>> remove_suffix('name.git', '.git') 'name' >>> remove_suffix('something special', 'sample') 'something special'
[ "Remove", "the", "suffix", "from", "the", "text", "if", "it", "exists", "." ]
train
https://github.com/jaraco/jaraco.text/blob/0fe070e9241cb1fdb737516a3f57da94a2618376/jaraco/text.py#L392-L403
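The mirror observation for remove_suffix: str.partition splits on the first occurrence, so repeated copies of the suffix are also over-stripped.

text = "name.git.git"
rest, suffix, null = text.partition(".git")   # ('name', '.git', '.git')
assert rest == "name"                         # a strict suffix-only strip would give 'name.git'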
jaraco/jaraco.text
jaraco/text.py
Stripper.common_prefix
def common_prefix(s1, s2): """ Return the common prefix of two lines. """ index = min(len(s1), len(s2)) while s1[:index] != s2[:index]: index -= 1 return s1[:index]
python
def common_prefix(s1, s2): """ Return the common prefix of two lines. """ index = min(len(s1), len(s2)) while s1[:index] != s2[:index]: index -= 1 return s1[:index]
[ "def", "common_prefix", "(", "s1", ",", "s2", ")", ":", "index", "=", "min", "(", "len", "(", "s1", ")", ",", "len", "(", "s2", ")", ")", "while", "s1", "[", ":", "index", "]", "!=", "s2", "[", ":", "index", "]", ":", "index", "-=", "1", "return", "s1", "[", ":", "index", "]" ]
Return the common prefix of two lines.
[ "Return", "the", "common", "prefix", "of", "two", "lines", "." ]
train
https://github.com/jaraco/jaraco.text/blob/0fe070e9241cb1fdb737516a3f57da94a2618376/jaraco/text.py#L368-L375
stevearc/pyramid_webpack
pyramid_webpack/jinja2ext.py
WebpackExtension._get_graph
def _get_graph(self, ctx, bundle, extensions, caller=None): """ Run a graph and render the tag contents for each output """ request = ctx.get('request') if request is None: request = get_current_request() if ':' in bundle: config_name, bundle = bundle.split(':') else: config_name = 'DEFAULT' webpack = request.webpack(config_name) assets = (caller(a) for a in webpack.get_bundle(bundle, extensions)) return ''.join(assets)
python
def _get_graph(self, ctx, bundle, extensions, caller=None): """ Run a graph and render the tag contents for each output """ request = ctx.get('request') if request is None: request = get_current_request() if ':' in bundle: config_name, bundle = bundle.split(':') else: config_name = 'DEFAULT' webpack = request.webpack(config_name) assets = (caller(a) for a in webpack.get_bundle(bundle, extensions)) return ''.join(assets)
[ "def", "_get_graph", "(", "self", ",", "ctx", ",", "bundle", ",", "extensions", ",", "caller", "=", "None", ")", ":", "request", "=", "ctx", ".", "get", "(", "'request'", ")", "if", "request", "is", "None", ":", "request", "=", "get_current_request", "(", ")", "if", "':'", "in", "bundle", ":", "config_name", ",", "bundle", "=", "bundle", ".", "split", "(", "':'", ")", "else", ":", "config_name", "=", "'DEFAULT'", "webpack", "=", "request", ".", "webpack", "(", "config_name", ")", "assets", "=", "(", "caller", "(", "a", ")", "for", "a", "in", "webpack", ".", "get_bundle", "(", "bundle", ",", "extensions", ")", ")", "return", "''", ".", "join", "(", "assets", ")" ]
Run a graph and render the tag contents for each output
[ "Run", "a", "graph", "and", "render", "the", "tag", "contents", "for", "each", "output" ]
train
https://github.com/stevearc/pyramid_webpack/blob/4fcad26271fd6e8c270e19c7943240fea6d8c484/pyramid_webpack/jinja2ext.py#L60-L71
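The _get_graph record splits an optional "CONFIGNAME:" prefix off the bundle name before looking up the webpack state on the request. A local sketch of just that parsing step; the function name is mine and, like the original, it assumes at most one colon in the spec.

def split_bundle_spec(bundle):
    if ':' in bundle:
        config_name, bundle = bundle.split(':')
    else:
        config_name = 'DEFAULT'
    return config_name, bundle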
GaretJax/lancet
lancet/commands/workflow.py
activate
def activate(lancet, method, project): """Switch to this project.""" with taskstatus("Looking up project") as ts: if method == "key": func = get_project_keys elif method == "dir": func = get_project_keys for key, project_path in func(lancet): if key.lower() == project.lower(): break else: ts.abort( 'Project "{}" not found (using {}-based lookup)', project, method, ) # Load the configuration config = load_config(os.path.join(project_path, LOCAL_CONFIG)) # cd to the project directory lancet.defer_to_shell("cd", project_path) # Activate virtualenv venv = config.get("lancet", "virtualenv", fallback=None) if venv: venv_path = os.path.join(project_path, os.path.expanduser(venv)) activate_script = os.path.join(venv_path, "bin", "activate") lancet.defer_to_shell("source", activate_script) else: if "VIRTUAL_ENV" in os.environ: lancet.defer_to_shell("deactivate")
python
def activate(lancet, method, project): """Switch to this project.""" with taskstatus("Looking up project") as ts: if method == "key": func = get_project_keys elif method == "dir": func = get_project_keys for key, project_path in func(lancet): if key.lower() == project.lower(): break else: ts.abort( 'Project "{}" not found (using {}-based lookup)', project, method, ) # Load the configuration config = load_config(os.path.join(project_path, LOCAL_CONFIG)) # cd to the project directory lancet.defer_to_shell("cd", project_path) # Activate virtualenv venv = config.get("lancet", "virtualenv", fallback=None) if venv: venv_path = os.path.join(project_path, os.path.expanduser(venv)) activate_script = os.path.join(venv_path, "bin", "activate") lancet.defer_to_shell("source", activate_script) else: if "VIRTUAL_ENV" in os.environ: lancet.defer_to_shell("deactivate")
[ "def", "activate", "(", "lancet", ",", "method", ",", "project", ")", ":", "with", "taskstatus", "(", "\"Looking up project\"", ")", "as", "ts", ":", "if", "method", "==", "\"key\"", ":", "func", "=", "get_project_keys", "elif", "method", "==", "\"dir\"", ":", "func", "=", "get_project_keys", "for", "key", ",", "project_path", "in", "func", "(", "lancet", ")", ":", "if", "key", ".", "lower", "(", ")", "==", "project", ".", "lower", "(", ")", ":", "break", "else", ":", "ts", ".", "abort", "(", "'Project \"{}\" not found (using {}-based lookup)'", ",", "project", ",", "method", ",", ")", "# Load the configuration", "config", "=", "load_config", "(", "os", ".", "path", ".", "join", "(", "project_path", ",", "LOCAL_CONFIG", ")", ")", "# cd to the project directory", "lancet", ".", "defer_to_shell", "(", "\"cd\"", ",", "project_path", ")", "# Activate virtualenv", "venv", "=", "config", ".", "get", "(", "\"lancet\"", ",", "\"virtualenv\"", ",", "fallback", "=", "None", ")", "if", "venv", ":", "venv_path", "=", "os", ".", "path", ".", "join", "(", "project_path", ",", "os", ".", "path", ".", "expanduser", "(", "venv", ")", ")", "activate_script", "=", "os", ".", "path", ".", "join", "(", "venv_path", ",", "\"bin\"", ",", "\"activate\"", ")", "lancet", ".", "defer_to_shell", "(", "\"source\"", ",", "activate_script", ")", "else", ":", "if", "\"VIRTUAL_ENV\"", "in", "os", ".", "environ", ":", "lancet", ".", "defer_to_shell", "(", "\"deactivate\"", ")" ]
Switch to this project.
[ "Switch", "to", "this", "project", "." ]
train
https://github.com/GaretJax/lancet/blob/cf438c5c6166b18ee0dc5ffce55220793019bb95/lancet/commands/workflow.py#L24-L56
GaretJax/lancet
lancet/commands/workflow.py
workon
def workon(ctx, issue_id, new, base_branch): """ Start work on a given issue. This command retrieves the issue from the issue tracker, creates and checks out a new aptly-named branch, puts the issue in the configured active, status, assigns it to you and starts a correctly linked Harvest timer. If a branch with the same name as the one to be created already exists, it is checked out instead. Variations in the branch name occuring after the issue ID are accounted for and the branch renamed to match the new issue summary. If the `default_project` directive is correctly configured, it is enough to give the issue ID (instead of the full project prefix + issue ID). """ lancet = ctx.obj if not issue_id and not new: raise click.UsageError("Provide either an issue ID or the --new flag.") elif issue_id and new: raise click.UsageError( "Provide either an issue ID or the --new flag, but not both." ) if new: # Create a new issue summary = click.prompt("Issue summary") issue = create_issue( lancet, summary=summary, add_to_active_sprint=True ) else: issue = get_issue(lancet, issue_id) username = lancet.tracker.whoami() active_status = lancet.config.get("tracker", "active_status") if not base_branch: base_branch = lancet.config.get("repository", "base_branch") # Get the working branch branch = get_branch(lancet, issue, base_branch) # Make sure the issue is in a correct status transition = get_transition(ctx, lancet, issue, active_status) # Make sure the issue is assigned to us assign_issue(lancet, issue, username, active_status) # Activate environment set_issue_status(lancet, issue, active_status, transition) with taskstatus("Checking out working branch") as ts: lancet.repo.checkout(branch.name) ts.ok('Checked out working branch based on "{}"'.format(base_branch)) with taskstatus("Starting harvest timer") as ts: lancet.timer.start(issue) ts.ok("Started harvest timer")
python
def workon(ctx, issue_id, new, base_branch): """ Start work on a given issue. This command retrieves the issue from the issue tracker, creates and checks out a new aptly-named branch, puts the issue in the configured active, status, assigns it to you and starts a correctly linked Harvest timer. If a branch with the same name as the one to be created already exists, it is checked out instead. Variations in the branch name occuring after the issue ID are accounted for and the branch renamed to match the new issue summary. If the `default_project` directive is correctly configured, it is enough to give the issue ID (instead of the full project prefix + issue ID). """ lancet = ctx.obj if not issue_id and not new: raise click.UsageError("Provide either an issue ID or the --new flag.") elif issue_id and new: raise click.UsageError( "Provide either an issue ID or the --new flag, but not both." ) if new: # Create a new issue summary = click.prompt("Issue summary") issue = create_issue( lancet, summary=summary, add_to_active_sprint=True ) else: issue = get_issue(lancet, issue_id) username = lancet.tracker.whoami() active_status = lancet.config.get("tracker", "active_status") if not base_branch: base_branch = lancet.config.get("repository", "base_branch") # Get the working branch branch = get_branch(lancet, issue, base_branch) # Make sure the issue is in a correct status transition = get_transition(ctx, lancet, issue, active_status) # Make sure the issue is assigned to us assign_issue(lancet, issue, username, active_status) # Activate environment set_issue_status(lancet, issue, active_status, transition) with taskstatus("Checking out working branch") as ts: lancet.repo.checkout(branch.name) ts.ok('Checked out working branch based on "{}"'.format(base_branch)) with taskstatus("Starting harvest timer") as ts: lancet.timer.start(issue) ts.ok("Started harvest timer")
[ "def", "workon", "(", "ctx", ",", "issue_id", ",", "new", ",", "base_branch", ")", ":", "lancet", "=", "ctx", ".", "obj", "if", "not", "issue_id", "and", "not", "new", ":", "raise", "click", ".", "UsageError", "(", "\"Provide either an issue ID or the --new flag.\"", ")", "elif", "issue_id", "and", "new", ":", "raise", "click", ".", "UsageError", "(", "\"Provide either an issue ID or the --new flag, but not both.\"", ")", "if", "new", ":", "# Create a new issue", "summary", "=", "click", ".", "prompt", "(", "\"Issue summary\"", ")", "issue", "=", "create_issue", "(", "lancet", ",", "summary", "=", "summary", ",", "add_to_active_sprint", "=", "True", ")", "else", ":", "issue", "=", "get_issue", "(", "lancet", ",", "issue_id", ")", "username", "=", "lancet", ".", "tracker", ".", "whoami", "(", ")", "active_status", "=", "lancet", ".", "config", ".", "get", "(", "\"tracker\"", ",", "\"active_status\"", ")", "if", "not", "base_branch", ":", "base_branch", "=", "lancet", ".", "config", ".", "get", "(", "\"repository\"", ",", "\"base_branch\"", ")", "# Get the working branch", "branch", "=", "get_branch", "(", "lancet", ",", "issue", ",", "base_branch", ")", "# Make sure the issue is in a correct status", "transition", "=", "get_transition", "(", "ctx", ",", "lancet", ",", "issue", ",", "active_status", ")", "# Make sure the issue is assigned to us", "assign_issue", "(", "lancet", ",", "issue", ",", "username", ",", "active_status", ")", "# Activate environment", "set_issue_status", "(", "lancet", ",", "issue", ",", "active_status", ",", "transition", ")", "with", "taskstatus", "(", "\"Checking out working branch\"", ")", "as", "ts", ":", "lancet", ".", "repo", ".", "checkout", "(", "branch", ".", "name", ")", "ts", ".", "ok", "(", "'Checked out working branch based on \"{}\"'", ".", "format", "(", "base_branch", ")", ")", "with", "taskstatus", "(", "\"Starting harvest timer\"", ")", "as", "ts", ":", "lancet", ".", "timer", ".", "start", "(", "issue", ")", "ts", ".", "ok", "(", "\"Started harvest timer\"", ")" ]
Start work on a given issue. This command retrieves the issue from the issue tracker, creates and checks out a new aptly-named branch, puts the issue in the configured active status, assigns it to you, and starts a correctly linked Harvest timer. If a branch with the same name as the one to be created already exists, it is checked out instead. Variations in the branch name occurring after the issue ID are accounted for and the branch is renamed to match the new issue summary. If the `default_project` directive is correctly configured, it is enough to give the issue ID (instead of the full project prefix + issue ID).
[ "Start", "work", "on", "a", "given", "issue", "." ]
train
https://github.com/GaretJax/lancet/blob/cf438c5c6166b18ee0dc5ffce55220793019bb95/lancet/commands/workflow.py#L78-L135
GaretJax/lancet
lancet/commands/workflow.py
time
def time(lancet, issue): """ Start an Harvest timer for the given issue. This command takes care of linking the timer with the issue tracker page for the given issue. If the issue is not passed to command it's taken from currently active branch. """ issue = get_issue(lancet, issue) with taskstatus("Starting harvest timer") as ts: lancet.timer.start(issue) ts.ok("Started harvest timer")
python
def time(lancet, issue): """ Start an Harvest timer for the given issue. This command takes care of linking the timer with the issue tracker page for the given issue. If the issue is not passed to command it's taken from currently active branch. """ issue = get_issue(lancet, issue) with taskstatus("Starting harvest timer") as ts: lancet.timer.start(issue) ts.ok("Started harvest timer")
[ "def", "time", "(", "lancet", ",", "issue", ")", ":", "issue", "=", "get_issue", "(", "lancet", ",", "issue", ")", "with", "taskstatus", "(", "\"Starting harvest timer\"", ")", "as", "ts", ":", "lancet", ".", "timer", ".", "start", "(", "issue", ")", "ts", ".", "ok", "(", "\"Started harvest timer\"", ")" ]
Start a Harvest timer for the given issue. This command takes care of linking the timer with the issue tracker page for the given issue. If the issue is not passed to the command, it is taken from the currently active branch.
[ "Start", "an", "Harvest", "timer", "for", "the", "given", "issue", "." ]
train
https://github.com/GaretJax/lancet/blob/cf438c5c6166b18ee0dc5ffce55220793019bb95/lancet/commands/workflow.py#L141-L153
GaretJax/lancet
lancet/commands/workflow.py
pause
def pause(ctx): """ Pause work on the current issue. This command puts the issue in the configured paused status and stops the current Harvest timer. """ lancet = ctx.obj paused_status = lancet.config.get("tracker", "paused_status") # Get the issue issue = get_issue(lancet) # Make sure the issue is in a correct status transition = get_transition(ctx, lancet, issue, paused_status) # Activate environment set_issue_status(lancet, issue, paused_status, transition) with taskstatus("Pausing harvest timer") as ts: lancet.timer.pause() ts.ok("Harvest timer paused")
python
def pause(ctx): """ Pause work on the current issue. This command puts the issue in the configured paused status and stops the current Harvest timer. """ lancet = ctx.obj paused_status = lancet.config.get("tracker", "paused_status") # Get the issue issue = get_issue(lancet) # Make sure the issue is in a correct status transition = get_transition(ctx, lancet, issue, paused_status) # Activate environment set_issue_status(lancet, issue, paused_status, transition) with taskstatus("Pausing harvest timer") as ts: lancet.timer.pause() ts.ok("Harvest timer paused")
[ "def", "pause", "(", "ctx", ")", ":", "lancet", "=", "ctx", ".", "obj", "paused_status", "=", "lancet", ".", "config", ".", "get", "(", "\"tracker\"", ",", "\"paused_status\"", ")", "# Get the issue", "issue", "=", "get_issue", "(", "lancet", ")", "# Make sure the issue is in a correct status", "transition", "=", "get_transition", "(", "ctx", ",", "lancet", ",", "issue", ",", "paused_status", ")", "# Activate environment", "set_issue_status", "(", "lancet", ",", "issue", ",", "paused_status", ",", "transition", ")", "with", "taskstatus", "(", "\"Pausing harvest timer\"", ")", "as", "ts", ":", "lancet", ".", "timer", ".", "pause", "(", ")", "ts", ".", "ok", "(", "\"Harvest timer paused\"", ")" ]
Pause work on the current issue. This command puts the issue in the configured paused status and stops the current Harvest timer.
[ "Pause", "work", "on", "the", "current", "issue", "." ]
train
https://github.com/GaretJax/lancet/blob/cf438c5c6166b18ee0dc5ffce55220793019bb95/lancet/commands/workflow.py#L158-L179
GaretJax/lancet
lancet/commands/workflow.py
resume
def resume(ctx): """ Resume work on the currently active issue. The issue is retrieved from the currently active branch name. """ lancet = ctx.obj username = lancet.tracker.whoami() active_status = lancet.config.get("tracker", "active_status") # Get the issue issue = get_issue(lancet) # Make sure the issue is in a correct status transition = get_transition(ctx, lancet, issue, active_status) # Make sure the issue is assigned to us assign_issue(lancet, issue, username, active_status) # Activate environment set_issue_status(lancet, issue, active_status, transition) with taskstatus("Resuming harvest timer") as ts: lancet.timer.start(issue) ts.ok("Resumed harvest timer")
python
def resume(ctx): """ Resume work on the currently active issue. The issue is retrieved from the currently active branch name. """ lancet = ctx.obj username = lancet.tracker.whoami() active_status = lancet.config.get("tracker", "active_status") # Get the issue issue = get_issue(lancet) # Make sure the issue is in a correct status transition = get_transition(ctx, lancet, issue, active_status) # Make sure the issue is assigned to us assign_issue(lancet, issue, username, active_status) # Activate environment set_issue_status(lancet, issue, active_status, transition) with taskstatus("Resuming harvest timer") as ts: lancet.timer.start(issue) ts.ok("Resumed harvest timer")
[ "def", "resume", "(", "ctx", ")", ":", "lancet", "=", "ctx", ".", "obj", "username", "=", "lancet", ".", "tracker", ".", "whoami", "(", ")", "active_status", "=", "lancet", ".", "config", ".", "get", "(", "\"tracker\"", ",", "\"active_status\"", ")", "# Get the issue", "issue", "=", "get_issue", "(", "lancet", ")", "# Make sure the issue is in a correct status", "transition", "=", "get_transition", "(", "ctx", ",", "lancet", ",", "issue", ",", "active_status", ")", "# Make sure the issue is assigned to us", "assign_issue", "(", "lancet", ",", "issue", ",", "username", ",", "active_status", ")", "# Activate environment", "set_issue_status", "(", "lancet", ",", "issue", ",", "active_status", ",", "transition", ")", "with", "taskstatus", "(", "\"Resuming harvest timer\"", ")", "as", "ts", ":", "lancet", ".", "timer", ".", "start", "(", "issue", ")", "ts", ".", "ok", "(", "\"Resumed harvest timer\"", ")" ]
Resume work on the currently active issue. The issue is retrieved from the currently active branch name.
[ "Resume", "work", "on", "the", "currently", "active", "issue", "." ]
train
https://github.com/GaretJax/lancet/blob/cf438c5c6166b18ee0dc5ffce55220793019bb95/lancet/commands/workflow.py#L184-L209
GaretJax/lancet
lancet/contrib/dploi.py
ssh
def ssh(lancet, print_cmd, environment): """ SSH into the given environment, based on the dploi configuration. """ namespace = {} with open(lancet.config.get('dploi', 'deployment_spec')) as fh: code = compile(fh.read(), 'deployment.py', 'exec') exec(code, {}, namespace) config = namespace['settings'][environment] host = '{}@{}'.format(config['user'], config['hosts'][0]) cmd = ['ssh', '-p', str(config.get('port', 22)), host] if print_cmd: click.echo(' '.join(quote(s) for s in cmd)) else: lancet.defer_to_shell(*cmd)
python
def ssh(lancet, print_cmd, environment): """ SSH into the given environment, based on the dploi configuration. """ namespace = {} with open(lancet.config.get('dploi', 'deployment_spec')) as fh: code = compile(fh.read(), 'deployment.py', 'exec') exec(code, {}, namespace) config = namespace['settings'][environment] host = '{}@{}'.format(config['user'], config['hosts'][0]) cmd = ['ssh', '-p', str(config.get('port', 22)), host] if print_cmd: click.echo(' '.join(quote(s) for s in cmd)) else: lancet.defer_to_shell(*cmd)
[ "def", "ssh", "(", "lancet", ",", "print_cmd", ",", "environment", ")", ":", "namespace", "=", "{", "}", "with", "open", "(", "lancet", ".", "config", ".", "get", "(", "'dploi'", ",", "'deployment_spec'", ")", ")", "as", "fh", ":", "code", "=", "compile", "(", "fh", ".", "read", "(", ")", ",", "'deployment.py'", ",", "'exec'", ")", "exec", "(", "code", ",", "{", "}", ",", "namespace", ")", "config", "=", "namespace", "[", "'settings'", "]", "[", "environment", "]", "host", "=", "'{}@{}'", ".", "format", "(", "config", "[", "'user'", "]", ",", "config", "[", "'hosts'", "]", "[", "0", "]", ")", "cmd", "=", "[", "'ssh'", ",", "'-p'", ",", "str", "(", "config", ".", "get", "(", "'port'", ",", "22", ")", ")", ",", "host", "]", "if", "print_cmd", ":", "click", ".", "echo", "(", "' '", ".", "join", "(", "quote", "(", "s", ")", "for", "s", "in", "cmd", ")", ")", "else", ":", "lancet", ".", "defer_to_shell", "(", "*", "cmd", ")" ]
SSH into the given environment, based on the dploi configuration.
[ "SSH", "into", "the", "given", "environment", "based", "on", "the", "dploi", "configuration", "." ]
train
https://github.com/GaretJax/lancet/blob/cf438c5c6166b18ee0dc5ffce55220793019bb95/lancet/contrib/dploi.py#L11-L28
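A minimal sketch of the deployment spec shape that the ssh helper above appears to expect; the environment name, user, host and port below are illustrative assumptions, not values from a real dploi project.

# deployment.py -- hypothetical dploi spec; the helper exec()s this file and
# then reads namespace['settings'][environment]
settings = {
    'staging': {
        'user': 'deploy',                    # assumed account name
        'hosts': ['staging.example.com'],    # the first host is used
        'port': 2222,                        # optional, falls back to 22
    },
}

# What the helper would assemble for 'staging', mirroring its logic:
config = settings['staging']
host = '{}@{}'.format(config['user'], config['hosts'][0])
cmd = ['ssh', '-p', str(config.get('port', 22)), host]
print(' '.join(cmd))  # ssh -p 2222 deploy@staging.example.com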
GaretJax/lancet
lancet/cli.py
_setup_helper
def _setup_helper(): """Print the shell integration code.""" base = os.path.abspath(os.path.dirname(__file__)) helper = os.path.join(base, "helper.sh") with open(helper) as fh: click.echo(fh.read())
python
def _setup_helper(): """Print the shell integration code.""" base = os.path.abspath(os.path.dirname(__file__)) helper = os.path.join(base, "helper.sh") with open(helper) as fh: click.echo(fh.read())
[ "def", "_setup_helper", "(", ")", ":", "base", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ")", "helper", "=", "os", ".", "path", ".", "join", "(", "base", ",", "\"helper.sh\"", ")", "with", "open", "(", "helper", ")", "as", "fh", ":", "click", ".", "echo", "(", "fh", ".", "read", "(", ")", ")" ]
Print the shell integration code.
[ "Print", "the", "shell", "integration", "code", "." ]
train
https://github.com/GaretJax/lancet/blob/cf438c5c6166b18ee0dc5ffce55220793019bb95/lancet/cli.py#L273-L278
GaretJax/lancet
lancet/cli.py
_commands
def _commands(ctx): """Prints a list of commands for shell completion hooks.""" ctx = ctx.parent ctx.show_hidden_subcommands = False main = ctx.command for subcommand in main.list_commands(ctx): cmd = main.get_command(ctx, subcommand) if cmd is None: continue help = cmd.short_help or "" click.echo("{}:{}".format(subcommand, help))
python
def _commands(ctx): """Prints a list of commands for shell completion hooks.""" ctx = ctx.parent ctx.show_hidden_subcommands = False main = ctx.command for subcommand in main.list_commands(ctx): cmd = main.get_command(ctx, subcommand) if cmd is None: continue help = cmd.short_help or "" click.echo("{}:{}".format(subcommand, help))
[ "def", "_commands", "(", "ctx", ")", ":", "ctx", "=", "ctx", ".", "parent", "ctx", ".", "show_hidden_subcommands", "=", "False", "main", "=", "ctx", ".", "command", "for", "subcommand", "in", "main", ".", "list_commands", "(", "ctx", ")", ":", "cmd", "=", "main", ".", "get_command", "(", "ctx", ",", "subcommand", ")", "if", "cmd", "is", "None", ":", "continue", "help", "=", "cmd", ".", "short_help", "or", "\"\"", "click", ".", "echo", "(", "\"{}:{}\"", ".", "format", "(", "subcommand", ",", "help", ")", ")" ]
Prints a list of commands for shell completion hooks.
[ "Prints", "a", "list", "of", "commands", "for", "shell", "completion", "hooks", "." ]
train
https://github.com/GaretJax/lancet/blob/cf438c5c6166b18ee0dc5ffce55220793019bb95/lancet/cli.py#L283-L294
GaretJax/lancet
lancet/cli.py
_arguments
def _arguments(ctx, command_name=None): """Prints a list of arguments for shell completion hooks. If a command name is given, returns the arguments for that subcommand. The command name has to refer to a command; aliases are not supported. """ ctx = ctx.parent main = ctx.command if command_name: command = main.get_command(ctx, command_name) if not command: return else: command = main types = ["option", "argument"] all_params = sorted( command.get_params(ctx), key=lambda p: types.index(p.param_type_name) ) def get_name(param): return max(param.opts, key=len) for param in all_params: if param.param_type_name == "option": option = get_name(param) same_dest = [ get_name(p) for p in all_params if p.name == param.name ] if same_dest: option = "({})".format(" ".join(same_dest)) + option if param.help: option += "[{}]".format(param.help or "") if not param.is_flag: option += "=:( )" click.echo(option) elif param.param_type_name == "argument": option = get_name(param) click.echo(":{}".format(option))
python
def _arguments(ctx, command_name=None): """Prints a list of arguments for shell completion hooks. If a command name is given, returns the arguments for that subcommand. The command name has to refer to a command; aliases are not supported. """ ctx = ctx.parent main = ctx.command if command_name: command = main.get_command(ctx, command_name) if not command: return else: command = main types = ["option", "argument"] all_params = sorted( command.get_params(ctx), key=lambda p: types.index(p.param_type_name) ) def get_name(param): return max(param.opts, key=len) for param in all_params: if param.param_type_name == "option": option = get_name(param) same_dest = [ get_name(p) for p in all_params if p.name == param.name ] if same_dest: option = "({})".format(" ".join(same_dest)) + option if param.help: option += "[{}]".format(param.help or "") if not param.is_flag: option += "=:( )" click.echo(option) elif param.param_type_name == "argument": option = get_name(param) click.echo(":{}".format(option))
[ "def", "_arguments", "(", "ctx", ",", "command_name", "=", "None", ")", ":", "ctx", "=", "ctx", ".", "parent", "main", "=", "ctx", ".", "command", "if", "command_name", ":", "command", "=", "main", ".", "get_command", "(", "ctx", ",", "command_name", ")", "if", "not", "command", ":", "return", "else", ":", "command", "=", "main", "types", "=", "[", "\"option\"", ",", "\"argument\"", "]", "all_params", "=", "sorted", "(", "command", ".", "get_params", "(", "ctx", ")", ",", "key", "=", "lambda", "p", ":", "types", ".", "index", "(", "p", ".", "param_type_name", ")", ")", "def", "get_name", "(", "param", ")", ":", "return", "max", "(", "param", ".", "opts", ",", "key", "=", "len", ")", "for", "param", "in", "all_params", ":", "if", "param", ".", "param_type_name", "==", "\"option\"", ":", "option", "=", "get_name", "(", "param", ")", "same_dest", "=", "[", "get_name", "(", "p", ")", "for", "p", "in", "all_params", "if", "p", ".", "name", "==", "param", ".", "name", "]", "if", "same_dest", ":", "option", "=", "\"({})\"", ".", "format", "(", "\" \"", ".", "join", "(", "same_dest", ")", ")", "+", "option", "if", "param", ".", "help", ":", "option", "+=", "\"[{}]\"", ".", "format", "(", "param", ".", "help", "or", "\"\"", ")", "if", "not", "param", ".", "is_flag", ":", "option", "+=", "\"=:( )\"", "click", ".", "echo", "(", "option", ")", "elif", "param", ".", "param_type_name", "==", "\"argument\"", ":", "option", "=", "get_name", "(", "param", ")", "click", ".", "echo", "(", "\":{}\"", ".", "format", "(", "option", ")", ")" ]
Prints a list of arguments for shell completion hooks. If a command name is given, returns the arguments for that subcommand. The command name has to refer to a command; aliases are not supported.
[ "Prints", "a", "list", "of", "arguments", "for", "shell", "completion", "hooks", "." ]
train
https://github.com/GaretJax/lancet/blob/cf438c5c6166b18ee0dc5ffce55220793019bb95/lancet/cli.py#L300-L338
GaretJax/lancet
lancet/cli.py
_autocomplete
def _autocomplete(ctx, shell): """Print the shell autocompletion code.""" if not shell: shell = os.environ.get("SHELL", "") shell = os.path.basename(shell).lower() if not shell: click.secho( "Your shell could not be detected, please pass its name " "as the argument.", fg="red", ) ctx.exit(-1) base = os.path.abspath(os.path.dirname(__file__)) autocomplete = os.path.join(base, "autocomplete", "{}.sh".format(shell)) if not os.path.exists(autocomplete): click.secho( "Autocompletion for your shell ({}) is currently not " "supported.", fg="red", ) ctx.exit(-1) with open(autocomplete) as fh: click.echo(fh.read())
python
def _autocomplete(ctx, shell): """Print the shell autocompletion code.""" if not shell: shell = os.environ.get("SHELL", "") shell = os.path.basename(shell).lower() if not shell: click.secho( "Your shell could not be detected, please pass its name " "as the argument.", fg="red", ) ctx.exit(-1) base = os.path.abspath(os.path.dirname(__file__)) autocomplete = os.path.join(base, "autocomplete", "{}.sh".format(shell)) if not os.path.exists(autocomplete): click.secho( "Autocompletion for your shell ({}) is currently not " "supported.", fg="red", ) ctx.exit(-1) with open(autocomplete) as fh: click.echo(fh.read())
[ "def", "_autocomplete", "(", "ctx", ",", "shell", ")", ":", "if", "not", "shell", ":", "shell", "=", "os", ".", "environ", ".", "get", "(", "\"SHELL\"", ",", "\"\"", ")", "shell", "=", "os", ".", "path", ".", "basename", "(", "shell", ")", ".", "lower", "(", ")", "if", "not", "shell", ":", "click", ".", "secho", "(", "\"Your shell could not be detected, please pass its name \"", "\"as the argument.\"", ",", "fg", "=", "\"red\"", ",", ")", "ctx", ".", "exit", "(", "-", "1", ")", "base", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ")", "autocomplete", "=", "os", ".", "path", ".", "join", "(", "base", ",", "\"autocomplete\"", ",", "\"{}.sh\"", ".", "format", "(", "shell", ")", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "autocomplete", ")", ":", "click", ".", "secho", "(", "\"Autocompletion for your shell ({}) is currently not \"", "\"supported.\"", ",", "fg", "=", "\"red\"", ",", ")", "ctx", ".", "exit", "(", "-", "1", ")", "with", "open", "(", "autocomplete", ")", "as", "fh", ":", "click", ".", "echo", "(", "fh", ".", "read", "(", ")", ")" ]
Print the shell autocompletion code.
[ "Print", "the", "shell", "autocompletion", "code", "." ]
train
https://github.com/GaretJax/lancet/blob/cf438c5c6166b18ee0dc5ffce55220793019bb95/lancet/cli.py#L344-L369
OCHA-DAP/hdx-python-utilities
src/hdx/utilities/__init__.py
raisefrom
def raisefrom(exc_type, message, exc): # type: (Any, str, BaseException) -> None """Call Python 3 raise from or emulate it for Python 2 Args: exc_type (Any): Type of Exception message (str): Error message to display exc (BaseException): original exception Returns: None """ if sys.version_info[:2] >= (3, 2): six.raise_from(exc_type(message), exc) else: six.reraise(exc_type, '%s - %s' % (message, exc), sys.exc_info()[2])
python
def raisefrom(exc_type, message, exc): # type: (Any, str, BaseException) -> None """Call Python 3 raise from or emulate it for Python 2 Args: exc_type (Any): Type of Exception message (str): Error message to display exc (BaseException): original exception Returns: None """ if sys.version_info[:2] >= (3, 2): six.raise_from(exc_type(message), exc) else: six.reraise(exc_type, '%s - %s' % (message, exc), sys.exc_info()[2])
[ "def", "raisefrom", "(", "exc_type", ",", "message", ",", "exc", ")", ":", "# type: (Any, str, BaseException) -> None", "if", "sys", ".", "version_info", "[", ":", "2", "]", ">=", "(", "3", ",", "2", ")", ":", "six", ".", "raise_from", "(", "exc_type", "(", "message", ")", ",", "exc", ")", "else", ":", "six", ".", "reraise", "(", "exc_type", ",", "'%s - %s'", "%", "(", "message", ",", "exc", ")", ",", "sys", ".", "exc_info", "(", ")", "[", "2", "]", ")" ]
Call Python 3 raise from or emulate it for Python 2 Args: exc_type (Any): Type of Exception message (str): Error message to display exc (BaseException): original exception Returns: None
[ "Call", "Python", "3", "raise", "from", "or", "emulate", "it", "for", "Python", "2" ]
train
https://github.com/OCHA-DAP/hdx-python-utilities/blob/9c89e0aa5afac2c002b02a2d8f0e5b91eeb3d2a3/src/hdx/utilities/__init__.py#L7-L23
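A small usage sketch for raisefrom; ConfigError and the missing-key scenario are hypothetical, and the import assumes the hdx-python-utilities package is installed.

from hdx.utilities import raisefrom

class ConfigError(Exception):
    """Hypothetical project-specific error, used only for this sketch."""

settings = {'name': 'demo'}
try:
    url = settings['url']              # missing key raises KeyError
except KeyError as exc:
    # On Python 3 this chains the KeyError via "raise ... from ...";
    # on Python 2 the original exception text is folded into the message.
    raisefrom(ConfigError, "required key 'url' is missing from settings", exc)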
RockFeng0/rtsf
rtsf/p_executer.py
Runner.init_runner
def init_runner(self, parser, tracers, projinfo): ''' initial some instances for preparing to run test case @note: should not override @param parser: instance of TestCaseParser @param tracers: dict type for the instance of Tracer. Such as {"":tracer_obj} or {"192.168.0.1:5555":tracer_obj1, "192.168.0.2:5555":tracer_obj2} @param proj_info: dict type of test case. use like: self.proj_info["module"], self.proj_info["name"] yaml case like: - project: name: xxx module: xxxx dict case like: {"project": {"name": xxx, "module": xxxx}} ''' self.parser = parser self.tracers = tracers self.proj_info = projinfo
python
def init_runner(self, parser, tracers, projinfo): ''' initial some instances for preparing to run test case @note: should not override @param parser: instance of TestCaseParser @param tracers: dict type for the instance of Tracer. Such as {"":tracer_obj} or {"192.168.0.1:5555":tracer_obj1, "192.168.0.2:5555":tracer_obj2} @param proj_info: dict type of test case. use like: self.proj_info["module"], self.proj_info["name"] yaml case like: - project: name: xxx module: xxxx dict case like: {"project": {"name": xxx, "module": xxxx}} ''' self.parser = parser self.tracers = tracers self.proj_info = projinfo
[ "def", "init_runner", "(", "self", ",", "parser", ",", "tracers", ",", "projinfo", ")", ":", "self", ".", "parser", "=", "parser", "self", ".", "tracers", "=", "tracers", "self", ".", "proj_info", "=", "projinfo" ]
initialize some instances in preparation for running test cases @note: should not be overridden @param parser: instance of TestCaseParser @param tracers: dict mapping devices to Tracer instances, such as {"":tracer_obj} or {"192.168.0.1:5555":tracer_obj1, "192.168.0.2:5555":tracer_obj2} @param proj_info: dict with test case project info; used like: self.proj_info["module"], self.proj_info["name"] yaml case like: - project: name: xxx module: xxxx dict case like: {"project": {"name": xxx, "module": xxxx}}
[ "initial", "some", "instances", "for", "preparing", "to", "run", "test", "case" ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_executer.py#L227-L243
RockFeng0/rtsf
rtsf/p_executer.py
Runner._run_grid_multiprocess
def _run_grid_multiprocess(self, func, iterables): ''' running case with mutil process to support selenium grid-mode(multiple web) and appium grid-mode(multiple devices). @param func: function object @param iterables: iterable objects ''' multiprocessing.freeze_support() pool = multiprocessing.Pool() pool_tracers = pool.map(func, iterables) pool.close() pool.join() # 传递给 pool.map的 实例对象,内存地址发生变化, 因此,这里在运行结束后,重新定义 self.tracers self.tracers = dict(zip(self._default_devices, pool_tracers))
python
def _run_grid_multiprocess(self, func, iterables): ''' running case with mutil process to support selenium grid-mode(multiple web) and appium grid-mode(multiple devices). @param func: function object @param iterables: iterable objects ''' multiprocessing.freeze_support() pool = multiprocessing.Pool() pool_tracers = pool.map(func, iterables) pool.close() pool.join() # 传递给 pool.map的 实例对象,内存地址发生变化, 因此,这里在运行结束后,重新定义 self.tracers self.tracers = dict(zip(self._default_devices, pool_tracers))
[ "def", "_run_grid_multiprocess", "(", "self", ",", "func", ",", "iterables", ")", ":", "multiprocessing", ".", "freeze_support", "(", ")", "pool", "=", "multiprocessing", ".", "Pool", "(", ")", "pool_tracers", "=", "pool", ".", "map", "(", "func", ",", "iterables", ")", "pool", ".", "close", "(", ")", "pool", ".", "join", "(", ")", "# 传递给 pool.map的 实例对象,内存地址发生变化, 因此,这里在运行结束后,重新定义 self.tracers \r", "self", ".", "tracers", "=", "dict", "(", "zip", "(", "self", ".", "_default_devices", ",", "pool_tracers", ")", ")" ]
run cases with multiple processes to support selenium grid-mode (multiple web) and appium grid-mode (multiple devices). @param func: function object @param iterables: iterable objects
[ "running", "case", "with", "mutil", "process", "to", "support", "selenium", "grid", "-", "mode", "(", "multiple", "web", ")", "and", "appium", "grid", "-", "mode", "(", "multiple", "devices", ")", "." ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_executer.py#L282-L294
RockFeng0/rtsf
rtsf/p_executer.py
Runner._run_grid_multithread
def _run_grid_multithread(self, func, iterables): ''' running case with mutil process to support selenium grid-mode(multiple web) and appium grid-mode(multiple devices). @param func: function object @param iterables: iterable objects ''' f = lambda x: threading.Thread(target = func,args = (x,)) threads = map(f, iterables) for thread in threads: thread.setDaemon(True) thread.start() thread.join()
python
def _run_grid_multithread(self, func, iterables): ''' running case with mutil process to support selenium grid-mode(multiple web) and appium grid-mode(multiple devices). @param func: function object @param iterables: iterable objects ''' f = lambda x: threading.Thread(target = func,args = (x,)) threads = map(f, iterables) for thread in threads: thread.setDaemon(True) thread.start() thread.join()
[ "def", "_run_grid_multithread", "(", "self", ",", "func", ",", "iterables", ")", ":", "f", "=", "lambda", "x", ":", "threading", ".", "Thread", "(", "target", "=", "func", ",", "args", "=", "(", "x", ",", ")", ")", "threads", "=", "map", "(", "f", ",", "iterables", ")", "for", "thread", "in", "threads", ":", "thread", ".", "setDaemon", "(", "True", ")", "thread", ".", "start", "(", ")", "thread", ".", "join", "(", ")" ]
run cases with multiple threads to support selenium grid-mode (multiple web) and appium grid-mode (multiple devices). @param func: function object @param iterables: iterable objects
[ "running", "case", "with", "mutil", "process", "to", "support", "selenium", "grid", "-", "mode", "(", "multiple", "web", ")", "and", "appium", "grid", "-", "mode", "(", "multiple", "devices", ")", "." ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_executer.py#L296-L307
RockFeng0/rtsf
rtsf/p_common.py
init_project_env
def init_project_env(subject='Automation', proj_path = None, sysencoding = "utf-8", debug = False): ''' Set the environment for pyrunner ''' # if sysencoding: # set_sys_encode(sysencoding) if not proj_path: try: executable_file_path = os.path.dirname(os.path.abspath(inspect.stack()[-1][1])) except: executable_file_path = os.path.dirname(sys.path[0]) finally: proj_path = executable_file_path p = os.path.join(proj_path,subject) proj_conf = { "sys_coding" : sysencoding, "debug" : debug, "module_name" : os.path.splitext(os.path.basename(subject))[0], "cfg_file" : os.path.join(p,"config.ini"), "path" : {"root" : p, "case" : os.path.join(p,"testcase"), "data" : os.path.join(p,"data"), "buffer" : os.path.join(p,"buffer"), "resource" : os.path.join(p,"resource"), "tools" : os.path.join(p,"tools"), "rst" : os.path.join(p,"result"), "rst_log" : os.path.join(p,"result","testcase"), "rst_shot" : os.path.join(p,"result","screenshots"), }, } [FileSystemUtils.mkdirs(v) for v in proj_conf["path"].values()] sys.path.append(p) if os.path.isdir(p) else "" return proj_conf
python
def init_project_env(subject='Automation', proj_path = None, sysencoding = "utf-8", debug = False): ''' Set the environment for pyrunner ''' # if sysencoding: # set_sys_encode(sysencoding) if not proj_path: try: executable_file_path = os.path.dirname(os.path.abspath(inspect.stack()[-1][1])) except: executable_file_path = os.path.dirname(sys.path[0]) finally: proj_path = executable_file_path p = os.path.join(proj_path,subject) proj_conf = { "sys_coding" : sysencoding, "debug" : debug, "module_name" : os.path.splitext(os.path.basename(subject))[0], "cfg_file" : os.path.join(p,"config.ini"), "path" : {"root" : p, "case" : os.path.join(p,"testcase"), "data" : os.path.join(p,"data"), "buffer" : os.path.join(p,"buffer"), "resource" : os.path.join(p,"resource"), "tools" : os.path.join(p,"tools"), "rst" : os.path.join(p,"result"), "rst_log" : os.path.join(p,"result","testcase"), "rst_shot" : os.path.join(p,"result","screenshots"), }, } [FileSystemUtils.mkdirs(v) for v in proj_conf["path"].values()] sys.path.append(p) if os.path.isdir(p) else "" return proj_conf
[ "def", "init_project_env", "(", "subject", "=", "'Automation'", ",", "proj_path", "=", "None", ",", "sysencoding", "=", "\"utf-8\"", ",", "debug", "=", "False", ")", ":", "# if sysencoding:\r", "# set_sys_encode(sysencoding)\r", "if", "not", "proj_path", ":", "try", ":", "executable_file_path", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "inspect", ".", "stack", "(", ")", "[", "-", "1", "]", "[", "1", "]", ")", ")", "except", ":", "executable_file_path", "=", "os", ".", "path", ".", "dirname", "(", "sys", ".", "path", "[", "0", "]", ")", "finally", ":", "proj_path", "=", "executable_file_path", "p", "=", "os", ".", "path", ".", "join", "(", "proj_path", ",", "subject", ")", "proj_conf", "=", "{", "\"sys_coding\"", ":", "sysencoding", ",", "\"debug\"", ":", "debug", ",", "\"module_name\"", ":", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "subject", ")", ")", "[", "0", "]", ",", "\"cfg_file\"", ":", "os", ".", "path", ".", "join", "(", "p", ",", "\"config.ini\"", ")", ",", "\"path\"", ":", "{", "\"root\"", ":", "p", ",", "\"case\"", ":", "os", ".", "path", ".", "join", "(", "p", ",", "\"testcase\"", ")", ",", "\"data\"", ":", "os", ".", "path", ".", "join", "(", "p", ",", "\"data\"", ")", ",", "\"buffer\"", ":", "os", ".", "path", ".", "join", "(", "p", ",", "\"buffer\"", ")", ",", "\"resource\"", ":", "os", ".", "path", ".", "join", "(", "p", ",", "\"resource\"", ")", ",", "\"tools\"", ":", "os", ".", "path", ".", "join", "(", "p", ",", "\"tools\"", ")", ",", "\"rst\"", ":", "os", ".", "path", ".", "join", "(", "p", ",", "\"result\"", ")", ",", "\"rst_log\"", ":", "os", ".", "path", ".", "join", "(", "p", ",", "\"result\"", ",", "\"testcase\"", ")", ",", "\"rst_shot\"", ":", "os", ".", "path", ".", "join", "(", "p", ",", "\"result\"", ",", "\"screenshots\"", ")", ",", "}", ",", "}", "[", "FileSystemUtils", ".", "mkdirs", "(", "v", ")", "for", "v", "in", "proj_conf", "[", "\"path\"", "]", ".", "values", "(", ")", "]", "sys", ".", "path", ".", "append", "(", "p", ")", "if", "os", ".", "path", ".", "isdir", "(", "p", ")", "else", "\"\"", "return", "proj_conf" ]
Set the environment for pyrunner
[ "Set", "the", "environment", "for", "pyrunner" ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_common.py#L76-L111
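A hedged usage sketch for init_project_env: assuming rtsf is installed and importable as below, this builds the project skeleton under the current directory and reads back a few of the returned paths; the subject name is arbitrary.

import os
from rtsf.p_common import init_project_env  # assumed import path (rtsf/p_common.py)

# Creates <cwd>/DemoSuite/{testcase,data,buffer,resource,tools,result,...} on disk
proj_conf = init_project_env(subject='DemoSuite', proj_path=os.getcwd(), debug=True)

print(proj_conf['module_name'])         # DemoSuite
print(proj_conf['cfg_file'])            # .../DemoSuite/config.ini
print(proj_conf['path']['rst_shot'])    # .../DemoSuite/result/screenshots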
RockFeng0/rtsf
rtsf/p_common.py
seqfy
def seqfy(strs): ''' 序列化 字符串--->实际效果是,为字符串,添加行号,返回字符串 Sampe usage: strs = ["", None, u"First-line\nSecond-line\nThird-line", u"没有换行符"] for s in strs: print "---" result = seqfy(s) print result print unseqfy(result) ''' if not strs: return result = "" seq = 1 ss = strs.split("\n") for i in ss: if i: result = "".join([result, str(seq), ".", i, "\n"]) seq = seq + 1 return result
python
def seqfy(strs): ''' 序列化 字符串--->实际效果是,为字符串,添加行号,返回字符串 Sampe usage: strs = ["", None, u"First-line\nSecond-line\nThird-line", u"没有换行符"] for s in strs: print "---" result = seqfy(s) print result print unseqfy(result) ''' if not strs: return result = "" seq = 1 ss = strs.split("\n") for i in ss: if i: result = "".join([result, str(seq), ".", i, "\n"]) seq = seq + 1 return result
[ "def", "seqfy", "(", "strs", ")", ":", "if", "not", "strs", ":", "return", "result", "=", "\"\"", "seq", "=", "1", "ss", "=", "strs", ".", "split", "(", "\"\\n\"", ")", "for", "i", "in", "ss", ":", "if", "i", ":", "result", "=", "\"\"", ".", "join", "(", "[", "result", ",", "str", "(", "seq", ")", ",", "\".\"", ",", "i", ",", "\"\\n\"", "]", ")", "seq", "=", "seq", "+", "1", "return", "result" ]
Serialize a string ---> in effect, add line numbers to the string and return the numbered string. Sample usage: strs = ["", None, u"First-line\nSecond-line\nThird-line", u"没有换行符"] for s in strs: print "---" result = seqfy(s) print result print unseqfy(result)
[ "序列化", "字符串", "---", ">", "实际效果是,为字符串,添加行号,返回字符串", "Sampe", "usage", ":", "strs", "=", "[", "None", "u", "First", "-", "line", "\\", "nSecond", "-", "line", "\\", "nThird", "-", "line", "u", "没有换行符", "]", "for", "s", "in", "strs", ":", "print", "---", "result", "=", "seqfy", "(", "s", ")", "print", "result", "print", "unseqfy", "(", "result", ")" ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_common.py#L786-L807
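A concrete illustration of the value seqfy returns, inferred from the code above; the input strings are arbitrary.

from rtsf.p_common import seqfy  # assumed import path

print(seqfy('First-line\nSecond-line\nThird-line'))
# 1.First-line
# 2.Second-line
# 3.Third-line
print(seqfy(''))   # None -- empty input short-circuits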
RockFeng0/rtsf
rtsf/p_common.py
stepfy
def stepfy(strs): ''' 步骤化 字符串 --->实际效果是, 依据 序列化的字符串,转换为 Step_%s_info 的字典, 返回字典 Sample usage: test_strs = [ "", None, u"First-line\nSecond-line\nThird-line", u'1.First-line\n2.Second-line\n3.Third-line\n', u'3.没有换行符', u'3.有换行符\n', "asdfasdfsdf", "1.asdfasdfsdf\n2.sodfi", "1.1.dfasdfahttp://192.168.1.1sdfsdf2.1.1.1.1\n", "dfasdfahttp://192.168.1.1sdfsdf2.1.1.1.1\n", ] for i in test_strs: steps = stepfy(i) un = unstepfy(steps) print "string: %r" %i print "stepfy: %s" %steps print "unstepfy: %r\n" %un ''' result = {} prog_step = re.compile("^\d+\.") if not strs: return result raws = strs.split("\n") for raw in raws: step_num = raws.index(raw) + 1 raw = prog_step.sub("",raw) if raw: result["Step_%s_info" %step_num] = raw return result
python
def stepfy(strs): ''' 步骤化 字符串 --->实际效果是, 依据 序列化的字符串,转换为 Step_%s_info 的字典, 返回字典 Sample usage: test_strs = [ "", None, u"First-line\nSecond-line\nThird-line", u'1.First-line\n2.Second-line\n3.Third-line\n', u'3.没有换行符', u'3.有换行符\n', "asdfasdfsdf", "1.asdfasdfsdf\n2.sodfi", "1.1.dfasdfahttp://192.168.1.1sdfsdf2.1.1.1.1\n", "dfasdfahttp://192.168.1.1sdfsdf2.1.1.1.1\n", ] for i in test_strs: steps = stepfy(i) un = unstepfy(steps) print "string: %r" %i print "stepfy: %s" %steps print "unstepfy: %r\n" %un ''' result = {} prog_step = re.compile("^\d+\.") if not strs: return result raws = strs.split("\n") for raw in raws: step_num = raws.index(raw) + 1 raw = prog_step.sub("",raw) if raw: result["Step_%s_info" %step_num] = raw return result
[ "def", "stepfy", "(", "strs", ")", ":", "result", "=", "{", "}", "prog_step", "=", "re", ".", "compile", "(", "\"^\\d+\\.\"", ")", "if", "not", "strs", ":", "return", "result", "raws", "=", "strs", ".", "split", "(", "\"\\n\"", ")", "for", "raw", "in", "raws", ":", "step_num", "=", "raws", ".", "index", "(", "raw", ")", "+", "1", "raw", "=", "prog_step", ".", "sub", "(", "\"\"", ",", "raw", ")", "if", "raw", ":", "result", "[", "\"Step_%s_info\"", "%", "step_num", "]", "=", "raw", "return", "result" ]
Step-ify a string ---> in effect, based on the serialized (line-numbered) string, convert it into a dict keyed as Step_%s_info and return the dict. Sample usage: test_strs = [ "", None, u"First-line\nSecond-line\nThird-line", u'1.First-line\n2.Second-line\n3.Third-line\n', u'3.没有换行符', u'3.有换行符\n', "asdfasdfsdf", "1.asdfasdfsdf\n2.sodfi", "1.1.dfasdfahttp://192.168.1.1sdfsdf2.1.1.1.1\n", "dfasdfahttp://192.168.1.1sdfsdf2.1.1.1.1\n", ] for i in test_strs: steps = stepfy(i) un = unstepfy(steps) print "string: %r" %i print "stepfy: %s" %steps print "unstepfy: %r\n" %un
[ "步骤化", "字符串", "---", ">", "实际效果是", "依据", "序列化的字符串,转换为", "Step_%s_info", "的字典,", "返回字典", "Sample", "usage", ":", "test_strs", "=", "[", "None", "u", "First", "-", "line", "\\", "nSecond", "-", "line", "\\", "nThird", "-", "line", "u", "1", ".", "First", "-", "line", "\\", "n2", ".", "Second", "-", "line", "\\", "n3", ".", "Third", "-", "line", "\\", "n", "u", "3", ".", "没有换行符", "u", "3", ".", "有换行符", "\\", "n", "asdfasdfsdf", "1", ".", "asdfasdfsdf", "\\", "n2", ".", "sodfi", "1", ".", "1", ".", "dfasdfahttp", ":", "//", "192", ".", "168", ".", "1", ".", "1sdfsdf2", ".", "1", ".", "1", ".", "1", ".", "1", "\\", "n", "dfasdfahttp", ":", "//", "192", ".", "168", ".", "1", ".", "1sdfsdf2", ".", "1", ".", "1", ".", "1", ".", "1", "\\", "n", "]", "for", "i", "in", "test_strs", ":", "steps", "=", "stepfy", "(", "i", ")", "un", "=", "unstepfy", "(", "steps", ")", "print", "string", ":", "%r", "%i", "print", "stepfy", ":", "%s", "%steps", "print", "unstepfy", ":", "%r", "\\", "n", "%un" ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_common.py#L830-L865
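A concrete illustration of the dict stepfy returns, inferred from the code above; the step texts are arbitrary.

from rtsf.p_common import stepfy  # assumed import path

steps = stepfy('1.Open the login page\n2.Submit the form\n')
print(steps)
# {'Step_1_info': 'Open the login page', 'Step_2_info': 'Submit the form'}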
RockFeng0/rtsf
rtsf/p_common.py
map_function
def map_function(func_str, fw_action_addtion=None,bw_action_addtion=None, alias_func=None): ''' Sample usage: print map_function('set',alias_func = "ini_items");# -> ini_items print map_function('set',fw_action_addtion="action_steps_",bw_action_addtion="_for_upd",alias_func = "ini_items"); # -> action_steps_ini_items_for_upd print map_function('set(a=1,b=2,c=Test())',"action_steps_","_for_upd","ini_items");# -> action_steps_ini_items_for_upd(a=1,b=2,c=Test()) print map_function('set("login",a="good",b=Test())',"action_steps_","_for_upd");# -> action_steps_set_for_upd("login",a="good",b=Test()) ''' split_action_value = re.compile("^(\w+)(\((.*)\)$)?") matched = split_action_value.match(func_str) if matched: action = matched.group(1).lower() value = matched.group(2) #params = matched.group(3) if alias_func: action = alias_func if fw_action_addtion: action = fw_action_addtion + action if fw_action_addtion: action = action + bw_action_addtion if value: return action+value else: return action
python
def map_function(func_str, fw_action_addtion=None,bw_action_addtion=None, alias_func=None): ''' Sample usage: print map_function('set',alias_func = "ini_items");# -> ini_items print map_function('set',fw_action_addtion="action_steps_",bw_action_addtion="_for_upd",alias_func = "ini_items"); # -> action_steps_ini_items_for_upd print map_function('set(a=1,b=2,c=Test())',"action_steps_","_for_upd","ini_items");# -> action_steps_ini_items_for_upd(a=1,b=2,c=Test()) print map_function('set("login",a="good",b=Test())',"action_steps_","_for_upd");# -> action_steps_set_for_upd("login",a="good",b=Test()) ''' split_action_value = re.compile("^(\w+)(\((.*)\)$)?") matched = split_action_value.match(func_str) if matched: action = matched.group(1).lower() value = matched.group(2) #params = matched.group(3) if alias_func: action = alias_func if fw_action_addtion: action = fw_action_addtion + action if fw_action_addtion: action = action + bw_action_addtion if value: return action+value else: return action
[ "def", "map_function", "(", "func_str", ",", "fw_action_addtion", "=", "None", ",", "bw_action_addtion", "=", "None", ",", "alias_func", "=", "None", ")", ":", "split_action_value", "=", "re", ".", "compile", "(", "\"^(\\w+)(\\((.*)\\)$)?\"", ")", "matched", "=", "split_action_value", ".", "match", "(", "func_str", ")", "if", "matched", ":", "action", "=", "matched", ".", "group", "(", "1", ")", ".", "lower", "(", ")", "value", "=", "matched", ".", "group", "(", "2", ")", "#params = matched.group(3)\r", "if", "alias_func", ":", "action", "=", "alias_func", "if", "fw_action_addtion", ":", "action", "=", "fw_action_addtion", "+", "action", "if", "fw_action_addtion", ":", "action", "=", "action", "+", "bw_action_addtion", "if", "value", ":", "return", "action", "+", "value", "else", ":", "return", "action" ]
Sample usage: print map_function('set',alias_func = "ini_items");# -> ini_items print map_function('set',fw_action_addtion="action_steps_",bw_action_addtion="_for_upd",alias_func = "ini_items"); # -> action_steps_ini_items_for_upd print map_function('set(a=1,b=2,c=Test())',"action_steps_","_for_upd","ini_items");# -> action_steps_ini_items_for_upd(a=1,b=2,c=Test()) print map_function('set("login",a="good",b=Test())',"action_steps_","_for_upd");# -> action_steps_set_for_upd("login",a="good",b=Test())
[ "Sample", "usage", ":", "print", "map_function", "(", "set", "alias_func", "=", "ini_items", ")", ";", "#", "-", ">", "ini_items", "print", "map_function", "(", "set", "fw_action_addtion", "=", "action_steps_", "bw_action_addtion", "=", "_for_upd", "alias_func", "=", "ini_items", ")", ";", "#", "-", ">", "action_steps_ini_items_for_upd", "print", "map_function", "(", "set", "(", "a", "=", "1", "b", "=", "2", "c", "=", "Test", "()", ")", "action_steps_", "_for_upd", "ini_items", ")", ";", "#", "-", ">", "action_steps_ini_items_for_upd", "(", "a", "=", "1", "b", "=", "2", "c", "=", "Test", "()", ")", "print", "map_function", "(", "set", "(", "login", "a", "=", "good", "b", "=", "Test", "()", ")", "action_steps_", "_for_upd", ")", ";", "#", "-", ">", "action_steps_set_for_upd", "(", "login", "a", "=", "good", "b", "=", "Test", "()", ")" ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_common.py#L886-L912
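The docstring examples above restated as Python 3 assertions, to make the expected return values explicit; the import path is assumed.

from rtsf.p_common import map_function  # assumed import path

assert map_function('set', alias_func='ini_items') == 'ini_items'
assert map_function('set(a=1,b=2)', 'action_steps_', '_for_upd') == 'action_steps_set_for_upd(a=1,b=2)'
assert map_function('set(a=1,b=2)', 'action_steps_', '_for_upd', 'ini_items') == 'action_steps_ini_items_for_upd(a=1,b=2)'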
RockFeng0/rtsf
rtsf/p_common.py
IntelligentWaitUtils.until_cmd
def until_cmd(listcmd, end_expects=None, save2logfile=None, coding = encoding): ''' 执行系统命令,并等待执行完 @param listcmd: 执行的命令,列表格式 @param end_expects: 命令执行结束,在输出的最后一行,正则搜素期望值,并设置 结果标志 @param save2logfile: 设置执行过程,保存的日志 @param coding: 设置输出编码 ''' if end_expects and not isinstance(end_expects, p_compat.str): raise Exception("invalide unicode string: '%s'" %end_expects) lines = [] subp = subprocess.Popen(listcmd,stdout=subprocess.PIPE,stderr=subprocess.STDOUT) while subp.poll()==None: next_line = subp.stdout.readline().decode(coding) if next_line: # print(next_line) lines.append(next_line) if end_expects and re.search(end_expects, next_line): result = True else: result = False subp.stdout.close() if subp.returncode: result = False lines.append("sub command error code: %s" %subp.returncode) if save2logfile: with open(save2logfile, 'a') as f: f.writelines(lines) return result
python
def until_cmd(listcmd, end_expects=None, save2logfile=None, coding = encoding): ''' 执行系统命令,并等待执行完 @param listcmd: 执行的命令,列表格式 @param end_expects: 命令执行结束,在输出的最后一行,正则搜素期望值,并设置 结果标志 @param save2logfile: 设置执行过程,保存的日志 @param coding: 设置输出编码 ''' if end_expects and not isinstance(end_expects, p_compat.str): raise Exception("invalide unicode string: '%s'" %end_expects) lines = [] subp = subprocess.Popen(listcmd,stdout=subprocess.PIPE,stderr=subprocess.STDOUT) while subp.poll()==None: next_line = subp.stdout.readline().decode(coding) if next_line: # print(next_line) lines.append(next_line) if end_expects and re.search(end_expects, next_line): result = True else: result = False subp.stdout.close() if subp.returncode: result = False lines.append("sub command error code: %s" %subp.returncode) if save2logfile: with open(save2logfile, 'a') as f: f.writelines(lines) return result
[ "def", "until_cmd", "(", "listcmd", ",", "end_expects", "=", "None", ",", "save2logfile", "=", "None", ",", "coding", "=", "encoding", ")", ":", "if", "end_expects", "and", "not", "isinstance", "(", "end_expects", ",", "p_compat", ".", "str", ")", ":", "raise", "Exception", "(", "\"invalide unicode string: '%s'\"", "%", "end_expects", ")", "lines", "=", "[", "]", "subp", "=", "subprocess", ".", "Popen", "(", "listcmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "STDOUT", ")", "while", "subp", ".", "poll", "(", ")", "==", "None", ":", "next_line", "=", "subp", ".", "stdout", ".", "readline", "(", ")", ".", "decode", "(", "coding", ")", "if", "next_line", ":", "# print(next_line)\r", "lines", ".", "append", "(", "next_line", ")", "if", "end_expects", "and", "re", ".", "search", "(", "end_expects", ",", "next_line", ")", ":", "result", "=", "True", "else", ":", "result", "=", "False", "subp", ".", "stdout", ".", "close", "(", ")", "if", "subp", ".", "returncode", ":", "result", "=", "False", "lines", ".", "append", "(", "\"sub command error code: %s\"", "%", "subp", ".", "returncode", ")", "if", "save2logfile", ":", "with", "open", "(", "save2logfile", ",", "'a'", ")", "as", "f", ":", "f", ".", "writelines", "(", "lines", ")", "return", "result" ]
Run a system command and wait for it to finish @param listcmd: the command to run, in list form @param end_expects: when the command finishes, regex-search the last line of the output for the expected value and set the result flag @param save2logfile: log file in which to save the execution output @param coding: output encoding
[ "执行系统命令", "并等待执行完" ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_common.py#L118-L151
RockFeng0/rtsf
rtsf/p_common.py
IntelligentWaitUtils.until
def until(method, timeout = 30, message=''): """Calls the method until the return value is not False.""" end_time = time.time() + timeout while True: try: value = method() if value: return value except: pass time.sleep(1) if time.time() > end_time: break raise Exception(message)
python
def until(method, timeout = 30, message=''): """Calls the method until the return value is not False.""" end_time = time.time() + timeout while True: try: value = method() if value: return value except: pass time.sleep(1) if time.time() > end_time: break raise Exception(message)
[ "def", "until", "(", "method", ",", "timeout", "=", "30", ",", "message", "=", "''", ")", ":", "end_time", "=", "time", ".", "time", "(", ")", "+", "timeout", "while", "True", ":", "try", ":", "value", "=", "method", "(", ")", "if", "value", ":", "return", "value", "except", ":", "pass", "time", ".", "sleep", "(", "1", ")", "if", "time", ".", "time", "(", ")", ">", "end_time", ":", "break", "raise", "Exception", "(", "message", ")" ]
Calls the method until the return value is not False.
[ "Calls", "the", "method", "until", "the", "return", "value", "is", "not", "False", "." ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_common.py#L154-L167
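A small usage sketch for until, polling for a file to appear; the marker file and timeout are arbitrary, and the call assumes until is exposed as a static helper on IntelligentWaitUtils in rtsf.p_common.

import os
from rtsf.p_common import IntelligentWaitUtils  # assumed import path

marker = 'build_done.flag'
open(marker, 'w').close()  # pretend another process has created the marker

# Polls once per second and returns the first truthy value,
# or raises Exception(message) once the timeout is exceeded.
found = IntelligentWaitUtils.until(lambda: os.path.isfile(marker),
                                   timeout=5,
                                   message='marker file never appeared')
print(found)  # True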
RockFeng0/rtsf
rtsf/p_common.py
FileUtils._check_format
def _check_format(file_path, content): """ check testcase format if valid """ if not content: # testcase file content is empty err_msg = u"Testcase file content is empty: {}".format(file_path) raise p_exception.FileFormatError(err_msg) elif not isinstance(content, (list, dict)): # testcase file content does not match testcase format err_msg = u"Testcase file content format invalid: {}".format(file_path) raise p_exception.FileFormatError(err_msg)
python
def _check_format(file_path, content): """ check testcase format if valid """ if not content: # testcase file content is empty err_msg = u"Testcase file content is empty: {}".format(file_path) raise p_exception.FileFormatError(err_msg) elif not isinstance(content, (list, dict)): # testcase file content does not match testcase format err_msg = u"Testcase file content format invalid: {}".format(file_path) raise p_exception.FileFormatError(err_msg)
[ "def", "_check_format", "(", "file_path", ",", "content", ")", ":", "if", "not", "content", ":", "# testcase file content is empty\r", "err_msg", "=", "u\"Testcase file content is empty: {}\"", ".", "format", "(", "file_path", ")", "raise", "p_exception", ".", "FileFormatError", "(", "err_msg", ")", "elif", "not", "isinstance", "(", "content", ",", "(", "list", ",", "dict", ")", ")", ":", "# testcase file content does not match testcase format\r", "err_msg", "=", "u\"Testcase file content format invalid: {}\"", ".", "format", "(", "file_path", ")", "raise", "p_exception", ".", "FileFormatError", "(", "err_msg", ")" ]
check whether the testcase file content format is valid
[ "check", "testcase", "format", "if", "valid" ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_common.py#L222-L233
RockFeng0/rtsf
rtsf/p_common.py
FileUtils._load_yaml_file
def _load_yaml_file(yaml_file): """ load yaml file and check file content format """ with io.open(yaml_file, 'r', encoding='utf-8') as stream: yaml_content = yaml.load(stream) FileUtils._check_format(yaml_file, yaml_content) return yaml_content
python
def _load_yaml_file(yaml_file): """ load yaml file and check file content format """ with io.open(yaml_file, 'r', encoding='utf-8') as stream: yaml_content = yaml.load(stream) FileUtils._check_format(yaml_file, yaml_content) return yaml_content
[ "def", "_load_yaml_file", "(", "yaml_file", ")", ":", "with", "io", ".", "open", "(", "yaml_file", ",", "'r'", ",", "encoding", "=", "'utf-8'", ")", "as", "stream", ":", "yaml_content", "=", "yaml", ".", "load", "(", "stream", ")", "FileUtils", ".", "_check_format", "(", "yaml_file", ",", "yaml_content", ")", "return", "yaml_content" ]
load yaml file and check file content format
[ "load", "yaml", "file", "and", "check", "file", "content", "format" ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_common.py#L236-L242
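An illustrative call to the loader above; _load_yaml_file is an internal helper (note the leading underscore), so calling it directly here is only to show the expected content shape, and the import path is assumed.

from rtsf.p_common import FileUtils  # assumed import path

with open('sample.yaml', 'w') as f:
    f.write('project:\n  name: demo\n  module: login\n')

data = FileUtils._load_yaml_file('sample.yaml')
print(data)   # {'project': {'name': 'demo', 'module': 'login'}}
# An empty file or a bare scalar would raise FileFormatError via _check_format.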
RockFeng0/rtsf
rtsf/p_common.py
FileUtils._load_json_file
def _load_json_file(json_file): """ load json file and check file content format """ with io.open(json_file, encoding='utf-8') as data_file: try: json_content = json.load(data_file) except p_exception.JSONDecodeError: err_msg = u"JSONDecodeError: JSON file format error: {}".format(json_file) raise p_exception.FileFormatError(err_msg) FileUtils._check_format(json_file, json_content) return json_content
python
def _load_json_file(json_file): """ load json file and check file content format """ with io.open(json_file, encoding='utf-8') as data_file: try: json_content = json.load(data_file) except p_exception.JSONDecodeError: err_msg = u"JSONDecodeError: JSON file format error: {}".format(json_file) raise p_exception.FileFormatError(err_msg) FileUtils._check_format(json_file, json_content) return json_content
[ "def", "_load_json_file", "(", "json_file", ")", ":", "with", "io", ".", "open", "(", "json_file", ",", "encoding", "=", "'utf-8'", ")", "as", "data_file", ":", "try", ":", "json_content", "=", "json", ".", "load", "(", "data_file", ")", "except", "p_exception", ".", "JSONDecodeError", ":", "err_msg", "=", "u\"JSONDecodeError: JSON file format error: {}\"", ".", "format", "(", "json_file", ")", "raise", "p_exception", ".", "FileFormatError", "(", "err_msg", ")", "FileUtils", ".", "_check_format", "(", "json_file", ",", "json_content", ")", "return", "json_content" ]
load json file and check file content format
[ "load", "json", "file", "and", "check", "file", "content", "format" ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_common.py#L245-L256
RockFeng0/rtsf
rtsf/p_common.py
FileUtils._load_csv_file
def _load_csv_file(csv_file): """ load csv file and check file content format @param csv_file: csv file path e.g. csv file content: username,password test1,111111 test2,222222 test3,333333 @return list of parameter, each parameter is in dict format e.g. [ {'username': 'test1', 'password': '111111'}, {'username': 'test2', 'password': '222222'}, {'username': 'test3', 'password': '333333'} ] """ csv_content_list = [] with io.open(csv_file, encoding='utf-8') as csvfile: reader = csv.DictReader(csvfile) for row in reader: csv_content_list.append(row) return csv_content_list
python
def _load_csv_file(csv_file): """ load csv file and check file content format @param csv_file: csv file path e.g. csv file content: username,password test1,111111 test2,222222 test3,333333 @return list of parameter, each parameter is in dict format e.g. [ {'username': 'test1', 'password': '111111'}, {'username': 'test2', 'password': '222222'}, {'username': 'test3', 'password': '333333'} ] """ csv_content_list = [] with io.open(csv_file, encoding='utf-8') as csvfile: reader = csv.DictReader(csvfile) for row in reader: csv_content_list.append(row) return csv_content_list
[ "def", "_load_csv_file", "(", "csv_file", ")", ":", "csv_content_list", "=", "[", "]", "with", "io", ".", "open", "(", "csv_file", ",", "encoding", "=", "'utf-8'", ")", "as", "csvfile", ":", "reader", "=", "csv", ".", "DictReader", "(", "csvfile", ")", "for", "row", "in", "reader", ":", "csv_content_list", ".", "append", "(", "row", ")", "return", "csv_content_list" ]
load csv file and check file content format @param csv_file: csv file path e.g. csv file content: username,password test1,111111 test2,222222 test3,333333 @return list of parameter, each parameter is in dict format e.g. [ {'username': 'test1', 'password': '111111'}, {'username': 'test2', 'password': '222222'}, {'username': 'test3', 'password': '333333'} ]
[ "load", "csv", "file", "and", "check", "file", "content", "format" ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_common.py#L259-L284
RockFeng0/rtsf
rtsf/p_common.py
FileSystemUtils.force_delete_file
def force_delete_file(file_path): ''' force delete a file ''' if os.path.isfile(file_path): try: os.remove(file_path) return file_path except: return FileSystemUtils.add_unique_postfix(file_path) else: return file_path
python
def force_delete_file(file_path): ''' force delete a file ''' if os.path.isfile(file_path): try: os.remove(file_path) return file_path except: return FileSystemUtils.add_unique_postfix(file_path) else: return file_path
[ "def", "force_delete_file", "(", "file_path", ")", ":", "if", "os", ".", "path", ".", "isfile", "(", "file_path", ")", ":", "try", ":", "os", ".", "remove", "(", "file_path", ")", "return", "file_path", "except", ":", "return", "FileSystemUtils", ".", "add_unique_postfix", "(", "file_path", ")", "else", ":", "return", "file_path" ]
force delete a file
[ "force", "delete", "a", "file" ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_common.py#L403-L412
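A short sketch of force_delete_file's return contract, which is easy to miss: the path comes back unchanged when the delete succeeds (or the file is absent), and a uniquely post-fixed alternative path is returned only when the file cannot be removed. The file name is arbitrary and the import path is assumed.

from rtsf.p_common import FileSystemUtils  # assumed import path

with open('stale_report.html', 'w') as f:
    f.write('<html></html>')

usable_path = FileSystemUtils.force_delete_file('stale_report.html')
print(usable_path)   # stale_report.html  (file was deleted, same path returned)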
RockFeng0/rtsf
rtsf/p_common.py
ZipUtils.mkzip
def mkzip(source_dir, output_filename): '''Usage: p = r'D:\auto\env\ttest\ins\build\lib\rock4\softtest\support' mkzip(os.path.join(p, "appiumroot"),os.path.join(p, "appiumroot.zip")) unzip(os.path.join(p, "appiumroot.zip"),os.path.join(p, "appiumroot2")) ''' zipf = zipfile.ZipFile(output_filename, 'w', zipfile.zlib.DEFLATED) pre_len = len(os.path.dirname(source_dir)) for parent, dirnames, filenames in os.walk(source_dir): for filename in filenames: pathfile = os.path.join(parent, filename) arcname = pathfile[pre_len:].strip(os.path.sep);#相对路径 zipf.write(pathfile, arcname) zipf.close()
python
def mkzip(source_dir, output_filename): '''Usage: p = r'D:\auto\env\ttest\ins\build\lib\rock4\softtest\support' mkzip(os.path.join(p, "appiumroot"),os.path.join(p, "appiumroot.zip")) unzip(os.path.join(p, "appiumroot.zip"),os.path.join(p, "appiumroot2")) ''' zipf = zipfile.ZipFile(output_filename, 'w', zipfile.zlib.DEFLATED) pre_len = len(os.path.dirname(source_dir)) for parent, dirnames, filenames in os.walk(source_dir): for filename in filenames: pathfile = os.path.join(parent, filename) arcname = pathfile[pre_len:].strip(os.path.sep);#相对路径 zipf.write(pathfile, arcname) zipf.close()
[ "def", "mkzip", "(", "source_dir", ",", "output_filename", ")", ":", "zipf", "=", "zipfile", ".", "ZipFile", "(", "output_filename", ",", "'w'", ",", "zipfile", ".", "zlib", ".", "DEFLATED", ")", "pre_len", "=", "len", "(", "os", ".", "path", ".", "dirname", "(", "source_dir", ")", ")", "for", "parent", ",", "dirnames", ",", "filenames", "in", "os", ".", "walk", "(", "source_dir", ")", ":", "for", "filename", "in", "filenames", ":", "pathfile", "=", "os", ".", "path", ".", "join", "(", "parent", ",", "filename", ")", "arcname", "=", "pathfile", "[", "pre_len", ":", "]", ".", "strip", "(", "os", ".", "path", ".", "sep", ")", "#相对路径\r", "zipf", ".", "write", "(", "pathfile", ",", "arcname", ")", "zipf", ".", "close", "(", ")" ]
Usage: p = r'D:\auto\env\ttest\ins\build\lib\rock4\softtest\support' mkzip(os.path.join(p, "appiumroot"),os.path.join(p, "appiumroot.zip")) unzip(os.path.join(p, "appiumroot.zip"),os.path.join(p, "appiumroot2"))
[ "Usage", ":", "p", "=", "r", "D", ":", "\\", "auto", "\\", "env", "\\", "ttest", "\\", "ins", "\\", "build", "\\", "lib", "\\", "rock4", "\\", "softtest", "\\", "support", "mkzip", "(", "os", ".", "path", ".", "join", "(", "p", "appiumroot", ")", "os", ".", "path", ".", "join", "(", "p", "appiumroot", ".", "zip", "))", "unzip", "(", "os", ".", "path", ".", "join", "(", "p", "appiumroot", ".", "zip", ")", "os", ".", "path", ".", "join", "(", "p", "appiumroot2", "))" ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_common.py#L418-L431
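A cross-platform variant of the docstring usage above, built on a throwaway directory and verified with the standard zipfile module; the directory layout is made up and ZipUtils is assumed to be importable from rtsf.p_common.

import os
import zipfile
from rtsf.p_common import ZipUtils  # assumed import path

os.makedirs('demo_src/sub', exist_ok=True)
with open('demo_src/sub/hello.txt', 'w') as f:
    f.write('hello')

ZipUtils.mkzip('demo_src', 'demo_src.zip')

# Entries keep the top-level folder name because arcname is taken
# relative to the parent of source_dir.
with zipfile.ZipFile('demo_src.zip') as zf:
    print(zf.namelist())   # ['demo_src/sub/hello.txt']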
RockFeng0/rtsf
rtsf/p_common.py
ModuleUtils.get_imported_module_from_file
def get_imported_module_from_file(file_path): """ import module from python file path and return imported module """ if p_compat.is_py3: imported_module = importlib.machinery.SourceFileLoader('module_name', file_path).load_module() elif p_compat.is_py2: imported_module = imp.load_source('module_name', file_path) else: raise RuntimeError("Neither Python 3 nor Python 2.") return imported_module
python
def get_imported_module_from_file(file_path): """ import module from python file path and return imported module """ if p_compat.is_py3: imported_module = importlib.machinery.SourceFileLoader('module_name', file_path).load_module() elif p_compat.is_py2: imported_module = imp.load_source('module_name', file_path) else: raise RuntimeError("Neither Python 3 nor Python 2.") return imported_module
[ "def", "get_imported_module_from_file", "(", "file_path", ")", ":", "if", "p_compat", ".", "is_py3", ":", "imported_module", "=", "importlib", ".", "machinery", ".", "SourceFileLoader", "(", "'module_name'", ",", "file_path", ")", ".", "load_module", "(", ")", "elif", "p_compat", ".", "is_py2", ":", "imported_module", "=", "imp", ".", "load_source", "(", "'module_name'", ",", "file_path", ")", "else", ":", "raise", "RuntimeError", "(", "\"Neither Python 3 nor Python 2.\"", ")", "return", "imported_module" ]
import module from python file path and return imported module
[ "import", "module", "from", "python", "file", "path", "and", "return", "imported", "module" ]
train
https://github.com/RockFeng0/rtsf/blob/fbc0d57edaeca86418af3942472fcc6d3e9ce591/rtsf/p_common.py#L498-L508
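A usage sketch for the loader above: write a tiny helper module to disk and load it by file path; the module name and contents are made up, and the import path for ModuleUtils is assumed.

from rtsf.p_common import ModuleUtils  # assumed import path

with open('my_helpers.py', 'w') as f:
    f.write("GREETING = 'hi'\n\ndef shout(msg):\n    return msg.upper()\n")

mod = ModuleUtils.get_imported_module_from_file('my_helpers.py')
print(mod.GREETING)        # hi
print(mod.shout('done'))   # DONE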