column            type      values
repo              string    length 7–55
path              string    length 4–127
func_name         string    length 1–88
original_string   string    length 75–19.8k
language          string    1 distinct value
code              string    length 75–19.8k
code_tokens       list
docstring         string    length 3–17.3k
docstring_tokens  list
sha               string    length 40–40
url               string    length 87–242
partition         string    1 distinct value
libvips/pyvips
pyvips/base.py
get_suffixes
python
def get_suffixes():
    """Get a list of all the filename suffixes supported by libvips.

    Returns:
        [string]
    """
    names = []

    if at_least_libvips(8, 8):
        array = vips_lib.vips_foreign_get_suffixes()
        i = 0
        while array[i] != ffi.NULL:
            name = _to_string(array[i])
            if name not in names:
                names.append(name)
            glib_lib.g_free(array[i])
            i += 1
        glib_lib.g_free(array)

    return names
[ "def", "get_suffixes", "(", ")", ":", "names", "=", "[", "]", "if", "at_least_libvips", "(", "8", ",", "8", ")", ":", "array", "=", "vips_lib", ".", "vips_foreign_get_suffixes", "(", ")", "i", "=", "0", "while", "array", "[", "i", "]", "!=", "ffi", ".", "NULL", ":", "name", "=", "_to_string", "(", "array", "[", "i", "]", ")", "if", "name", "not", "in", "names", ":", "names", ".", "append", "(", "name", ")", "glib_lib", ".", "g_free", "(", "array", "[", "i", "]", ")", "i", "+=", "1", "glib_lib", ".", "g_free", "(", "array", ")", "return", "names" ]
Get a list of all the filename suffixes supported by libvips. Returns: [string]
[ "Get", "a", "list", "of", "all", "the", "filename", "suffixes", "supported", "by", "libvips", "." ]
f4d9334d2e3085b4b058129f14ac17a7872b109b
https://github.com/libvips/pyvips/blob/f4d9334d2e3085b4b058129f14ac17a7872b109b/pyvips/base.py#L39-L60
train
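For context, a minimal usage sketch of the row above; it assumes the package re-exports the helper at top level (pyvips.get_suffixes) and that the linked libvips is at least 8.8, since older versions take the early-return path and yield an empty list:

import pyvips

# Every filename suffix libvips can load or save, e.g. '.jpg', '.png'.
print(sorted(pyvips.get_suffixes()))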
libvips/pyvips
pyvips/base.py
at_least_libvips
python
def at_least_libvips(x, y):
    """Is this at least libvips x.y?"""
    major = version(0)
    minor = version(1)

    return major > x or (major == x and minor >= y)
[ "def", "at_least_libvips", "(", "x", ",", "y", ")", ":", "major", "=", "version", "(", "0", ")", "minor", "=", "version", "(", "1", ")", "return", "major", ">", "x", "or", "(", "major", "==", "x", "and", "minor", ">=", "y", ")" ]
Is this at least libvips x.y?
[ "Is", "this", "at", "least", "libvips", "x", ".", "y?" ]
f4d9334d2e3085b4b058129f14ac17a7872b109b
https://github.com/libvips/pyvips/blob/f4d9334d2e3085b4b058129f14ac17a7872b109b/pyvips/base.py#L65-L71
train
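A sketch of the version guard in use, mirroring how get_suffixes above calls it (the pyvips.base import path comes from this row; version(0)/version(1) return libvips' major and minor numbers):

from pyvips import base

# Ordinary (major, minor) comparison against the running libvips.
if base.at_least_libvips(8, 8):
    print('vips_foreign_get_suffixes() is available')
else:
    print('libvips is older than 8.8')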
libvips/pyvips
pyvips/base.py
type_map
python
def type_map(gtype, fn):
    """Map fn over all child types of gtype."""
    cb = ffi.callback('VipsTypeMap2Fn', fn)
    return vips_lib.vips_type_map(gtype, cb, ffi.NULL, ffi.NULL)
[ "def", "type_map", "(", "gtype", ",", "fn", ")", ":", "cb", "=", "ffi", ".", "callback", "(", "'VipsTypeMap2Fn'", ",", "fn", ")", "return", "vips_lib", ".", "vips_type_map", "(", "gtype", ",", "cb", ",", "ffi", ".", "NULL", ",", "ffi", ".", "NULL", ")" ]
Map fn over all child types of gtype.
[ "Map", "fn", "over", "all", "child", "types", "of", "gtype", "." ]
f4d9334d2e3085b4b058129f14ac17a7872b109b
https://github.com/libvips/pyvips/blob/f4d9334d2e3085b4b058129f14ac17a7872b109b/pyvips/base.py#L110-L114
train
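A hedged sketch of walking the GType tree with type_map. It assumes the companion helpers type_from_name and type_name from the same pyvips.base module, and that a VipsTypeMap2Fn callback receives (gtype, a, b) and returns ffi.NULL to keep the iteration going; treat the exact FFI details as assumptions.

from pyvips import base, ffi

# Collect the names of the direct subtypes of VipsOperation.
names = []

def visit(gtype, a, b):
    names.append(base.type_name(gtype))
    return ffi.NULL  # NULL continues the walk; non-NULL stops it

base.type_map(base.type_from_name('VipsOperation'), visit)
print(names)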
borntyping/python-colorlog
colorlog/logging.py
basicConfig
python
def basicConfig(**kwargs):
    """Call ``logging.basicConfig`` and override the formatter it creates."""
    logging.basicConfig(**kwargs)
    logging._acquireLock()
    try:
        stream = logging.root.handlers[0]
        stream.setFormatter(
            ColoredFormatter(
                fmt=kwargs.get('format', BASIC_FORMAT),
                datefmt=kwargs.get('datefmt', None)))
    finally:
        logging._releaseLock()
[ "def", "basicConfig", "(", "*", "*", "kwargs", ")", ":", "logging", ".", "basicConfig", "(", "*", "*", "kwargs", ")", "logging", ".", "_acquireLock", "(", ")", "try", ":", "stream", "=", "logging", ".", "root", ".", "handlers", "[", "0", "]", "stream", ".", "setFormatter", "(", "ColoredFormatter", "(", "fmt", "=", "kwargs", ".", "get", "(", "'format'", ",", "BASIC_FORMAT", ")", ",", "datefmt", "=", "kwargs", ".", "get", "(", "'datefmt'", ",", "None", ")", ")", ")", "finally", ":", "logging", ".", "_releaseLock", "(", ")" ]
Call ``logging.basicConfig`` and override the formatter it creates.
[ "Call", "logging", ".", "basicConfig", "and", "override", "the", "formatter", "it", "creates", "." ]
d2be1e0e9bff0ceb288c6a6381a6d12cf550e1e7
https://github.com/borntyping/python-colorlog/blob/d2be1e0e9bff0ceb288c6a6381a6d12cf550e1e7/colorlog/logging.py#L13-L24
train
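A small usage sketch: colorlog exposes this basicConfig as a drop-in for logging.basicConfig, so root-logger output picks up a ColoredFormatter built from the same format/datefmt keywords:

import logging
import colorlog

colorlog.basicConfig(
    level=logging.DEBUG,
    format='%(log_color)s%(levelname)s:%(name)s:%(message)s',
)
logging.getLogger('demo').info('colored output on a TTY')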
borntyping/python-colorlog
colorlog/logging.py
ensure_configured
python
def ensure_configured(func):
    """Modify a function to call ``basicConfig`` first if no handlers exist."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if len(logging.root.handlers) == 0:
            basicConfig()
        return func(*args, **kwargs)
    return wrapper
[ "def", "ensure_configured", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "len", "(", "logging", ".", "root", ".", "handlers", ")", "==", "0", ":", "basicConfig", "(", ")", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper" ]
Modify a function to call ``basicConfig`` first if no handlers exist.
[ "Modify", "a", "function", "to", "call", "basicConfig", "first", "if", "no", "handlers", "exist", "." ]
d2be1e0e9bff0ceb288c6a6381a6d12cf550e1e7
https://github.com/borntyping/python-colorlog/blob/d2be1e0e9bff0ceb288c6a6381a6d12cf550e1e7/colorlog/logging.py#L27-L34
train
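A sketch of the decorator applied to a module-level convenience wrapper, the pattern colorlog/logging.py uses for its own logging helpers (the wrapper below is illustrative, not part of the library):

import logging
from colorlog.logging import ensure_configured

@ensure_configured
def warning(msg, *args, **kwargs):
    # The first call installs a handler via basicConfig(); later calls
    # find logging.root.handlers non-empty and pass straight through.
    logging.warning(msg, *args, **kwargs)

warning('configures the root logger on first use')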
borntyping/python-colorlog
colorlog/colorlog.py
TTYColoredFormatter.color
python
def color(self, log_colors, level_name):
    """Only returns colors if STDOUT is a TTY."""
    if not self.stream.isatty():
        log_colors = {}
    return ColoredFormatter.color(self, log_colors, level_name)
[ "def", "color", "(", "self", ",", "log_colors", ",", "level_name", ")", ":", "if", "not", "self", ".", "stream", ".", "isatty", "(", ")", ":", "log_colors", "=", "{", "}", "return", "ColoredFormatter", ".", "color", "(", "self", ",", "log_colors", ",", "level_name", ")" ]
Only returns colors if STDOUT is a TTY.
[ "Only", "returns", "colors", "if", "STDOUT", "is", "a", "TTY", "." ]
d2be1e0e9bff0ceb288c6a6381a6d12cf550e1e7
https://github.com/borntyping/python-colorlog/blob/d2be1e0e9bff0ceb288c6a6381a6d12cf550e1e7/colorlog/colorlog.py#L207-L211
train
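A hedged sketch of wiring the formatter to a handler; it assumes TTYColoredFormatter is importable from the colorlog package and takes the stream it should probe as a stream keyword (it needs self.stream for the isatty() check above):

import sys
import logging
from colorlog import TTYColoredFormatter

handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(TTYColoredFormatter(
    '%(log_color)s%(levelname)s:%(message)s',
    stream=sys.stdout,  # colors only when this stream is a TTY
))
logging.getLogger('demo').addHandler(handler)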
borntyping/python-colorlog
doc/example.py
setup_logger
python
def setup_logger():
    """Return a logger with a default ColoredFormatter."""
    formatter = ColoredFormatter(
        "%(log_color)s%(levelname)-8s%(reset)s %(blue)s%(message)s",
        datefmt=None,
        reset=True,
        log_colors={
            'DEBUG': 'cyan',
            'INFO': 'green',
            'WARNING': 'yellow',
            'ERROR': 'red',
            'CRITICAL': 'red',
        }
    )

    logger = logging.getLogger('example')
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    return logger
[ "def", "setup_logger", "(", ")", ":", "formatter", "=", "ColoredFormatter", "(", "\"%(log_color)s%(levelname)-8s%(reset)s %(blue)s%(message)s\"", ",", "datefmt", "=", "None", ",", "reset", "=", "True", ",", "log_colors", "=", "{", "'DEBUG'", ":", "'cyan'", ",", "'INFO'", ":", "'green'", ",", "'WARNING'", ":", "'yellow'", ",", "'ERROR'", ":", "'red'", ",", "'CRITICAL'", ":", "'red'", ",", "}", ")", "logger", "=", "logging", ".", "getLogger", "(", "'example'", ")", "handler", "=", "logging", ".", "StreamHandler", "(", ")", "handler", ".", "setFormatter", "(", "formatter", ")", "logger", ".", "addHandler", "(", "handler", ")", "logger", ".", "setLevel", "(", "logging", ".", "DEBUG", ")", "return", "logger" ]
Return a logger with a default ColoredFormatter.
[ "Return", "a", "logger", "with", "a", "default", "ColoredFormatter", "." ]
d2be1e0e9bff0ceb288c6a6381a6d12cf550e1e7
https://github.com/borntyping/python-colorlog/blob/d2be1e0e9bff0ceb288c6a6381a6d12cf550e1e7/doc/example.py#L8-L29
train
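Continuing the example script, a quick exercise of the logger (colors per the log_colors mapping above):

logger = setup_logger()
logger.debug('a debug message')      # cyan
logger.info('an info message')       # green
logger.warning('a warning message')  # yellow
logger.error('an error message')     # red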
ralphbean/taskw
taskw/warrior.py
TaskWarriorBase._extract_annotations_from_task
python
def _extract_annotations_from_task(self, task):
    """ Removes annotations from a task and returns a list of annotations
    """
    annotations = list()
    if 'annotations' in task:
        existing_annotations = task.pop('annotations')
        for v in existing_annotations:
            if isinstance(v, dict):
                annotations.append(v['description'])
            else:
                annotations.append(v)

    for key in list(task.keys()):
        if key.startswith('annotation_'):
            annotations.append(task[key])
            del task[key]

    return annotations
[ "def", "_extract_annotations_from_task", "(", "self", ",", "task", ")", ":", "annotations", "=", "list", "(", ")", "if", "'annotations'", "in", "task", ":", "existing_annotations", "=", "task", ".", "pop", "(", "'annotations'", ")", "for", "v", "in", "existing_annotations", ":", "if", "isinstance", "(", "v", ",", "dict", ")", ":", "annotations", ".", "append", "(", "v", "[", "'description'", "]", ")", "else", ":", "annotations", ".", "append", "(", "v", ")", "for", "key", "in", "list", "(", "task", ".", "keys", "(", ")", ")", ":", "if", "key", ".", "startswith", "(", "'annotation_'", ")", ":", "annotations", ".", "append", "(", "task", "[", "key", "]", ")", "del", "(", "task", "[", "key", "]", ")", "return", "annotations" ]
Removes annotations from a task and returns a list of annotations
[ "Removes", "annotations", "from", "a", "task", "and", "returns", "a", "list", "of", "annotations" ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/warrior.py#L94-L111
train
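An illustration of the two annotation shapes the method handles, run against a bare dict; self is unused by the method, so the unbound call suffices for a demo, and the dict contents are illustrative:

from taskw.warrior import TaskWarriorBase

task = {
    'description': 'write docs',
    'annotations': [{'description': 'exported form'}, 'plain string form'],
    'annotation_1297600000': 'legacy flattened form',
}
notes = TaskWarriorBase._extract_annotations_from_task(None, task)
print(notes)  # all three annotation texts, in collection order
print(task)   # 'annotations' and 'annotation_*' keys are gone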
ralphbean/taskw
taskw/warrior.py
TaskWarriorDirect.task_done
python
def task_done(self, **kw):
    """
    Marks a pending task as done, optionally specifying a completion
    date with the 'end' argument.
    """
    def validate(task):
        if not Status.is_pending(task['status']):
            raise ValueError("Task is not pending.")

    return self._task_change_status(Status.COMPLETED, validate, **kw)
[ "def", "task_done", "(", "self", ",", "*", "*", "kw", ")", ":", "def", "validate", "(", "task", ")", ":", "if", "not", "Status", ".", "is_pending", "(", "task", "[", "'status'", "]", ")", ":", "raise", "ValueError", "(", "\"Task is not pending.\"", ")", "return", "self", ".", "_task_change_status", "(", "Status", ".", "COMPLETED", ",", "validate", ",", "*", "*", "kw", ")" ]
Marks a pending task as done, optionally specifying a completion date with the 'end' argument.
[ "Marks", "a", "pending", "task", "as", "done", "optionally", "specifying", "a", "completion", "date", "with", "the", "end", "argument", "." ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/warrior.py#L289-L298
train
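A sketch against the direct (on-disk) interface; the config path and id are illustrative, and the lookup keywords are whatever get_task accepts:

from taskw.warrior import TaskWarriorDirect

w = TaskWarriorDirect(config_filename='~/.taskrc')  # illustrative path
# Validates the task is still pending, then flips it to completed;
# a ValueError propagates if it was already done or deleted.
completed = w.task_done(id=1)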
ralphbean/taskw
taskw/warrior.py
TaskWarriorDirect.task_delete
python
def task_delete(self, **kw):
    """
    Marks a task as deleted, optionally specifying a completion
    date with the 'end' argument.
    """
    def validate(task):
        if task['status'] == Status.DELETED:
            raise ValueError("Task is already deleted.")

    return self._task_change_status(Status.DELETED, validate, **kw)
[ "def", "task_delete", "(", "self", ",", "*", "*", "kw", ")", ":", "def", "validate", "(", "task", ")", ":", "if", "task", "[", "'status'", "]", "==", "Status", ".", "DELETED", ":", "raise", "ValueError", "(", "\"Task is already deleted.\"", ")", "return", "self", ".", "_task_change_status", "(", "Status", ".", "DELETED", ",", "validate", ",", "*", "*", "kw", ")" ]
Marks a task as deleted, optionally specifying a completion date with the 'end' argument.
[ "Marks", "a", "task", "as", "deleted", "optionally", "specifying", "a", "completion", "date", "with", "the", "end", "argument", "." ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/warrior.py#L319-L328
train
ralphbean/taskw
taskw/warrior.py
TaskWarriorShellout._execute
python
def _execute(self, *args):
    """ Execute a given taskwarrior command with arguments

    Returns a 2-tuple of stdout and stderr (respectively).

    """
    command = (
        [
            'task',
            'rc:%s' % self.config_filename,
        ]
        + self.get_configuration_override_args()
        + [six.text_type(arg) for arg in args]
    )

    # subprocess is expecting bytestrings only, so nuke unicode if present
    # and remove control characters
    for i in range(len(command)):
        if isinstance(command[i], six.text_type):
            command[i] = (
                taskw.utils.clean_ctrl_chars(command[i].encode('utf-8')))

    try:
        proc = subprocess.Popen(
            command,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        stdout, stderr = proc.communicate()
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise OSError("Unable to find the 'task' command-line tool.")
        raise

    if proc.returncode != 0:
        raise TaskwarriorError(command, stderr, stdout, proc.returncode)

    # We should get bytes from the outside world.  Turn those into unicode
    # as soon as we can.
    # Everything going into and coming out of taskwarrior *should* be
    # utf-8, but there are weird edge cases where something totally unusual
    # made it in.. so we need to be able to handle (or at least try to
    # handle) whatever.  Kitchen tries its best.
    try:
        stdout = stdout.decode(self.config.get('encoding', 'utf-8'))
    except UnicodeDecodeError:
        stdout = kitchen.text.converters.to_unicode(stdout)
    try:
        stderr = stderr.decode(self.config.get('encoding', 'utf-8'))
    except UnicodeDecodeError:
        stderr = kitchen.text.converters.to_unicode(stderr)

    # strip any crazy terminal escape characters like bells, backspaces,
    # form feeds, and ESC ('\x1b', which the token list preserves)
    for c in ('\a', '\b', '\f', '\x1b'):
        stdout = stdout.replace(c, '?')
        stderr = stderr.replace(c, '?')

    return stdout, stderr
[ "def", "_execute", "(", "self", ",", "*", "args", ")", ":", "command", "=", "(", "[", "'task'", ",", "'rc:%s'", "%", "self", ".", "config_filename", ",", "]", "+", "self", ".", "get_configuration_override_args", "(", ")", "+", "[", "six", ".", "text_type", "(", "arg", ")", "for", "arg", "in", "args", "]", ")", "# subprocess is expecting bytestrings only, so nuke unicode if present", "# and remove control characters", "for", "i", "in", "range", "(", "len", "(", "command", ")", ")", ":", "if", "isinstance", "(", "command", "[", "i", "]", ",", "six", ".", "text_type", ")", ":", "command", "[", "i", "]", "=", "(", "taskw", ".", "utils", ".", "clean_ctrl_chars", "(", "command", "[", "i", "]", ".", "encode", "(", "'utf-8'", ")", ")", ")", "try", ":", "proc", "=", "subprocess", ".", "Popen", "(", "command", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", ")", "stdout", ",", "stderr", "=", "proc", ".", "communicate", "(", ")", "except", "OSError", "as", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "ENOENT", ":", "raise", "OSError", "(", "\"Unable to find the 'task' command-line tool.\"", ")", "raise", "if", "proc", ".", "returncode", "!=", "0", ":", "raise", "TaskwarriorError", "(", "command", ",", "stderr", ",", "stdout", ",", "proc", ".", "returncode", ")", "# We should get bytes from the outside world. Turn those into unicode", "# as soon as we can.", "# Everything going into and coming out of taskwarrior *should* be", "# utf-8, but there are weird edge cases where something totally unusual", "# made it in.. so we need to be able to handle (or at least try to", "# handle) whatever. Kitchen tries its best.", "try", ":", "stdout", "=", "stdout", ".", "decode", "(", "self", ".", "config", ".", "get", "(", "'encoding'", ",", "'utf-8'", ")", ")", "except", "UnicodeDecodeError", "as", "e", ":", "stdout", "=", "kitchen", ".", "text", ".", "converters", ".", "to_unicode", "(", "stdout", ")", "try", ":", "stderr", "=", "stderr", ".", "decode", "(", "self", ".", "config", ".", "get", "(", "'encoding'", ",", "'utf-8'", ")", ")", "except", "UnicodeDecodeError", "as", "e", ":", "stderr", "=", "kitchen", ".", "text", ".", "converters", ".", "to_unicode", "(", "stderr", ")", "# strip any crazy terminal escape characters like bells, backspaces,", "# and form feeds", "for", "c", "in", "(", "'\\a'", ",", "'\\b'", ",", "'\\f'", ",", "'\u001b'", ")", ":", "stdout", "=", "stdout", ".", "replace", "(", "c", ",", "'?'", ")", "stderr", "=", "stderr", ".", "replace", "(", "c", ",", "'?'", ")", "return", "stdout", ",", "stderr" ]
Execute a given taskwarrior command with arguments Returns a 2-tuple of stdout and stderr (respectively).
[ "Execute", "a", "given", "taskwarrior", "command", "with", "arguments" ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/warrior.py#L441-L499
train
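A sketch of calling the helper through the shellout interface; TaskWarrior is taskw's shellout class, and 'stats' is an ordinary task subcommand:

from taskw import TaskWarrior

w = TaskWarrior()
# Decoded text comes back; a non-zero exit raises TaskwarriorError and a
# missing `task` binary raises OSError, per the code above.
stdout, stderr = w._execute('stats')
print(stdout)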
ralphbean/taskw
taskw/warrior.py
TaskWarriorShellout.load_tasks
python
def load_tasks(self, command='all'):
    """ Returns a dictionary of tasks for the given command."""
    results = dict(
        (db, self._get_task_objects('status:%s' % db, 'export'))
        for db in Command.files(command)
    )

    # 'waiting' tasks are returned separately from 'pending' tasks.
    # Here we merge the waiting list back into the pending list.
    if 'pending' in results:
        results['pending'].extend(
            self._get_task_objects('status:waiting', 'export'))

    return results
[ "def", "load_tasks", "(", "self", ",", "command", "=", "'all'", ")", ":", "results", "=", "dict", "(", "(", "db", ",", "self", ".", "_get_task_objects", "(", "'status:%s'", "%", "db", ",", "'export'", ")", ")", "for", "db", "in", "Command", ".", "files", "(", "command", ")", ")", "# 'waiting' tasks are returned separately from 'pending' tasks", "# Here we merge the waiting list back into the pending list.", "if", "'pending'", "in", "results", ":", "results", "[", "'pending'", "]", ".", "extend", "(", "self", ".", "_get_task_objects", "(", "'status:waiting'", ",", "'export'", ")", ")", "return", "results" ]
Returns a dictionary of tasks for a list of command.
[ "Returns", "a", "dictionary", "of", "tasks", "for", "a", "list", "of", "command", "." ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/warrior.py#L572-L586
train
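Usage sketch (requires the task binary on PATH):

from taskw import TaskWarrior

w = TaskWarrior()
tasks = w.load_tasks()  # e.g. {'pending': [...], 'completed': [...]}
# Waiting tasks have already been merged into 'pending', per the code above.
print(len(tasks.get('pending', [])))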
ralphbean/taskw
taskw/warrior.py
TaskWarriorShellout.filter_tasks
python
def filter_tasks(self, filter_dict):
    """ Return a filtered list of tasks from taskwarrior.

    Filter dict should be a dictionary mapping filter constraints
    with their values.  For example, to return only pending tasks,
    you could use::

        {'status': 'pending'}

    Or, to return tasks that have the word "Abjad" in their description
    that are also pending::

        {
            'status': 'pending',
            'description.contains': 'Abjad',
        }

    Filters can be quite complex, and are documented on Taskwarrior's
    website.
    """
    query_args = taskw.utils.encode_query(filter_dict, self.get_version())
    return self._get_task_objects(
        'export',
        *query_args
    )
[ "def", "filter_tasks", "(", "self", ",", "filter_dict", ")", ":", "query_args", "=", "taskw", ".", "utils", ".", "encode_query", "(", "filter_dict", ",", "self", ".", "get_version", "(", ")", ")", "return", "self", ".", "_get_task_objects", "(", "'export'", ",", "*", "query_args", ")" ]
Return a filtered list of tasks from taskwarrior. Filter dict should be a dictionary mapping filter constraints with their values. For example, to return only pending tasks, you could use:: {'status': 'pending'} Or, to return tasks that have the word "Abjad" in their description that are also pending:: { 'status': 'pending', 'description.contains': 'Abjad', } Filters can be quite complex, and are documented on Taskwarrior's website.
[ "Return", "a", "filtered", "list", "of", "tasks", "from", "taskwarrior", "." ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/warrior.py#L588-L613
train
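The docstring's own example, spelled out as a call:

from taskw import TaskWarrior

w = TaskWarrior()
matches = w.filter_tasks({
    'status': 'pending',
    'description.contains': 'Abjad',
})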
ralphbean/taskw
taskw/warrior.py
TaskWarriorShellout.task_annotate
python
def task_annotate(self, task, annotation):
    """ Annotates a task. """
    self._execute(
        task['uuid'],
        'annotate',
        '--',
        annotation
    )
    id, annotated_task = self.get_task(uuid=task[six.u('uuid')])
    return annotated_task
[ "def", "task_annotate", "(", "self", ",", "task", ",", "annotation", ")", ":", "self", ".", "_execute", "(", "task", "[", "'uuid'", "]", ",", "'annotate'", ",", "'--'", ",", "annotation", ")", "id", ",", "annotated_task", "=", "self", ".", "get_task", "(", "uuid", "=", "task", "[", "six", ".", "u", "(", "'uuid'", ")", "]", ")", "return", "annotated_task" ]
Annotates a task.
[ "Annotates", "a", "task", "." ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/warrior.py#L710-L719
train
ralphbean/taskw
taskw/warrior.py
TaskWarriorShellout.task_denotate
python
def task_denotate(self, task, annotation):
    """ Removes an annotation from a task. """
    self._execute(
        task['uuid'],
        'denotate',
        '--',
        annotation
    )
    id, denotated_task = self.get_task(uuid=task[six.u('uuid')])
    return denotated_task
[ "def", "task_denotate", "(", "self", ",", "task", ",", "annotation", ")", ":", "self", ".", "_execute", "(", "task", "[", "'uuid'", "]", ",", "'denotate'", ",", "'--'", ",", "annotation", ")", "id", ",", "denotated_task", "=", "self", ".", "get_task", "(", "uuid", "=", "task", "[", "six", ".", "u", "(", "'uuid'", ")", "]", ")", "return", "denotated_task" ]
Removes an annotation from a task.
[ "Removes", "an", "annotation", "from", "a", "task", "." ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/warrior.py#L721-L730
train
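A round trip over the annotate/denotate pair above; the id and annotation text are illustrative:

from taskw import TaskWarrior

w = TaskWarrior()
id, task = w.get_task(id=1)  # illustrative lookup
task = w.task_annotate(task, 'remember the milk')
task = w.task_denotate(task, 'remember the milk')  # back where we started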
ralphbean/taskw
taskw/warrior.py
TaskWarriorShellout.task_delete
python
def task_delete(self, **kw):
    """ Marks a task as deleted.  """
    id, task = self.get_task(**kw)

    if task['status'] == Status.DELETED:
        raise ValueError("Task is already deleted.")

    self._execute(id, 'delete')
    return self.get_task(uuid=task['uuid'])[1]
[ "def", "task_delete", "(", "self", ",", "*", "*", "kw", ")", ":", "id", ",", "task", "=", "self", ".", "get_task", "(", "*", "*", "kw", ")", "if", "task", "[", "'status'", "]", "==", "Status", ".", "DELETED", ":", "raise", "ValueError", "(", "\"Task is already deleted.\"", ")", "self", ".", "_execute", "(", "id", ",", "'delete'", ")", "return", "self", ".", "get_task", "(", "uuid", "=", "task", "[", "'uuid'", "]", ")", "[", "1", "]" ]
Marks a task as deleted.
[ "Marks", "a", "task", "as", "deleted", "." ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/warrior.py#L816-L825
train
ralphbean/taskw
taskw/warrior.py
TaskWarriorShellout.task_start
python
def task_start(self, **kw):
    """ Marks a task as started.  """
    id, task = self.get_task(**kw)

    self._execute(id, 'start')
    return self.get_task(uuid=task['uuid'])[1]
[ "def", "task_start", "(", "self", ",", "*", "*", "kw", ")", ":", "id", ",", "task", "=", "self", ".", "get_task", "(", "*", "*", "kw", ")", "self", ".", "_execute", "(", "id", ",", "'start'", ")", "return", "self", ".", "get_task", "(", "uuid", "=", "task", "[", "'uuid'", "]", ")", "[", "1", "]" ]
Marks a task as started.
[ "Marks", "a", "task", "as", "started", "." ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/warrior.py#L827-L833
train
ralphbean/taskw
taskw/warrior.py
TaskWarriorShellout.task_stop
python
def task_stop(self, **kw):
    """ Marks a task as stopped.  """
    id, task = self.get_task(**kw)

    self._execute(id, 'stop')
    return self.get_task(uuid=task['uuid'])[1]
[ "def", "task_stop", "(", "self", ",", "*", "*", "kw", ")", ":", "id", ",", "task", "=", "self", ".", "get_task", "(", "*", "*", "kw", ")", "self", ".", "_execute", "(", "id", ",", "'stop'", ")", "return", "self", ".", "get_task", "(", "uuid", "=", "task", "[", "'uuid'", "]", ")", "[", "1", "]" ]
Marks a task as stopped.
[ "Marks", "a", "task", "as", "stopped", "." ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/warrior.py#L835-L841
train
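The start/stop pair in use (ids illustrative):

from taskw import TaskWarrior

w = TaskWarrior()
task = w.task_start(id=1)  # the task gains an active 'start' timestamp
task = w.task_stop(id=1)   # timestamp cleared again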
ralphbean/taskw
taskw/warrior.py
Status.to_file
python
def to_file(cls, status):
    """ Returns the file in which this task is stored.  """
    return {
        Status.PENDING: DataFile.PENDING,
        Status.WAITING: DataFile.PENDING,
        Status.COMPLETED: DataFile.COMPLETED,
        Status.DELETED: DataFile.COMPLETED
    }[status]
[ "def", "to_file", "(", "cls", ",", "status", ")", ":", "return", "{", "Status", ".", "PENDING", ":", "DataFile", ".", "PENDING", ",", "Status", ".", "WAITING", ":", "DataFile", ".", "PENDING", ",", "Status", ".", "COMPLETED", ":", "DataFile", ".", "COMPLETED", ",", "Status", ".", "DELETED", ":", "DataFile", ".", "COMPLETED", "}", "[", "status", "]" ]
Returns the file in which this task is stored.
[ "Returns", "the", "file", "in", "which", "this", "task", "is", "stored", "." ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/warrior.py#L896-L903
train
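A small check of the mapping, assuming Status and DataFile are importable from taskw.warrior as the code suggests and to_file is a classmethod:

from taskw.warrior import DataFile, Status

# Waiting tasks share pending.data; deleted tasks share completed.data.
assert Status.to_file(Status.WAITING) == DataFile.PENDING
assert Status.to_file(Status.DELETED) == DataFile.COMPLETED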
ralphbean/taskw
taskw/task.py
Task.from_stub
python
def from_stub(cls, data, udas=None):
    """ Create a Task from an already deserialized dict. """
    udas = udas or {}
    fields = cls.FIELDS.copy()
    fields.update(udas)

    processed = {}
    for k, v in six.iteritems(data):
        processed[k] = cls._serialize(k, v, fields)

    return cls(processed, udas)
[ "def", "from_stub", "(", "cls", ",", "data", ",", "udas", "=", "None", ")", ":", "udas", "=", "udas", "or", "{", "}", "fields", "=", "cls", ".", "FIELDS", ".", "copy", "(", ")", "fields", ".", "update", "(", "udas", ")", "processed", "=", "{", "}", "for", "k", ",", "v", "in", "six", ".", "iteritems", "(", "data", ")", ":", "processed", "[", "k", "]", "=", "cls", ".", "_serialize", "(", "k", ",", "v", ",", "fields", ")", "return", "cls", "(", "processed", ",", "udas", ")" ]
Create a Task from an already deserialized dict.
[ "Create", "a", "Task", "from", "an", "already", "deserialized", "dict", "." ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/task.py#L82-L93
train
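Usage sketch; the field names are ordinary Taskwarrior fields:

from taskw.task import Task

# Values are plain Python; from_stub serializes each field into
# Taskwarrior's wire format before building the Task.
t = Task.from_stub({'description': 'write docs', 'status': 'pending'})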
ralphbean/taskw
taskw/task.py
Task.from_input
python
def from_input(cls, input_file=sys.stdin, modify=False, udas=None):
    """ Create a Task directly from stdin by reading one line.

    If modify=True, two lines are expected, which is consistent with
    the Taskwarrior hook system. The first line is interpreted as the
    original state of the Task, and the second one as the new, modified
    state.

    :param input_file: Input file. Defaults to sys.stdin.
    :param modify: Flag for on-modify hook event. Defaults to False.
    :param udas: Taskrc udas. Defaults to None.
    :return Task
    """
    original_task = input_file.readline().strip()

    if modify:
        modified_task = input_file.readline().strip()
        return cls(json.loads(modified_task), udas=udas)

    return cls(json.loads(original_task), udas=udas)
[ "def", "from_input", "(", "cls", ",", "input_file", "=", "sys", ".", "stdin", ",", "modify", "=", "False", ",", "udas", "=", "None", ")", ":", "original_task", "=", "input_file", ".", "readline", "(", ")", ".", "strip", "(", ")", "if", "modify", ":", "modified_task", "=", "input_file", ".", "readline", "(", ")", ".", "strip", "(", ")", "return", "cls", "(", "json", ".", "loads", "(", "modified_task", ")", ",", "udas", "=", "udas", ")", "return", "cls", "(", "json", ".", "loads", "(", "original_task", ")", ",", "udas", "=", "udas", ")" ]
Create a Task directly from stdin by reading one line. If modify=True, two lines are expected, which is consistent with the Taskwarrior hook system. The first line is interpreted as the original state of the Task, and the second one as the new, modified state. :param input_file: Input file. Defaults to sys.stdin. :param modify: Flag for on-modify hook event. Defaults to False. :param udas: Taskrc udas. Defaults to None. :return Task
[ "Create", "a", "Task", "directly", "from", "stdin", "by", "reading", "one", "line", ".", "If", "modify", "=", "True", "two", "lines", "are", "expected", "which", "is", "consistent", "with", "the", "Taskwarrior", "hook", "system", ".", "The", "first", "line", "is", "interpreted", "as", "the", "original", "state", "of", "the", "Task", "and", "the", "second", "one", "as", "the", "new", "modified", "state", "." ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/task.py#L96-L112
train
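A sketch of the intended on-modify hook shape: Taskwarrior feeds the original task on the first stdin line and the modified one on the second, and modify=True keeps the second. Emitting the task back out as JSON is the hook's job and is only sketched here:

#!/usr/bin/env python
import json
import sys

from taskw.task import Task

task = Task.from_input(input_file=sys.stdin, modify=True)
# A real hook would print the (possibly adjusted) task as one JSON line.
print(json.dumps(task.serialized()))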
ralphbean/taskw
taskw/task.py
Task._deserialize
python
def _deserialize(cls, key, value, fields):
    """ Marshal incoming data into Python objects."""
    converter = cls._get_converter_for_field(key, None, fields)
    return converter.deserialize(value)
[ "def", "_deserialize", "(", "cls", ",", "key", ",", "value", ",", "fields", ")", ":", "converter", "=", "cls", ".", "_get_converter_for_field", "(", "key", ",", "None", ",", "fields", ")", "return", "converter", ".", "deserialize", "(", "value", ")" ]
Marshal incoming data into Python objects.
[ "Marshal", "incoming", "data", "into", "Python", "objects", "." ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/task.py#L123-L126
train
ralphbean/taskw
taskw/task.py
Task._serialize
python
def _serialize(cls, key, value, fields):
    """ Marshal outgoing data into Taskwarrior's JSON format."""
    converter = cls._get_converter_for_field(key, None, fields)
    return converter.serialize(value)
[ "def", "_serialize", "(", "cls", ",", "key", ",", "value", ",", "fields", ")", ":", "converter", "=", "cls", ".", "_get_converter_for_field", "(", "key", ",", "None", ",", "fields", ")", "return", "converter", ".", "serialize", "(", "value", ")" ]
Marshal outgoing data into Taskwarrior's JSON format.
[ "Marshal", "outgoing", "data", "into", "Taskwarrior", "s", "JSON", "format", "." ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/task.py#L129-L132
train
ralphbean/taskw
taskw/task.py
Task.get_changes
python
def get_changes(self, serialized=False, keep=False):
    """ Get a journal of changes that have occurred

    :param `serialized`:
        Return changes in the serialized format used by TaskWarrior.
    :param `keep_changes`:
        By default, the list of changes is reset after running
        ``.get_changes``; set this to `True` if you would like to keep
        the changes recorded following running this command.

    :returns: A dictionary of 2-tuples of changes, where the key is the
        name of the field that has changed, and the value is a 2-tuple
        containing the original value and the final value respectively.
    """
    results = {}

    # Check for explicitly-registered changes
    for k, f, t in self._changes:
        if k not in results:
            results[k] = [f, None]
        results[k][1] = (
            self._serialize(k, t, self._fields)
            if serialized else t
        )

    # Check for changes on subordinate items
    for k, v in six.iteritems(self):
        if isinstance(v, Dirtyable):
            result = v.get_changes(keep=keep)
            if result:
                if k not in results:
                    results[k] = [result[0], None]
                results[k][1] = (
                    self._serialize(k, result[1], self._fields)
                    if serialized else result[1]
                )

    # Clear out recorded changes
    if not keep:
        self._changes = []

    return results
[ "def", "get_changes", "(", "self", ",", "serialized", "=", "False", ",", "keep", "=", "False", ")", ":", "results", "=", "{", "}", "# Check for explicitly-registered changes", "for", "k", ",", "f", ",", "t", "in", "self", ".", "_changes", ":", "if", "k", "not", "in", "results", ":", "results", "[", "k", "]", "=", "[", "f", ",", "None", "]", "results", "[", "k", "]", "[", "1", "]", "=", "(", "self", ".", "_serialize", "(", "k", ",", "t", ",", "self", ".", "_fields", ")", "if", "serialized", "else", "t", ")", "# Check for changes on subordinate items", "for", "k", ",", "v", "in", "six", ".", "iteritems", "(", "self", ")", ":", "if", "isinstance", "(", "v", ",", "Dirtyable", ")", ":", "result", "=", "v", ".", "get_changes", "(", "keep", "=", "keep", ")", "if", "result", ":", "if", "not", "k", "in", "results", ":", "results", "[", "k", "]", "=", "[", "result", "[", "0", "]", ",", "None", "]", "results", "[", "k", "]", "[", "1", "]", "=", "(", "self", ".", "_serialize", "(", "k", ",", "result", "[", "1", "]", ",", "self", ".", "_fields", ")", "if", "serialized", "else", "result", "[", "1", "]", ")", "# Clear out recorded changes", "if", "not", "keep", ":", "self", ".", "_changes", "=", "[", "]", "return", "results" ]
Get a journal of changes that have occurred :param `serialized`: Return changes in the serialized format used by TaskWarrior. :param `keep_changes`: By default, the list of changes is reset after running ``.get_changes``; set this to `True` if you would like to keep the changes recorded following running this command. :returns: A dictionary of 2-tuples of changes, where the key is the name of the field that has changed, and the value is a 2-tuple containing the original value and the final value respectively.
[ "Get", "a", "journal", "of", "changes", "that", "have", "occurred" ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/task.py#L149-L191
train
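A sketch of the change journal, assuming plain item assignment records changes the way Task.update (below) suggests:

from taskw.task import Task

t = Task.from_stub({'description': 'write docs'})
t['description'] = 'write more docs'
print(t.get_changes())  # e.g. {'description': ['write docs', 'write more docs']}
print(t.get_changes())  # {} -- the journal was reset; pass keep=True to retain it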
ralphbean/taskw
taskw/task.py
Task.update
python
def update(self, values, force=False):
    """ Update this task dictionary

    :returns: A dictionary mapping field names specified to be updated
        and a boolean value indicating whether the field was changed.
    """
    results = {}
    for k, v in six.iteritems(values):
        results[k] = self.__setitem__(k, v, force=force)
    return results
[ "def", "update", "(", "self", ",", "values", ",", "force", "=", "False", ")", ":", "results", "=", "{", "}", "for", "k", ",", "v", "in", "six", ".", "iteritems", "(", "values", ")", ":", "results", "[", "k", "]", "=", "self", ".", "__setitem__", "(", "k", ",", "v", ",", "force", "=", "force", ")", "return", "results" ]
Update this task dictionary :returns: A dictionary mapping field names specified to be updated and a boolean value indicating whether the field was changed.
[ "Update", "this", "task", "dictionary" ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/task.py#L193-L203
train
ralphbean/taskw
taskw/task.py
Task.set
python
def set(self, key, value):
    """ Set a key's value regardless of whether a change is seen."""
    return self.__setitem__(key, value, force=True)
[ "def", "set", "(", "self", ",", "key", ",", "value", ")", ":", "return", "self", ".", "__setitem__", "(", "key", ",", "value", ",", "force", "=", "True", ")" ]
Set a key's value regardless of whether a change is seen.
[ "Set", "a", "key", "s", "value", "regardless", "of", "whether", "a", "change", "is", "seen", "." ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/task.py#L205-L207
train
ralphbean/taskw
taskw/task.py
Task.serialized
python
def serialized(self):
    """ Returns a serialized representation of this task."""
    serialized = {}
    for k, v in six.iteritems(self):
        serialized[k] = self._serialize(k, v, self._fields)
    return serialized
[ "def", "serialized", "(", "self", ")", ":", "serialized", "=", "{", "}", "for", "k", ",", "v", "in", "six", ".", "iteritems", "(", "self", ")", ":", "serialized", "[", "k", "]", "=", "self", ".", "_serialize", "(", "k", ",", "v", ",", "self", ".", "_fields", ")", "return", "serialized" ]
Returns a serialized representation of this task.
[ "Returns", "a", "serialized", "representation", "of", "this", "task", "." ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/task.py#L209-L214
train
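The mutation helpers and the serializer together; the field values are illustrative, with 'priority' a standard Taskwarrior field:

from taskw.task import Task

t = Task.from_stub({'description': 'write docs'})
t.update({'priority': 'H'})  # per-field change tracking, as above
t.set('priority', 'H')       # force-set, even though nothing changed
print(t.serialized())        # every field in Taskwarrior wire format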
ralphbean/taskw
taskw/utils.py
encode_task_experimental
python
def encode_task_experimental(task):
    """ Convert a dict-like task to its string representation
        Used for adding a task via `task add`
    """
    # First, clean the task:
    task = task.copy()
    if 'tags' in task:
        task['tags'] = ','.join(task['tags'])
    for k in task:
        task[k] = encode_task_value(k, task[k])

    # Then, format it as a string
    return [
        "%s:\"%s\"" % (k, v) if v else "%s:" % (k, )
        for k, v in sorted(task.items(), key=itemgetter(0))
    ]
[ "def", "encode_task_experimental", "(", "task", ")", ":", "# First, clean the task:", "task", "=", "task", ".", "copy", "(", ")", "if", "'tags'", "in", "task", ":", "task", "[", "'tags'", "]", "=", "','", ".", "join", "(", "task", "[", "'tags'", "]", ")", "for", "k", "in", "task", ":", "task", "[", "k", "]", "=", "encode_task_value", "(", "k", ",", "task", "[", "k", "]", ")", "# Then, format it as a string", "return", "[", "\"%s:\\\"%s\\\"\"", "%", "(", "k", ",", "v", ")", "if", "v", "else", "\"%s:\"", "%", "(", "k", ",", ")", "for", "k", ",", "v", "in", "sorted", "(", "task", ".", "items", "(", ")", ",", "key", "=", "itemgetter", "(", "0", ")", ")", "]" ]
Convert a dict-like task to its string representation Used for adding a task via `task add`
[ "Convert", "a", "dict", "-", "like", "task", "to", "its", "string", "representation", "Used", "for", "adding", "a", "task", "via", "task", "add" ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/utils.py#L125-L140
train
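What the encoder yields for a small task; exact escaping comes from encode_task_value, so treat the output line as approximate:

from taskw.utils import encode_task_experimental

print(encode_task_experimental({
    'description': 'write docs',
    'tags': ['work', 'writing'],
}))
# roughly: ['description:"write docs"', 'tags:"work,writing"']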
ralphbean/taskw
taskw/utils.py
encode_task
python
def encode_task(task):
    """ Convert a dict-like task to its string representation """
    # First, clean the task:
    task = task.copy()
    if 'tags' in task:
        task['tags'] = ','.join(task['tags'])
    for k in task:
        for unsafe, safe in six.iteritems(encode_replacements):
            if isinstance(task[k], six.string_types):
                task[k] = task[k].replace(unsafe, safe)

        if isinstance(task[k], datetime.datetime):
            # Taskwarrior's UTC timestamp format; the flattened source
            # read "%Y%m%dT%M%H%SZ", transposing hour and minute.
            task[k] = task[k].strftime("%Y%m%dT%H%M%SZ")

    # Then, format it as a string
    return "[%s]\n" % " ".join([
        "%s:\"%s\"" % (k, v)
        for k, v in sorted(task.items(), key=itemgetter(0))
    ])
[ "def", "encode_task", "(", "task", ")", ":", "# First, clean the task:", "task", "=", "task", ".", "copy", "(", ")", "if", "'tags'", "in", "task", ":", "task", "[", "'tags'", "]", "=", "','", ".", "join", "(", "task", "[", "'tags'", "]", ")", "for", "k", "in", "task", ":", "for", "unsafe", ",", "safe", "in", "six", ".", "iteritems", "(", "encode_replacements", ")", ":", "if", "isinstance", "(", "task", "[", "k", "]", ",", "six", ".", "string_types", ")", ":", "task", "[", "k", "]", "=", "task", "[", "k", "]", ".", "replace", "(", "unsafe", ",", "safe", ")", "if", "isinstance", "(", "task", "[", "k", "]", ",", "datetime", ".", "datetime", ")", ":", "task", "[", "k", "]", "=", "task", "[", "k", "]", ".", "strftime", "(", "\"%Y%m%dT%M%H%SZ\"", ")", "# Then, format it as a string", "return", "\"[%s]\\n\"", "%", "\" \"", ".", "join", "(", "[", "\"%s:\\\"%s\\\"\"", "%", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "sorted", "(", "task", ".", "items", "(", ")", ",", "key", "=", "itemgetter", "(", "0", ")", ")", "]", ")" ]
Convert a dict-like task to its string representation
[ "Convert", "a", "dict", "-", "like", "task", "to", "its", "string", "representation" ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/utils.py#L143-L161
train
ralphbean/taskw
taskw/utils.py
convert_dict_to_override_args
python
def convert_dict_to_override_args(config, prefix=''):
    """ Converts a dictionary of override arguments into CLI arguments.

    * Converts leaf nodes into dot paths of key names leading to the leaf
      node.
    * Does not include paths to leaf nodes not being non-dictionary type.

    See `taskw.test.test_utils.TestUtils.test_convert_dict_to_override_args`
    for details.
    """
    args = []
    for k, v in six.iteritems(config):
        if isinstance(v, dict):
            args.extend(
                convert_dict_to_override_args(
                    v,
                    prefix='.'.join([
                        prefix,
                        k,
                    ]) if prefix else k
                )
            )
        else:
            v = six.text_type(v)
            left = 'rc' + (('.' + prefix) if prefix else '') + '.' + k
            right = v if ' ' not in v else '"%s"' % v
            args.append('='.join([left, right]))
    return args
[ "def", "convert_dict_to_override_args", "(", "config", ",", "prefix", "=", "''", ")", ":", "args", "=", "[", "]", "for", "k", ",", "v", "in", "six", ".", "iteritems", "(", "config", ")", ":", "if", "isinstance", "(", "v", ",", "dict", ")", ":", "args", ".", "extend", "(", "convert_dict_to_override_args", "(", "v", ",", "prefix", "=", "'.'", ".", "join", "(", "[", "prefix", ",", "k", ",", "]", ")", "if", "prefix", "else", "k", ")", ")", "else", ":", "v", "=", "six", ".", "text_type", "(", "v", ")", "left", "=", "'rc'", "+", "(", "(", "'.'", "+", "prefix", ")", "if", "prefix", "else", "''", ")", "+", "'.'", "+", "k", "right", "=", "v", "if", "' '", "not", "in", "v", "else", "'\"%s\"'", "%", "v", "args", ".", "append", "(", "'='", ".", "join", "(", "[", "left", ",", "right", "]", ")", ")", "return", "args" ]
Converts a dictionary of override arguments into CLI arguments. * Converts leaf nodes into dot paths of key names leading to the leaf node. * Does not include paths to leaf nodes not being non-dictionary type. See `taskw.test.test_utils.TestUtils.test_convert_dict_to_override_args` for details.
[ "Converts", "a", "dictionary", "of", "override", "arguments", "into", "CLI", "arguments", "." ]
11e2f9132eaedd157f514538de9b5f3b69c30a52
https://github.com/ralphbean/taskw/blob/11e2f9132eaedd157f514538de9b5f3b69c30a52/taskw/utils.py#L235-L263
train
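The nesting-to-dot-path behavior, matching the test the docstring points at:

from taskw.utils import convert_dict_to_override_args

print(convert_dict_to_override_args({
    'uda': {'priority': {'type': 'string'}},
    'verbose': 'nothing',
}))
# ['rc.uda.priority.type=string', 'rc.verbose=nothing'] (dict order)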
twoolie/NBT
examples/block_analysis.py
stats_per_chunk
python
def stats_per_chunk(chunk):
    """Given a chunk, increment the block types with the number of blocks found"""
    for block_id in chunk.iter_block():
        try:
            block_counts[block_id] += 1
        except KeyError:
            block_counts[block_id] = 1
[ "def", "stats_per_chunk", "(", "chunk", ")", ":", "for", "block_id", "in", "chunk", ".", "iter_block", "(", ")", ":", "try", ":", "block_counts", "[", "block_id", "]", "+=", "1", "except", "KeyError", ":", "block_counts", "[", "block_id", "]", "=", "1" ]
Given a chunk, increment the block types with the number of blocks found
[ "Given", "a", "chunk", "increment", "the", "block", "types", "with", "the", "number", "of", "blocks", "found" ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/examples/block_analysis.py#L23-L30
train
twoolie/NBT
examples/block_analysis.py
bounded_stats_per_chunk
python
def bounded_stats_per_chunk(chunk, block_counts, start, stop):
    """Given a chunk, update block_counts with the number of blocks of each type within the specified selection"""
    chunk_z, chunk_x = chunk.get_coords()
    for z in range(16):
        world_z = z + chunk_z * 16
        if ( (start != None and world_z < int(start[2])) or
             (stop != None and world_z > int(stop[2])) ):
            # Outside the bounding box; skip to next iteration
            # print("Z break: %d,%d,%d" % (world_z, start[2], stop[2]))
            break
        for x in range(16):
            world_x = x + chunk_x * 16
            if ( (start != None and world_x < int(start[0])) or
                 (stop != None and world_x > int(stop[0])) ):
                # Outside the bounding box; skip to next iteration
                # print("X break: %d,%d,%d" % (world_x, start[0], stop[0]))
                break
            for y in range(chunk.get_max_height() + 1):
                if ( (start != None and y < int(start[1])) or
                     (stop != None and y > int(stop[1])) ):
                    # Outside the bounding box; skip to next iteration
                    # print("Y break: %d,%d,%d" % (y, start[1], stop[1]))
                    break
                # print("Chunk: %d,%d Coord: %d,%d,%d" % (c['x'], c['z'], x, y, z))
                block_id = chunk.get_block(x, y, z)
                if block_id != None:
                    try:
                        block_counts[block_id] += 1
                    except KeyError:
                        block_counts[block_id] = 1
[ "def", "bounded_stats_per_chunk", "(", "chunk", ",", "block_counts", ",", "start", ",", "stop", ")", ":", "chunk_z", ",", "chunk_x", "=", "chunk", ".", "get_coords", "(", ")", "for", "z", "in", "range", "(", "16", ")", ":", "world_z", "=", "z", "+", "chunk_z", "*", "16", "if", "(", "(", "start", "!=", "None", "and", "world_z", "<", "int", "(", "start", "[", "2", "]", ")", ")", "or", "(", "stop", "!=", "None", "and", "world_z", ">", "int", "(", "stop", "[", "2", "]", ")", ")", ")", ":", "# Outside the bounding box; skip to next iteration", "#print(\"Z break: %d,%d,%d\" % (world_z,start[2],stop[2]))", "break", "for", "x", "in", "range", "(", "16", ")", ":", "world_x", "=", "x", "+", "chunk_x", "*", "16", "if", "(", "(", "start", "!=", "None", "and", "world_x", "<", "int", "(", "start", "[", "0", "]", ")", ")", "or", "(", "stop", "!=", "None", "and", "world_x", ">", "int", "(", "stop", "[", "0", "]", ")", ")", ")", ":", "# Outside the bounding box; skip to next iteration", "#print(\"X break: %d,%d,%d\" % (world_x,start[0],stop[0]))", "break", "for", "y", "in", "range", "(", "chunk", ".", "get_max_height", "(", ")", "+", "1", ")", ":", "if", "(", "(", "start", "!=", "None", "and", "y", "<", "int", "(", "start", "[", "1", "]", ")", ")", "or", "(", "stop", "!=", "None", "and", "y", ">", "int", "(", "stop", "[", "1", "]", ")", ")", ")", ":", "# Outside the bounding box; skip to next iteration", "#print(\"Y break: %d,%d,%d\" % (y,start[1],stop[1]))", "break", "#print(\"Chunk: %d,%d Coord: %d,%d,%d\" % (c['x'], c['z'],x,y,z))", "block_id", "=", "chunk", ".", "get_block", "(", "x", ",", "y", ",", "z", ")", "if", "block_id", "!=", "None", ":", "try", ":", "block_counts", "[", "block_id", "]", "+=", "1", "except", "KeyError", ":", "block_counts", "[", "block_id", "]", "=", "1" ]
Given a chunk, return the number of block types within the specified selection
[ "Given", "a", "chunk", "return", "the", "number", "of", "block", "types", "within", "the", "specified", "selection" ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/examples/block_analysis.py#L33-L60
train
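A minimal usage sketch for bounded_stats_per_chunk above, assuming the package's WorldFolder factory and an iter_chunks() helper on the returned folder (neither appears in this record); the save path is hypothetical:

from nbt.world import WorldFolder   # assumed import from the same package

world = WorldFolder("/path/to/world")        # hypothetical save location
block_counts = {}
start, stop = (0, 0, 0), (255, 127, 255)     # (x, y, z) corners of the selection; None disables a bound
for chunk in world.iter_chunks():            # assumed helper yielding Chunk objects
    bounded_stats_per_chunk(chunk, block_counts, start, stop)
print(sorted(block_counts.items()))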
twoolie/NBT
examples/block_analysis.py
process_region_file
def process_region_file(region, start, stop):
    """Given a region, return the number of blocks of each ID in that region"""
    rx = region.loc.x
    rz = region.loc.z

    # Does the region overlap the bounding box at all?
    if (start != None):
        if ( (rx+1)*512-1 < int(start[0]) or (rz+1)*512-1 < int(start[2]) ):
            return
    elif (stop != None):
        if ( rx*512-1 > int(stop[0]) or rz*512-1 > int(stop[2]) ):
            return

    # Get all chunks
    print("Parsing region %s..." % os.path.basename(region.filename))
    for c in region.iter_chunks_class():
        cx, cz = c.get_coords()
        # Does the chunk overlap the bounding box at all?
        if (start != None):
            if ( (cx+1)*16 + rx*512 - 1 < int(start[0]) or (cz+1)*16 + rz*512 - 1 < int(start[2]) ):
                continue
        elif (stop != None):
            if ( cx*16 + rx*512 - 1 > int(stop[0]) or cz*16 + rz*512 - 1 > int(stop[2]) ):
                continue

        #print("Parsing chunk (" + str(cx) + ", " + str(cz) + ")...")

        # Fast code if no start or stop coordinates are specified
        # TODO: also use this code if start/stop is specified, but the complete chunk is included
        if (start == None and stop == None):
            stats_per_chunk(c)
        else:
            # Slow code that iterates through each coordinate
            bounded_stats_per_chunk(c, start, stop)
python
def process_region_file(region, start, stop):
    """Given a region, return the number of blocks of each ID in that region"""
    rx = region.loc.x
    rz = region.loc.z

    # Does the region overlap the bounding box at all?
    if (start != None):
        if ( (rx+1)*512-1 < int(start[0]) or (rz+1)*512-1 < int(start[2]) ):
            return
    elif (stop != None):
        if ( rx*512-1 > int(stop[0]) or rz*512-1 > int(stop[2]) ):
            return

    # Get all chunks
    print("Parsing region %s..." % os.path.basename(region.filename))
    for c in region.iter_chunks_class():
        cx, cz = c.get_coords()
        # Does the chunk overlap the bounding box at all?
        if (start != None):
            if ( (cx+1)*16 + rx*512 - 1 < int(start[0]) or (cz+1)*16 + rz*512 - 1 < int(start[2]) ):
                continue
        elif (stop != None):
            if ( cx*16 + rx*512 - 1 > int(stop[0]) or cz*16 + rz*512 - 1 > int(stop[2]) ):
                continue

        #print("Parsing chunk (" + str(cx) + ", " + str(cz) + ")...")

        # Fast code if no start or stop coordinates are specified
        # TODO: also use this code if start/stop is specified, but the complete chunk is included
        if (start == None and stop == None):
            stats_per_chunk(c)
        else:
            # Slow code that iterates through each coordinate
            bounded_stats_per_chunk(c, start, stop)
[ "def", "process_region_file", "(", "region", ",", "start", ",", "stop", ")", ":", "rx", "=", "region", ".", "loc", ".", "x", "rz", "=", "region", ".", "loc", ".", "z", "# Does the region overlap the bounding box at all?", "if", "(", "start", "!=", "None", ")", ":", "if", "(", "(", "rx", "+", "1", ")", "*", "512", "-", "1", "<", "int", "(", "start", "[", "0", "]", ")", "or", "(", "rz", "+", "1", ")", "*", "512", "-", "1", "<", "int", "(", "start", "[", "2", "]", ")", ")", ":", "return", "elif", "(", "stop", "!=", "None", ")", ":", "if", "(", "rx", "*", "512", "-", "1", ">", "int", "(", "stop", "[", "0", "]", ")", "or", "rz", "*", "512", "-", "1", ">", "int", "(", "stop", "[", "2", "]", ")", ")", ":", "return", "# Get all chunks", "print", "(", "\"Parsing region %s...\"", "%", "os", ".", "path", ".", "basename", "(", "region", ".", "filename", ")", ")", "for", "c", "in", "region", ".", "iter_chunks_class", "(", ")", ":", "cx", ",", "cz", "=", "c", ".", "get_coords", "(", ")", "# Does the chunk overlap the bounding box at all?", "if", "(", "start", "!=", "None", ")", ":", "if", "(", "(", "cx", "+", "1", ")", "*", "16", "+", "rx", "*", "512", "-", "1", "<", "int", "(", "start", "[", "0", "]", ")", "or", "(", "cz", "+", "1", ")", "*", "16", "+", "rz", "*", "512", "-", "1", "<", "int", "(", "start", "[", "2", "]", ")", ")", ":", "continue", "elif", "(", "stop", "!=", "None", ")", ":", "if", "(", "cx", "*", "16", "+", "rx", "*", "512", "-", "1", ">", "int", "(", "stop", "[", "0", "]", ")", "or", "cz", "*", "16", "+", "rz", "*", "512", "-", "1", ">", "int", "(", "stop", "[", "2", "]", ")", ")", ":", "continue", "#print(\"Parsing chunk (\" + str(cx) + \", \" + str(cz) + \")...\")", "# Fast code if no start or stop coordinates are specified", "# TODO: also use this code if start/stop is specified, but the complete chunk is included", "if", "(", "start", "==", "None", "and", "stop", "==", "None", ")", ":", "stats_per_chunk", "(", "c", ")", "else", ":", "# Slow code that iterates through each coordinate", "bounded_stats_per_chunk", "(", "c", ",", "start", ",", "stop", ")" ]
Given a region, return the number of blocks of each ID in that region
[ "Given", "a", "region", "return", "the", "number", "of", "blocks", "of", "each", "ID", "in", "that", "region" ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/examples/block_analysis.py#L63-L96
train
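A hedged sketch of driving process_region_file over a whole save, using the iter_regions() method documented later in this section; WorldFolder is assumed as in the previous sketch:

world = WorldFolder("/path/to/world")        # hypothetical path
for region in world.iter_regions():
    process_region_file(region, start=None, stop=None)   # None bounds select the entire region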
twoolie/NBT
nbt/world.py
_BaseWorldFolder.get_region
def get_region(self, x,z):
    """Get a region using x,z coordinates of a region. Cache results."""
    if (x,z) not in self.regions:
        if (x,z) in self.regionfiles:
            self.regions[(x,z)] = region.RegionFile(self.regionfiles[(x,z)])
        else:
            # Return an empty RegionFile object
            # TODO: this does not yet allow for saving of the region file
            # TODO: this currently fails with a ValueError!
            # TODO: generate the correct name, and create the file
            # and add the file to self.regionfiles
            self.regions[(x,z)] = region.RegionFile()
        self.regions[(x,z)].loc = Location(x=x,z=z)
    return self.regions[(x,z)]
python
def get_region(self, x,z):
    """Get a region using x,z coordinates of a region. Cache results."""
    if (x,z) not in self.regions:
        if (x,z) in self.regionfiles:
            self.regions[(x,z)] = region.RegionFile(self.regionfiles[(x,z)])
        else:
            # Return an empty RegionFile object
            # TODO: this does not yet allow for saving of the region file
            # TODO: this currently fails with a ValueError!
            # TODO: generate the correct name, and create the file
            # and add the file to self.regionfiles
            self.regions[(x,z)] = region.RegionFile()
        self.regions[(x,z)].loc = Location(x=x,z=z)
    return self.regions[(x,z)]
[ "def", "get_region", "(", "self", ",", "x", ",", "z", ")", ":", "if", "(", "x", ",", "z", ")", "not", "in", "self", ".", "regions", ":", "if", "(", "x", ",", "z", ")", "in", "self", ".", "regionfiles", ":", "self", ".", "regions", "[", "(", "x", ",", "z", ")", "]", "=", "region", ".", "RegionFile", "(", "self", ".", "regionfiles", "[", "(", "x", ",", "z", ")", "]", ")", "else", ":", "# Return an empty RegionFile object", "# TODO: this does not yet allow for saving of the region file", "# TODO: this currently fails with a ValueError!", "# TODO: generate the correct name, and create the file", "# and add the fie to self.regionfiles", "self", ".", "regions", "[", "(", "x", ",", "z", ")", "]", "=", "region", ".", "RegionFile", "(", ")", "self", ".", "regions", "[", "(", "x", ",", "z", ")", "]", ".", "loc", "=", "Location", "(", "x", "=", "x", ",", "z", "=", "z", ")", "return", "self", ".", "regions", "[", "(", "x", ",", "z", ")", "]" ]
Get a region using x,z coordinates of a region. Cache results.
[ "Get", "a", "region", "using", "x", "z", "coordinates", "of", "a", "region", ".", "Cache", "results", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/world.py#L77-L90
train
twoolie/NBT
nbt/world.py
_BaseWorldFolder.iter_regions
def iter_regions(self):
    """
    Return an iterable of all region files. Use this function if you only
    want to loop through each region file once, and do not want to cache the results.
    """
    # TODO: Implement BoundingBox
    # TODO: Implement sort order
    for x,z in self.regionfiles.keys():
        close_after_use = False
        if (x,z) in self.regions:
            regionfile = self.regions[(x,z)]
        else:
            # It is not yet cached.
            # Get file, but do not cache later.
            regionfile = region.RegionFile(self.regionfiles[(x,z)], chunkclass = self.chunkclass)
            regionfile.loc = Location(x=x,z=z)
            close_after_use = True
        try:
            yield regionfile
        finally:
            if close_after_use:
                regionfile.close()
python
def iter_regions(self):
    """
    Return an iterable of all region files. Use this function if you only
    want to loop through each region file once, and do not want to cache the results.
    """
    # TODO: Implement BoundingBox
    # TODO: Implement sort order
    for x,z in self.regionfiles.keys():
        close_after_use = False
        if (x,z) in self.regions:
            regionfile = self.regions[(x,z)]
        else:
            # It is not yet cached.
            # Get file, but do not cache later.
            regionfile = region.RegionFile(self.regionfiles[(x,z)], chunkclass = self.chunkclass)
            regionfile.loc = Location(x=x,z=z)
            close_after_use = True
        try:
            yield regionfile
        finally:
            if close_after_use:
                regionfile.close()
[ "def", "iter_regions", "(", "self", ")", ":", "# TODO: Implement BoundingBox", "# TODO: Implement sort order", "for", "x", ",", "z", "in", "self", ".", "regionfiles", ".", "keys", "(", ")", ":", "close_after_use", "=", "False", "if", "(", "x", ",", "z", ")", "in", "self", ".", "regions", ":", "regionfile", "=", "self", ".", "regions", "[", "(", "x", ",", "z", ")", "]", "else", ":", "# It is not yet cached.", "# Get file, but do not cache later.", "regionfile", "=", "region", ".", "RegionFile", "(", "self", ".", "regionfiles", "[", "(", "x", ",", "z", ")", "]", ",", "chunkclass", "=", "self", ".", "chunkclass", ")", "regionfile", ".", "loc", "=", "Location", "(", "x", "=", "x", ",", "z", "=", "z", ")", "close_after_use", "=", "True", "try", ":", "yield", "regionfile", "finally", ":", "if", "close_after_use", ":", "regionfile", ".", "close", "(", ")" ]
Return an iterable of all region files. Use this function if you only want to loop through each region file once, and do not want to cache the results.
[ "Return", "an", "iterable", "of", "all", "region", "files", ".", "Use", "this", "function", "if", "you", "only", "want", "to", "loop", "through", "each", "region", "file", "once", "and", "do", "not", "want", "to", "cache", "the", "results", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/world.py#L92-L113
train
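A short sketch of the generator above; note that region files opened only for the loop are closed in the finally clause once iteration moves on (world assumed as before):

world = WorldFolder("/path/to/world")        # hypothetical path
total = 0
for regionfile in world.iter_regions():
    total += regionfile.chunk_count()        # uncached files are closed after each yield
print("chunks:", total)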
twoolie/NBT
nbt/world.py
_BaseWorldFolder.get_nbt
def get_nbt(self,x,z):
    """
    Return an NBT specified by the chunk coordinates x,z. Raise InconceivedChunk
    if the NBT file is not yet generated. To get a Chunk object, use get_chunk.
    """
    rx,cx = divmod(x,32)
    rz,cz = divmod(z,32)
    if (rx,rz) not in self.regions and (rx,rz) not in self.regionfiles:
        raise InconceivedChunk("Chunk %s,%s is not present in world" % (x,z))
    nbt = self.get_region(rx,rz).get_nbt(cx,cz)
    assert nbt != None
    return nbt
python
def get_nbt(self,x,z):
    """
    Return an NBT specified by the chunk coordinates x,z. Raise InconceivedChunk
    if the NBT file is not yet generated. To get a Chunk object, use get_chunk.
    """
    rx,cx = divmod(x,32)
    rz,cz = divmod(z,32)
    if (rx,rz) not in self.regions and (rx,rz) not in self.regionfiles:
        raise InconceivedChunk("Chunk %s,%s is not present in world" % (x,z))
    nbt = self.get_region(rx,rz).get_nbt(cx,cz)
    assert nbt != None
    return nbt
[ "def", "get_nbt", "(", "self", ",", "x", ",", "z", ")", ":", "rx", ",", "cx", "=", "divmod", "(", "x", ",", "32", ")", "rz", ",", "cz", "=", "divmod", "(", "z", ",", "32", ")", "if", "(", "rx", ",", "rz", ")", "not", "in", "self", ".", "regions", "and", "(", "rx", ",", "rz", ")", "not", "in", "self", ".", "regionfiles", ":", "raise", "InconceivedChunk", "(", "\"Chunk %s,%s is not present in world\"", "%", "(", "x", ",", "z", ")", ")", "nbt", "=", "self", ".", "get_region", "(", "rx", ",", "rz", ")", ".", "get_nbt", "(", "cx", ",", "cz", ")", "assert", "nbt", "!=", "None", "return", "nbt" ]
Return an NBT specified by the chunk coordinates x,z. Raise InconceivedChunk if the NBT file is not yet generated. To get a Chunk object, use get_chunk.
[ "Return", "an", "NBT", "specified", "by", "the", "chunk", "coordinates", "x", "z", ".", "Raise", "InconceivedChunk", "if", "the", "NBT", "file", "is", "not", "yet", "generated", ".", "To", "get", "a", "Chunk", "object", "use", "get_chunk", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/world.py#L130-L141
train
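The divmod calls above split a global chunk coordinate into a region index and a chunk slot within that region (32 chunks per region axis); a worked example:

rx, cx = divmod(-35, 32)    # (-2, 29): chunk x=-35 sits in region -2, local slot 29
rz, cz = divmod(70, 32)     # (2, 6)
# get_nbt(-35, 70) therefore reads local chunk (29, 6) from region file (-2, 2)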
twoolie/NBT
nbt/world.py
_BaseWorldFolder.get_chunk
def get_chunk(self,x,z):
    """
    Return a chunk specified by the chunk coordinates x,z. Raise InconceivedChunk
    if the chunk is not yet generated. To get the raw NBT data, use get_nbt.
    """
    return self.chunkclass(self.get_nbt(x, z))
python
def get_chunk(self,x,z):
    """
    Return a chunk specified by the chunk coordinates x,z. Raise InconceivedChunk
    if the chunk is not yet generated. To get the raw NBT data, use get_nbt.
    """
    return self.chunkclass(self.get_nbt(x, z))
[ "def", "get_chunk", "(", "self", ",", "x", ",", "z", ")", ":", "return", "self", ".", "chunkclass", "(", "self", ".", "get_nbt", "(", "x", ",", "z", ")", ")" ]
Return a chunk specified by the chunk coordinates x,z. Raise InconceivedChunk if the chunk is not yet generated. To get the raw NBT data, use get_nbt.
[ "Return", "a", "chunk", "specified", "by", "the", "chunk", "coordinates", "x", "z", ".", "Raise", "InconceivedChunk", "if", "the", "chunk", "is", "not", "yet", "generated", ".", "To", "get", "the", "raw", "NBT", "data", "use", "get_nbt", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/world.py#L178-L183
train
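Typical use, with the exception documented above; world is assumed constructed as in the earlier sketches:

from nbt.world import InconceivedChunk      # raised by get_nbt/get_chunk

try:
    chunk = world.get_chunk(10, -4)
except InconceivedChunk:
    chunk = None                            # that part of the map was never generated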
twoolie/NBT
nbt/world.py
_BaseWorldFolder.chunk_count
def chunk_count(self):
    """Return a count of the chunks in this world folder."""
    c = 0
    for r in self.iter_regions():
        c += r.chunk_count()
    return c
python
def chunk_count(self):
    """Return a count of the chunks in this world folder."""
    c = 0
    for r in self.iter_regions():
        c += r.chunk_count()
    return c
[ "def", "chunk_count", "(", "self", ")", ":", "c", "=", "0", "for", "r", "in", "self", ".", "iter_regions", "(", ")", ":", "c", "+=", "r", ".", "chunk_count", "(", ")", "return", "c" ]
Return a count of the chunks in this world folder.
[ "Return", "a", "count", "of", "the", "chunks", "in", "this", "world", "folder", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/world.py#L209-L214
train
twoolie/NBT
nbt/world.py
_BaseWorldFolder.get_boundingbox
def get_boundingbox(self):
    """
    Return minimum and maximum x and z coordinates of the chunks that
    make up this world save
    """
    b = BoundingBox()
    for rx,rz in self.regionfiles.keys():
        region = self.get_region(rx,rz)
        rx,rz = 32*rx,32*rz
        for cc in region.get_chunk_coords():
            x,z = (rx+cc['x'],rz+cc['z'])
            b.expand(x,None,z)
    return b
python
def get_boundingbox(self):
    """
    Return minimum and maximum x and z coordinates of the chunks that
    make up this world save
    """
    b = BoundingBox()
    for rx,rz in self.regionfiles.keys():
        region = self.get_region(rx,rz)
        rx,rz = 32*rx,32*rz
        for cc in region.get_chunk_coords():
            x,z = (rx+cc['x'],rz+cc['z'])
            b.expand(x,None,z)
    return b
[ "def", "get_boundingbox", "(", "self", ")", ":", "b", "=", "BoundingBox", "(", ")", "for", "rx", ",", "rz", "in", "self", ".", "regionfiles", ".", "keys", "(", ")", ":", "region", "=", "self", ".", "get_region", "(", "rx", ",", "rz", ")", "rx", ",", "rz", "=", "32", "*", "rx", ",", "32", "*", "rz", "for", "cc", "in", "region", ".", "get_chunk_coords", "(", ")", ":", "x", ",", "z", "=", "(", "rx", "+", "cc", "[", "'x'", "]", ",", "rz", "+", "cc", "[", "'z'", "]", ")", "b", ".", "expand", "(", "x", ",", "None", ",", "z", ")", "return", "b" ]
Return minimum and maximum x and z coordinates of the chunks that make up this world save
[ "Return", "minimum", "and", "maximum", "x", "and", "z", "coordinates", "of", "the", "chunks", "that", "make", "up", "this", "world", "save" ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/world.py#L216-L228
train
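Usage sketch (world assumed as above); the box tracks chunk x/z extremes only, since expand() is called with y=None:

bb = world.get_boundingbox()
print(bb.minx, bb.maxx, bb.minz, bb.maxz)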
twoolie/NBT
nbt/world.py
BoundingBox.expand
def expand(self,x,y,z):
    """
    Expands the bounding box to include the given coordinates.
    """
    if x != None:
        if self.minx is None or x < self.minx:
            self.minx = x
        if self.maxx is None or x > self.maxx:
            self.maxx = x
    if y != None:
        if self.miny is None or y < self.miny:
            self.miny = y
        if self.maxy is None or y > self.maxy:
            self.maxy = y
    if z != None:
        if self.minz is None or z < self.minz:
            self.minz = z
        if self.maxz is None or z > self.maxz:
            self.maxz = z
python
def expand(self,x,y,z):
    """
    Expands the bounding box to include the given coordinates.
    """
    if x != None:
        if self.minx is None or x < self.minx:
            self.minx = x
        if self.maxx is None or x > self.maxx:
            self.maxx = x
    if y != None:
        if self.miny is None or y < self.miny:
            self.miny = y
        if self.maxy is None or y > self.maxy:
            self.maxy = y
    if z != None:
        if self.minz is None or z < self.minz:
            self.minz = z
        if self.maxz is None or z > self.maxz:
            self.maxz = z
[ "def", "expand", "(", "self", ",", "x", ",", "y", ",", "z", ")", ":", "if", "x", "!=", "None", ":", "if", "self", ".", "minx", "is", "None", "or", "x", "<", "self", ".", "minx", ":", "self", ".", "minx", "=", "x", "if", "self", ".", "maxx", "is", "None", "or", "x", ">", "self", ".", "maxx", ":", "self", ".", "maxx", "=", "x", "if", "y", "!=", "None", ":", "if", "self", ".", "miny", "is", "None", "or", "y", "<", "self", ".", "miny", ":", "self", ".", "miny", "=", "y", "if", "self", ".", "maxy", "is", "None", "or", "y", ">", "self", ".", "maxy", ":", "self", ".", "maxy", "=", "y", "if", "z", "!=", "None", ":", "if", "self", ".", "minz", "is", "None", "or", "z", "<", "self", ".", "minz", ":", "self", ".", "minz", "=", "z", "if", "self", ".", "maxz", "is", "None", "or", "z", ">", "self", ".", "maxz", ":", "self", ".", "maxz", "=", "z" ]
Expands the bounding box to include the given coordinates.
[ "Expands", "the", "bounding", "box", "to", "include", "the", "given", "coordinates", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/world.py#L275-L293
train
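A small worked example, assuming a fresh BoundingBox starts with all bounds at None (as the is None checks suggest):

b = BoundingBox()
b.expand(3, None, -7)       # None for y leaves the y bounds untouched
b.expand(-2, None, 9)
print(b.minx, b.maxx, b.minz, b.maxz)   # -2 3 -7 9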
twoolie/NBT
examples/utilities.py
unpack_nbt
def unpack_nbt(tag):
    """
    Unpack an NBT tag into a native Python data structure.
    """
    if isinstance(tag, TAG_List):
        return [unpack_nbt(i) for i in tag.tags]
    elif isinstance(tag, TAG_Compound):
        return dict((i.name, unpack_nbt(i)) for i in tag.tags)
    else:
        return tag.value
python
def unpack_nbt(tag):
    """
    Unpack an NBT tag into a native Python data structure.
    """
    if isinstance(tag, TAG_List):
        return [unpack_nbt(i) for i in tag.tags]
    elif isinstance(tag, TAG_Compound):
        return dict((i.name, unpack_nbt(i)) for i in tag.tags)
    else:
        return tag.value
[ "def", "unpack_nbt", "(", "tag", ")", ":", "if", "isinstance", "(", "tag", ",", "TAG_List", ")", ":", "return", "[", "unpack_nbt", "(", "i", ")", "for", "i", "in", "tag", ".", "tags", "]", "elif", "isinstance", "(", "tag", ",", "TAG_Compound", ")", ":", "return", "dict", "(", "(", "i", ".", "name", ",", "unpack_nbt", "(", "i", ")", ")", "for", "i", "in", "tag", ".", "tags", ")", "else", ":", "return", "tag", ".", "value" ]
Unpack an NBT tag into a native Python data structure.
[ "Unpack", "an", "NBT", "tag", "into", "a", "native", "Python", "data", "structure", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/examples/utilities.py#L21-L31
train
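Usage sketch; the file name and key path are illustrative only and assume a vanilla level.dat layout:

from nbt.nbt import NBTFile

level = NBTFile("level.dat")            # hypothetical gzip-compressed NBT file
data = unpack_nbt(level)                # plain dicts/lists/scalars instead of TAG objects
print(data["Data"]["LevelName"])        # key path is an assumption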
twoolie/NBT
nbt/region.py
RegionFile._init_file
def _init_file(self):
    """Initialise the file header. This will erase any data previously in the file."""
    header_length = 2*SECTOR_LENGTH
    if self.size > header_length:
        self.file.truncate(header_length)
    self.file.seek(0)
    self.file.write(header_length*b'\x00')
    self.size = header_length
python
def _init_file(self):
    """Initialise the file header. This will erase any data previously in the file."""
    header_length = 2*SECTOR_LENGTH
    if self.size > header_length:
        self.file.truncate(header_length)
    self.file.seek(0)
    self.file.write(header_length*b'\x00')
    self.size = header_length
[ "def", "_init_file", "(", "self", ")", ":", "header_length", "=", "2", "*", "SECTOR_LENGTH", "if", "self", ".", "size", ">", "header_length", ":", "self", ".", "file", ".", "truncate", "(", "header_length", ")", "self", ".", "file", ".", "seek", "(", "0", ")", "self", ".", "file", ".", "write", "(", "header_length", "*", "b'\\x00'", ")", "self", ".", "size", "=", "header_length" ]
Initialise the file header. This will erase any data previously in the file.
[ "Initialise", "the", "file", "header", ".", "This", "will", "erase", "any", "data", "previously", "in", "the", "file", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/region.py#L297-L304
train
twoolie/NBT
nbt/region.py
RegionFile._sectors
def _sectors(self, ignore_chunk=None):
    """
    Return a list of all sectors; each sector is a list of chunks occupying the block.
    """
    sectorsize = self._bytes_to_sector(self.size)
    sectors = [[] for s in range(sectorsize)]
    sectors[0] = True # locations
    sectors[1] = True # timestamps
    for m in self.metadata.values():
        if not m.is_created():
            continue
        if ignore_chunk == m:
            continue
        if m.blocklength and m.blockstart:
            blockend = m.blockstart + max(m.blocklength, m.requiredblocks())
            # Ensure 2 <= b < sectorsize, as well as m.blockstart <= b < blockend
            for b in range(max(m.blockstart, 2), min(blockend, sectorsize)):
                sectors[b].append(m)
    return sectors
python
def _sectors(self, ignore_chunk=None):
    """
    Return a list of all sectors; each sector is a list of chunks occupying the block.
    """
    sectorsize = self._bytes_to_sector(self.size)
    sectors = [[] for s in range(sectorsize)]
    sectors[0] = True # locations
    sectors[1] = True # timestamps
    for m in self.metadata.values():
        if not m.is_created():
            continue
        if ignore_chunk == m:
            continue
        if m.blocklength and m.blockstart:
            blockend = m.blockstart + max(m.blocklength, m.requiredblocks())
            # Ensure 2 <= b < sectorsize, as well as m.blockstart <= b < blockend
            for b in range(max(m.blockstart, 2), min(blockend, sectorsize)):
                sectors[b].append(m)
    return sectors
[ "def", "_sectors", "(", "self", ",", "ignore_chunk", "=", "None", ")", ":", "sectorsize", "=", "self", ".", "_bytes_to_sector", "(", "self", ".", "size", ")", "sectors", "=", "[", "[", "]", "for", "s", "in", "range", "(", "sectorsize", ")", "]", "sectors", "[", "0", "]", "=", "True", "# locations", "sectors", "[", "1", "]", "=", "True", "# timestamps", "for", "m", "in", "self", ".", "metadata", ".", "values", "(", ")", ":", "if", "not", "m", ".", "is_created", "(", ")", ":", "continue", "if", "ignore_chunk", "==", "m", ":", "continue", "if", "m", ".", "blocklength", "and", "m", ".", "blockstart", ":", "blockend", "=", "m", ".", "blockstart", "+", "max", "(", "m", ".", "blocklength", ",", "m", ".", "requiredblocks", "(", ")", ")", "# Ensure 2 <= b < sectorsize, as well as m.blockstart <= b < blockend", "for", "b", "in", "range", "(", "max", "(", "m", ".", "blockstart", ",", "2", ")", ",", "min", "(", "blockend", ",", "sectorsize", ")", ")", ":", "sectors", "[", "b", "]", ".", "append", "(", "m", ")", "return", "sectors" ]
Return a list of all sectors; each sector is a list of chunks occupying the block.
[ "Return", "a", "list", "of", "all", "sectors", "each", "sector", "is", "a", "list", "of", "chunks", "occupying", "the", "block", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/region.py#L384-L402
train
twoolie/NBT
nbt/region.py
RegionFile._locate_free_sectors
def _locate_free_sectors(self, ignore_chunk=None):
    """Return a list of booleans, indicating the free sectors."""
    sectors = self._sectors(ignore_chunk=ignore_chunk)
    # Sectors are considered free, if the value is an empty list.
    return [not i for i in sectors]
python
def _locate_free_sectors(self, ignore_chunk=None):
    """Return a list of booleans, indicating the free sectors."""
    sectors = self._sectors(ignore_chunk=ignore_chunk)
    # Sectors are considered free, if the value is an empty list.
    return [not i for i in sectors]
[ "def", "_locate_free_sectors", "(", "self", ",", "ignore_chunk", "=", "None", ")", ":", "sectors", "=", "self", ".", "_sectors", "(", "ignore_chunk", "=", "ignore_chunk", ")", "# Sectors are considered free, if the value is an empty list.", "return", "[", "not", "i", "for", "i", "in", "sectors", "]" ]
Return a list of booleans, indicating the free sectors.
[ "Return", "a", "list", "of", "booleans", "indicating", "the", "free", "sectors", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/region.py#L404-L408
train
twoolie/NBT
nbt/region.py
RegionFile.get_nbt
def get_nbt(self, x, z):
    """
    Return an NBTFile of the specified chunk.
    Raise InconceivedChunk if the chunk is not included in the file.
    """
    # TODO: cache results?
    data = self.get_blockdata(x, z) # This may raise a RegionFileFormatError.
    data = BytesIO(data)
    err = None
    try:
        nbt = NBTFile(buffer=data)
        if self.loc.x != None:
            x += self.loc.x*32
        if self.loc.z != None:
            z += self.loc.z*32
        nbt.loc = Location(x=x, z=z)
        return nbt
    # this may raise a MalformedFileError. Convert to ChunkDataError.
    except MalformedFileError as e:
        err = '%s' % e # avoid str(e) due to Unicode issues in Python 2.
    if err:
        raise ChunkDataError(err)
python
def get_nbt(self, x, z):
    """
    Return an NBTFile of the specified chunk.
    Raise InconceivedChunk if the chunk is not included in the file.
    """
    # TODO: cache results?
    data = self.get_blockdata(x, z) # This may raise a RegionFileFormatError.
    data = BytesIO(data)
    err = None
    try:
        nbt = NBTFile(buffer=data)
        if self.loc.x != None:
            x += self.loc.x*32
        if self.loc.z != None:
            z += self.loc.z*32
        nbt.loc = Location(x=x, z=z)
        return nbt
    # this may raise a MalformedFileError. Convert to ChunkDataError.
    except MalformedFileError as e:
        err = '%s' % e # avoid str(e) due to Unicode issues in Python 2.
    if err:
        raise ChunkDataError(err)
[ "def", "get_nbt", "(", "self", ",", "x", ",", "z", ")", ":", "# TODO: cache results?", "data", "=", "self", ".", "get_blockdata", "(", "x", ",", "z", ")", "# This may raise a RegionFileFormatError.", "data", "=", "BytesIO", "(", "data", ")", "err", "=", "None", "try", ":", "nbt", "=", "NBTFile", "(", "buffer", "=", "data", ")", "if", "self", ".", "loc", ".", "x", "!=", "None", ":", "x", "+=", "self", ".", "loc", ".", "x", "*", "32", "if", "self", ".", "loc", ".", "z", "!=", "None", ":", "z", "+=", "self", ".", "loc", ".", "z", "*", "32", "nbt", ".", "loc", "=", "Location", "(", "x", "=", "x", ",", "z", "=", "z", ")", "return", "nbt", "# this may raise a MalformedFileError. Convert to ChunkDataError.", "except", "MalformedFileError", "as", "e", ":", "err", "=", "'%s'", "%", "e", "# avoid str(e) due to Unicode issues in Python 2.", "if", "err", ":", "raise", "ChunkDataError", "(", "err", ")" ]
Return an NBTFile of the specified chunk. Raise InconceivedChunk if the chunk is not included in the file.
[ "Return", "an", "NBTFile", "of", "the", "specified", "chunk", ".", "Raise", "InconceivedChunk", "if", "the", "chunk", "is", "not", "included", "in", "the", "file", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/region.py#L585-L606
train
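Usage sketch for reading one chunk's NBT directly from a region file; the file name is hypothetical:

from nbt.region import RegionFile

region = RegionFile("r.0.0.mca")        # hypothetical region file
nbt = region.get_nbt(3, 7)              # chunk-local coordinates, each in range(32)
print(nbt.loc.x, nbt.loc.z)             # absolute chunk location when region.loc is set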
twoolie/NBT
nbt/region.py
RegionFile.write_chunk
def write_chunk(self, x, z, nbt_file):
    """
    Pack the NBT file as binary data, and write to file in a compressed format.
    """
    data = BytesIO()
    nbt_file.write_file(buffer=data) # render to buffer; uncompressed
    self.write_blockdata(x, z, data.getvalue())
python
def write_chunk(self, x, z, nbt_file):
    """
    Pack the NBT file as binary data, and write to file in a compressed format.
    """
    data = BytesIO()
    nbt_file.write_file(buffer=data) # render to buffer; uncompressed
    self.write_blockdata(x, z, data.getvalue())
[ "def", "write_chunk", "(", "self", ",", "x", ",", "z", ",", "nbt_file", ")", ":", "data", "=", "BytesIO", "(", ")", "nbt_file", ".", "write_file", "(", "buffer", "=", "data", ")", "# render to buffer; uncompressed", "self", ".", "write_blockdata", "(", "x", ",", "z", ",", "data", ".", "getvalue", "(", ")", ")" ]
Pack the NBT file as binary data, and write to file in a compressed format.
[ "Pack", "the", "NBT", "file", "as", "binary", "data", "and", "write", "to", "file", "in", "a", "compressed", "format", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/region.py#L712-L718
train
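A read-modify-write round trip built from the two RegionFile methods in this file; region and coordinates as in the previous sketch:

nbt = region.get_nbt(3, 7)
# ...edit the tag tree in place here...
region.write_chunk(3, 7, nbt)           # re-packs, compresses, and writes to a free sector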
twoolie/NBT
nbt/region.py
RegionFile.unlink_chunk
def unlink_chunk(self, x, z):
    """
    Remove a chunk from the header of the region file.
    Fragmentation is not a problem; chunks are written to free sectors when possible.
    """
    # This function fails for an empty file. If that is the case, just return.
    if self.size < 2*SECTOR_LENGTH:
        return

    # zero the region header for the chunk (offset length and time)
    self.file.seek(4 * (x + 32*z))
    self.file.write(pack(">IB", 0, 0)[1:])
    self.file.seek(SECTOR_LENGTH + 4 * (x + 32*z))
    self.file.write(pack(">I", 0))

    # Check if file should be truncated:
    current = self.metadata[x, z]
    free_sectors = self._locate_free_sectors(ignore_chunk=current)
    truncate_count = list(reversed(free_sectors)).index(False)
    if truncate_count > 0:
        self.size = SECTOR_LENGTH * (len(free_sectors) - truncate_count)
        self.file.truncate(self.size)
        free_sectors = free_sectors[:-truncate_count]

    # Calculate freed sectors
    for s in range(current.blockstart, min(current.blockstart + current.blocklength, len(free_sectors))):
        if free_sectors[s]:
            # zero sector s
            self.file.seek(SECTOR_LENGTH*s)
            self.file.write(SECTOR_LENGTH*b'\x00')

    # update the header
    self.metadata[x, z] = ChunkMetadata(x, z)
python
def unlink_chunk(self, x, z):
    """
    Remove a chunk from the header of the region file.
    Fragmentation is not a problem; chunks are written to free sectors when possible.
    """
    # This function fails for an empty file. If that is the case, just return.
    if self.size < 2*SECTOR_LENGTH:
        return

    # zero the region header for the chunk (offset length and time)
    self.file.seek(4 * (x + 32*z))
    self.file.write(pack(">IB", 0, 0)[1:])
    self.file.seek(SECTOR_LENGTH + 4 * (x + 32*z))
    self.file.write(pack(">I", 0))

    # Check if file should be truncated:
    current = self.metadata[x, z]
    free_sectors = self._locate_free_sectors(ignore_chunk=current)
    truncate_count = list(reversed(free_sectors)).index(False)
    if truncate_count > 0:
        self.size = SECTOR_LENGTH * (len(free_sectors) - truncate_count)
        self.file.truncate(self.size)
        free_sectors = free_sectors[:-truncate_count]

    # Calculate freed sectors
    for s in range(current.blockstart, min(current.blockstart + current.blocklength, len(free_sectors))):
        if free_sectors[s]:
            # zero sector s
            self.file.seek(SECTOR_LENGTH*s)
            self.file.write(SECTOR_LENGTH*b'\x00')

    # update the header
    self.metadata[x, z] = ChunkMetadata(x, z)
[ "def", "unlink_chunk", "(", "self", ",", "x", ",", "z", ")", ":", "# This function fails for an empty file. If that is the case, just return.", "if", "self", ".", "size", "<", "2", "*", "SECTOR_LENGTH", ":", "return", "# zero the region header for the chunk (offset length and time)", "self", ".", "file", ".", "seek", "(", "4", "*", "(", "x", "+", "32", "*", "z", ")", ")", "self", ".", "file", ".", "write", "(", "pack", "(", "\">IB\"", ",", "0", ",", "0", ")", "[", "1", ":", "]", ")", "self", ".", "file", ".", "seek", "(", "SECTOR_LENGTH", "+", "4", "*", "(", "x", "+", "32", "*", "z", ")", ")", "self", ".", "file", ".", "write", "(", "pack", "(", "\">I\"", ",", "0", ")", ")", "# Check if file should be truncated:", "current", "=", "self", ".", "metadata", "[", "x", ",", "z", "]", "free_sectors", "=", "self", ".", "_locate_free_sectors", "(", "ignore_chunk", "=", "current", ")", "truncate_count", "=", "list", "(", "reversed", "(", "free_sectors", ")", ")", ".", "index", "(", "False", ")", "if", "truncate_count", ">", "0", ":", "self", ".", "size", "=", "SECTOR_LENGTH", "*", "(", "len", "(", "free_sectors", ")", "-", "truncate_count", ")", "self", ".", "file", ".", "truncate", "(", "self", ".", "size", ")", "free_sectors", "=", "free_sectors", "[", ":", "-", "truncate_count", "]", "# Calculate freed sectors", "for", "s", "in", "range", "(", "current", ".", "blockstart", ",", "min", "(", "current", ".", "blockstart", "+", "current", ".", "blocklength", ",", "len", "(", "free_sectors", ")", ")", ")", ":", "if", "free_sectors", "[", "s", "]", ":", "# zero sector s", "self", ".", "file", ".", "seek", "(", "SECTOR_LENGTH", "*", "s", ")", "self", ".", "file", ".", "write", "(", "SECTOR_LENGTH", "*", "b'\\x00'", ")", "# update the header", "self", ".", "metadata", "[", "x", ",", "z", "]", "=", "ChunkMetadata", "(", "x", ",", "z", ")" ]
Remove a chunk from the header of the region file. Fragmentation is not a problem; chunks are written to free sectors when possible.
[ "Remove", "a", "chunk", "from", "the", "header", "of", "the", "region", "file", ".", "Fragmentation", "is", "not", "a", "problem", "chunks", "are", "written", "to", "free", "sectors", "when", "possible", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/region.py#L720-L752
train
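Deleting the same chunk afterwards; the header entry is zeroed, and the file is truncated when the freed sectors sit at the tail:

region.unlink_chunk(3, 7)               # region as opened in the earlier sketch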
twoolie/NBT
nbt/region.py
RegionFile._classname
def _classname(self):
    """Return the fully qualified class name."""
    if self.__class__.__module__ in (None,):
        return self.__class__.__name__
    else:
        return "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
python
def _classname(self):
    """Return the fully qualified class name."""
    if self.__class__.__module__ in (None,):
        return self.__class__.__name__
    else:
        return "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
[ "def", "_classname", "(", "self", ")", ":", "if", "self", ".", "__class__", ".", "__module__", "in", "(", "None", ",", ")", ":", "return", "self", ".", "__class__", ".", "__name__", "else", ":", "return", "\"%s.%s\"", "%", "(", "self", ".", "__class__", ".", "__module__", ",", "self", ".", "__class__", ".", "__name__", ")" ]
Return the fully qualified class name.
[ "Return", "the", "fully", "qualified", "class", "name", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/region.py#L754-L759
train
twoolie/NBT
examples/chest_analysis.py
chests_per_chunk
def chests_per_chunk(chunk):
    """Find chests and get contents in a given chunk."""
    chests = []
    for entity in chunk['Entities']:
        eid = entity["id"].value
        if eid == "Minecart" and entity["type"].value == 1 or eid == "minecraft:chest_minecart":
            x,y,z = entity["Pos"]
            x,y,z = x.value,y.value,z.value
            # Treasures are empty upon first opening
            try:
                items = items_from_nbt(entity["Items"])
            except KeyError:
                items = {}
            chests.append(Chest("Minecart with chest",(x,y,z),items))
    for entity in chunk['TileEntities']:
        eid = entity["id"].value
        if eid == "Chest" or eid == "minecraft:chest":
            x,y,z = entity["x"].value,entity["y"].value,entity["z"].value
            # Treasures are empty upon first opening
            try:
                items = items_from_nbt(entity["Items"])
            except KeyError:
                items = {}
            chests.append(Chest("Chest",(x,y,z),items))
    return chests
python
def chests_per_chunk(chunk):
    """Find chests and get contents in a given chunk."""
    chests = []
    for entity in chunk['Entities']:
        eid = entity["id"].value
        if eid == "Minecart" and entity["type"].value == 1 or eid == "minecraft:chest_minecart":
            x,y,z = entity["Pos"]
            x,y,z = x.value,y.value,z.value
            # Treasures are empty upon first opening
            try:
                items = items_from_nbt(entity["Items"])
            except KeyError:
                items = {}
            chests.append(Chest("Minecart with chest",(x,y,z),items))
    for entity in chunk['TileEntities']:
        eid = entity["id"].value
        if eid == "Chest" or eid == "minecraft:chest":
            x,y,z = entity["x"].value,entity["y"].value,entity["z"].value
            # Treasures are empty upon first opening
            try:
                items = items_from_nbt(entity["Items"])
            except KeyError:
                items = {}
            chests.append(Chest("Chest",(x,y,z),items))
    return chests
[ "def", "chests_per_chunk", "(", "chunk", ")", ":", "chests", "=", "[", "]", "for", "entity", "in", "chunk", "[", "'Entities'", "]", ":", "eid", "=", "entity", "[", "\"id\"", "]", ".", "value", "if", "eid", "==", "\"Minecart\"", "and", "entity", "[", "\"type\"", "]", ".", "value", "==", "1", "or", "eid", "==", "\"minecraft:chest_minecart\"", ":", "x", ",", "y", ",", "z", "=", "entity", "[", "\"Pos\"", "]", "x", ",", "y", ",", "z", "=", "x", ".", "value", ",", "y", ".", "value", ",", "z", ".", "value", "# Treasures are empty upon first opening", "try", ":", "items", "=", "items_from_nbt", "(", "entity", "[", "\"Items\"", "]", ")", "except", "KeyError", ":", "items", "=", "{", "}", "chests", ".", "append", "(", "Chest", "(", "\"Minecart with chest\"", ",", "(", "x", ",", "y", ",", "z", ")", ",", "items", ")", ")", "for", "entity", "in", "chunk", "[", "'TileEntities'", "]", ":", "eid", "=", "entity", "[", "\"id\"", "]", ".", "value", "if", "eid", "==", "\"Chest\"", "or", "eid", "==", "\"minecraft:chest\"", ":", "x", ",", "y", ",", "z", "=", "entity", "[", "\"x\"", "]", ".", "value", ",", "entity", "[", "\"y\"", "]", ".", "value", ",", "entity", "[", "\"z\"", "]", ".", "value", "# Treasures are empty upon first opening", "try", ":", "items", "=", "items_from_nbt", "(", "entity", "[", "\"Items\"", "]", ")", "except", "KeyError", ":", "items", "=", "{", "}", "chests", ".", "append", "(", "Chest", "(", "\"Chest\"", ",", "(", "x", ",", "y", ",", "z", ")", ",", "items", ")", ")", "return", "chests" ]
Find chests and get contents in a given chunk.
[ "Find", "chests", "and", "get", "contents", "in", "a", "given", "chunk", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/examples/chest_analysis.py#L49-L81
train
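A hedged sketch of driving chests_per_chunk; iter_nbt() on the world folder and the "Level" sub-tag are assumptions based on how such example scripts are usually run, and the Chest attribute names are guessed from the constructor calls above:

for chunk_nbt in world.iter_nbt():      # assumed raw-NBT iterator; world as before
    for chest in chests_per_chunk(chunk_nbt["Level"]):
        print(chest)                    # Chest fields/repr are assumptions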
twoolie/NBT
nbt/chunk.py
AnvilChunk.get_block
def get_block(self, x, y, z):
    """Get a block from relative x,y,z."""
    sy,by = divmod(y, 16)
    section = self.get_section(sy)
    if section == None:
        return None
    return section.get_block(x, by, z)
python
def get_block(self, x, y, z):
    """Get a block from relative x,y,z."""
    sy,by = divmod(y, 16)
    section = self.get_section(sy)
    if section == None:
        return None
    return section.get_block(x, by, z)
[ "def", "get_block", "(", "self", ",", "x", ",", "y", ",", "z", ")", ":", "sy", ",", "by", "=", "divmod", "(", "y", ",", "16", ")", "section", "=", "self", ".", "get_section", "(", "sy", ")", "if", "section", "==", "None", ":", "return", "None", "return", "section", ".", "get_block", "(", "x", ",", "by", ",", "z", ")" ]
Get a block from relative x,y,z.
[ "Get", "a", "block", "from", "relative", "x", "y", "z", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/chunk.py#L281-L288
train
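The divmod here maps a chunk-relative y onto a 16-block-high section; for example:

sy, by = divmod(77, 16)     # (4, 13): world y=77 is block 13 of section 4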
twoolie/NBT
nbt/chunk.py
BlockArray.get_blocks_byte_array
def get_blocks_byte_array(self, buffer=False):
    """Return a list of all blocks in this chunk."""
    if buffer:
        length = len(self.blocksList)
        return BytesIO(pack(">i", length)+self.get_blocks_byte_array())
    else:
        return array.array('B', self.blocksList).tostring()
python
def get_blocks_byte_array(self, buffer=False):
    """Return a list of all blocks in this chunk."""
    if buffer:
        length = len(self.blocksList)
        return BytesIO(pack(">i", length)+self.get_blocks_byte_array())
    else:
        return array.array('B', self.blocksList).tostring()
[ "def", "get_blocks_byte_array", "(", "self", ",", "buffer", "=", "False", ")", ":", "if", "buffer", ":", "length", "=", "len", "(", "self", ".", "blocksList", ")", "return", "BytesIO", "(", "pack", "(", "\">i\"", ",", "length", ")", "+", "self", ".", "get_blocks_byte_array", "(", ")", ")", "else", ":", "return", "array", ".", "array", "(", "'B'", ",", "self", ".", "blocksList", ")", ".", "tostring", "(", ")" ]
Return a list of all blocks in this chunk.
[ "Return", "a", "list", "of", "all", "blocks", "in", "this", "chunk", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/chunk.py#L329-L335
train
twoolie/NBT
nbt/chunk.py
BlockArray.get_data_byte_array
def get_data_byte_array(self, buffer=False):
    """Return a list of data for all blocks in this chunk."""
    if buffer:
        length = len(self.dataList)
        return BytesIO(pack(">i", length)+self.get_data_byte_array())
    else:
        return array.array('B', self.dataList).tostring()
python
def get_data_byte_array(self, buffer=False):
    """Return a list of data for all blocks in this chunk."""
    if buffer:
        length = len(self.dataList)
        return BytesIO(pack(">i", length)+self.get_data_byte_array())
    else:
        return array.array('B', self.dataList).tostring()
[ "def", "get_data_byte_array", "(", "self", ",", "buffer", "=", "False", ")", ":", "if", "buffer", ":", "length", "=", "len", "(", "self", ".", "dataList", ")", "return", "BytesIO", "(", "pack", "(", "\">i\"", ",", "length", ")", "+", "self", ".", "get_data_byte_array", "(", ")", ")", "else", ":", "return", "array", ".", "array", "(", "'B'", ",", "self", ".", "dataList", ")", ".", "tostring", "(", ")" ]
Return a list of data for all blocks in this chunk.
[ "Return", "a", "list", "of", "data", "for", "all", "blocks", "in", "this", "chunk", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/chunk.py#L337-L343
train
twoolie/NBT
nbt/chunk.py
BlockArray.generate_heightmap
def generate_heightmap(self, buffer=False, as_array=False):
    """Return a heightmap, representing the highest solid blocks in this chunk."""
    non_solids = [0, 8, 9, 10, 11, 38, 37, 32, 31]
    if buffer:
        return BytesIO(pack(">i", 256)+self.generate_heightmap()) # Length + Heightmap, ready for insertion into Chunk NBT
    else:
        bytes = []
        for z in range(16):
            for x in range(16):
                for y in range(127, -1, -1):
                    offset = y + z*128 + x*128*16
                    if (self.blocksList[offset] not in non_solids or y == 0):
                        bytes.append(y+1)
                        break
        if (as_array):
            return bytes
        else:
            return array.array('B', bytes).tostring()
python
def generate_heightmap(self, buffer=False, as_array=False):
    """Return a heightmap, representing the highest solid blocks in this chunk."""
    non_solids = [0, 8, 9, 10, 11, 38, 37, 32, 31]
    if buffer:
        return BytesIO(pack(">i", 256)+self.generate_heightmap()) # Length + Heightmap, ready for insertion into Chunk NBT
    else:
        bytes = []
        for z in range(16):
            for x in range(16):
                for y in range(127, -1, -1):
                    offset = y + z*128 + x*128*16
                    if (self.blocksList[offset] not in non_solids or y == 0):
                        bytes.append(y+1)
                        break
        if (as_array):
            return bytes
        else:
            return array.array('B', bytes).tostring()
[ "def", "generate_heightmap", "(", "self", ",", "buffer", "=", "False", ",", "as_array", "=", "False", ")", ":", "non_solids", "=", "[", "0", ",", "8", ",", "9", ",", "10", ",", "11", ",", "38", ",", "37", ",", "32", ",", "31", "]", "if", "buffer", ":", "return", "BytesIO", "(", "pack", "(", "\">i\"", ",", "256", ")", "+", "self", ".", "generate_heightmap", "(", ")", ")", "# Length + Heightmap, ready for insertion into Chunk NBT", "else", ":", "bytes", "=", "[", "]", "for", "z", "in", "range", "(", "16", ")", ":", "for", "x", "in", "range", "(", "16", ")", ":", "for", "y", "in", "range", "(", "127", ",", "-", "1", ",", "-", "1", ")", ":", "offset", "=", "y", "+", "z", "*", "128", "+", "x", "*", "128", "*", "16", "if", "(", "self", ".", "blocksList", "[", "offset", "]", "not", "in", "non_solids", "or", "y", "==", "0", ")", ":", "bytes", ".", "append", "(", "y", "+", "1", ")", "break", "if", "(", "as_array", ")", ":", "return", "bytes", "else", ":", "return", "array", ".", "array", "(", "'B'", ",", "bytes", ")", ".", "tostring", "(", ")" ]
Return a heightmap, representing the highest solid blocks in this chunk.
[ "Return", "a", "heightmap", "representing", "the", "highest", "solid", "blocks", "in", "this", "chunk", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/chunk.py#L345-L362
train
twoolie/NBT
nbt/chunk.py
BlockArray.set_blocks
def set_blocks(self, list=None, dict=None, fill_air=False):
    """
    Sets all blocks in this chunk, using either a list or dictionary.
    Blocks not explicitly set can be filled to air by setting fill_air to True.
    """
    if list:
        # Inputting a list like self.blocksList
        self.blocksList = list
    elif dict:
        # Inputting a dictionary like result of self.get_blocks_struct()
        list = []
        for x in range(16):
            for z in range(16):
                for y in range(128):
                    coord = x,y,z
                    offset = y + z*128 + x*128*16
                    if (coord in dict):
                        list.append(dict[coord])
                    else:
                        if (self.blocksList[offset] and not fill_air):
                            list.append(self.blocksList[offset])
                        else:
                            list.append(0) # Air
        self.blocksList = list
    else:
        # None of the above...
        return False
    return True
python
def set_blocks(self, list=None, dict=None, fill_air=False):
    """
    Sets all blocks in this chunk, using either a list or dictionary.
    Blocks not explicitly set can be filled to air by setting fill_air to True.
    """
    if list:
        # Inputting a list like self.blocksList
        self.blocksList = list
    elif dict:
        # Inputting a dictionary like result of self.get_blocks_struct()
        list = []
        for x in range(16):
            for z in range(16):
                for y in range(128):
                    coord = x,y,z
                    offset = y + z*128 + x*128*16
                    if (coord in dict):
                        list.append(dict[coord])
                    else:
                        if (self.blocksList[offset] and not fill_air):
                            list.append(self.blocksList[offset])
                        else:
                            list.append(0) # Air
        self.blocksList = list
    else:
        # None of the above...
        return False
    return True
[ "def", "set_blocks", "(", "self", ",", "list", "=", "None", ",", "dict", "=", "None", ",", "fill_air", "=", "False", ")", ":", "if", "list", ":", "# Inputting a list like self.blocksList", "self", ".", "blocksList", "=", "list", "elif", "dict", ":", "# Inputting a dictionary like result of self.get_blocks_struct()", "list", "=", "[", "]", "for", "x", "in", "range", "(", "16", ")", ":", "for", "z", "in", "range", "(", "16", ")", ":", "for", "y", "in", "range", "(", "128", ")", ":", "coord", "=", "x", ",", "y", ",", "z", "offset", "=", "y", "+", "z", "*", "128", "+", "x", "*", "128", "*", "16", "if", "(", "coord", "in", "dict", ")", ":", "list", ".", "append", "(", "dict", "[", "coord", "]", ")", "else", ":", "if", "(", "self", ".", "blocksList", "[", "offset", "]", "and", "not", "fill_air", ")", ":", "list", ".", "append", "(", "self", ".", "blocksList", "[", "offset", "]", ")", "else", ":", "list", ".", "append", "(", "0", ")", "# Air", "self", ".", "blocksList", "=", "list", "else", ":", "# None of the above...", "return", "False", "return", "True" ]
Sets all blocks in this chunk, using either a list or dictionary. Blocks not explicitly set can be filled to air by setting fill_air to True.
[ "Sets", "all", "blocks", "in", "this", "chunk", "using", "either", "a", "list", "or", "dictionary", ".", "Blocks", "not", "explicitly", "set", "can", "be", "filled", "to", "air", "by", "setting", "fill_air", "to", "True", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/chunk.py#L364-L391
train
twoolie/NBT
nbt/chunk.py
BlockArray.set_block
def set_block(self, x,y,z, id, data=0):
    """Sets the block at x, y, z to the specified id, and optionally data."""
    offset = y + z*128 + x*128*16
    self.blocksList[offset] = id
    if (offset % 2 == 1):
        # offset is odd
        index = (offset-1)//2
        b = self.dataList[index]
        self.dataList[index] = (b & 240) + (data & 15) # modify lower bits, leaving higher bits in place
    else:
        # offset is even
        index = offset//2
        b = self.dataList[index]
        self.dataList[index] = (b & 15) + (data << 4 & 240)
python
def set_block(self, x,y,z, id, data=0):
    """Sets the block at x, y, z to the specified id, and optionally data."""
    offset = y + z*128 + x*128*16
    self.blocksList[offset] = id
    if (offset % 2 == 1):
        # offset is odd
        index = (offset-1)//2
        b = self.dataList[index]
        self.dataList[index] = (b & 240) + (data & 15) # modify lower bits, leaving higher bits in place
    else:
        # offset is even
        index = offset//2
        b = self.dataList[index]
        self.dataList[index] = (b & 15) + (data << 4 & 240)
[ "def", "set_block", "(", "self", ",", "x", ",", "y", ",", "z", ",", "id", ",", "data", "=", "0", ")", ":", "offset", "=", "y", "+", "z", "*", "128", "+", "x", "*", "128", "*", "16", "self", ".", "blocksList", "[", "offset", "]", "=", "id", "if", "(", "offset", "%", "2", "==", "1", ")", ":", "# offset is odd", "index", "=", "(", "offset", "-", "1", ")", "//", "2", "b", "=", "self", ".", "dataList", "[", "index", "]", "self", ".", "dataList", "[", "index", "]", "=", "(", "b", "&", "240", ")", "+", "(", "data", "&", "15", ")", "# modify lower bits, leaving higher bits in place", "else", ":", "# offset is even", "index", "=", "offset", "//", "2", "b", "=", "self", ".", "dataList", "[", "index", "]", "self", ".", "dataList", "[", "index", "]", "=", "(", "b", "&", "15", ")", "+", "(", "data", "<<", "4", "&", "240", ")" ]
Sets the block at x, y, z to the specified id, and optionally data.
[ "Sets", "the", "block", "at", "x", "y", "z", "to", "the", "specified", "id", "and", "optionally", "data", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/chunk.py#L393-L406
train
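The nibble packing above stores two 4-bit data values per byte; a worked example of both branches:

b = 0b10100000                          # existing byte holding two packed 4-bit values
data = 0b0110
odd  = (b & 240) + (data & 15)          # keep high nibble, replace low  -> 0b10100110
even = (b & 15) + (data << 4 & 240)     # keep low nibble, replace high -> 0b01100000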
twoolie/NBT
nbt/chunk.py
BlockArray.get_block
def get_block(self, x,y,z, coord=False):
    """Return the id of the block at x, y, z."""
    """
    Laid out like:
    (0,0,0), (0,1,0), (0,2,0) ... (0,127,0), (0,0,1), (0,1,1), (0,2,1) ... (0,127,1), (0,0,2) ... (0,127,15), (1,0,0), (1,1,0) ... (15,127,15)

    ::

        blocks = []
        for x in range(15):
            for z in range(15):
                for y in range(127):
                    blocks.append(Block(x,y,z))
    """
    offset = y + z*128 + x*128*16 if (coord == False) else coord[1] + coord[2]*128 + coord[0]*128*16
    return self.blocksList[offset]
python
def get_block(self, x,y,z, coord=False):
    """Return the id of the block at x, y, z."""
    """
    Laid out like:
    (0,0,0), (0,1,0), (0,2,0) ... (0,127,0), (0,0,1), (0,1,1), (0,2,1) ... (0,127,1), (0,0,2) ... (0,127,15), (1,0,0), (1,1,0) ... (15,127,15)

    ::

        blocks = []
        for x in range(15):
            for z in range(15):
                for y in range(127):
                    blocks.append(Block(x,y,z))
    """
    offset = y + z*128 + x*128*16 if (coord == False) else coord[1] + coord[2]*128 + coord[0]*128*16
    return self.blocksList[offset]
[ "def", "get_block", "(", "self", ",", "x", ",", "y", ",", "z", ",", "coord", "=", "False", ")", ":", "\"\"\"\n Laid out like:\n (0,0,0), (0,1,0), (0,2,0) ... (0,127,0), (0,0,1), (0,1,1), (0,2,1) ... (0,127,1), (0,0,2) ... (0,127,15), (1,0,0), (1,1,0) ... (15,127,15)\n \n ::\n \n blocks = []\n for x in range(15):\n for z in range(15):\n for y in range(127):\n blocks.append(Block(x,y,z))\n \"\"\"", "offset", "=", "y", "+", "z", "*", "128", "+", "x", "*", "128", "*", "16", "if", "(", "coord", "==", "False", ")", "else", "coord", "[", "1", "]", "+", "coord", "[", "2", "]", "*", "128", "+", "coord", "[", "0", "]", "*", "128", "*", "16", "return", "self", ".", "blocksList", "[", "offset", "]" ]
Return the id of the block at x, y, z.
[ "Return", "the", "id", "of", "the", "block", "at", "x", "y", "z", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/chunk.py#L409-L425
train
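The offset formula flattens (x, y, z) into the 16x16x128 layout described in the docstring; for example:

x, y, z = 5, 64, 12
offset = y + z*128 + x*128*16           # 64 + 1536 + 10240 = 11840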
twoolie/NBT
nbt/nbt.py
TAG.tag_info
def tag_info(self):
    """Return Unicode string with class, name and unnested value."""
    return self.__class__.__name__ + (
        '(%r)' % self.name if self.name else "") + ": " + self.valuestr()
python
def tag_info(self):
    """Return Unicode string with class, name and unnested value."""
    return self.__class__.__name__ + (
        '(%r)' % self.name if self.name else "") + ": " + self.valuestr()
[ "def", "tag_info", "(", "self", ")", ":", "return", "self", ".", "__class__", ".", "__name__", "+", "(", "'(%r)'", "%", "self", ".", "name", "if", "self", ".", "name", "else", "\"\"", ")", "+", "\": \"", "+", "self", ".", "valuestr", "(", ")" ]
Return Unicode string with class, name and unnested value.
[ "Return", "Unicode", "string", "with", "class", "name", "and", "unnested", "value", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/nbt.py#L56-L60
train
twoolie/NBT
nbt/nbt.py
NBTFile.parse_file
def parse_file(self, filename=None, buffer=None, fileobj=None):
    """Completely parse a file, extracting all tags."""
    if filename:
        self.file = GzipFile(filename, 'rb')
    elif buffer:
        if hasattr(buffer, 'name'):
            self.filename = buffer.name
        self.file = buffer
    elif fileobj:
        if hasattr(fileobj, 'name'):
            self.filename = fileobj.name
        self.file = GzipFile(fileobj=fileobj)
    if self.file:
        try:
            type = TAG_Byte(buffer=self.file)
            if type.value == self.id:
                name = TAG_String(buffer=self.file).value
                self._parse_buffer(self.file)
                self.name = name
                self.file.close()
            else:
                raise MalformedFileError(
                    "First record is not a Compound Tag")
        except StructError as e:
            raise MalformedFileError(
                "Partial File Parse: file possibly truncated.")
    else:
        raise ValueError(
            "NBTFile.parse_file(): Need to specify either a "
            "filename or a file object"
        )
python
def parse_file(self, filename=None, buffer=None, fileobj=None):
    """Completely parse a file, extracting all tags."""
    if filename:
        self.file = GzipFile(filename, 'rb')
    elif buffer:
        if hasattr(buffer, 'name'):
            self.filename = buffer.name
        self.file = buffer
    elif fileobj:
        if hasattr(fileobj, 'name'):
            self.filename = fileobj.name
        self.file = GzipFile(fileobj=fileobj)
    if self.file:
        try:
            type = TAG_Byte(buffer=self.file)
            if type.value == self.id:
                name = TAG_String(buffer=self.file).value
                self._parse_buffer(self.file)
                self.name = name
                self.file.close()
            else:
                raise MalformedFileError(
                    "First record is not a Compound Tag")
        except StructError as e:
            raise MalformedFileError(
                "Partial File Parse: file possibly truncated.")
    else:
        raise ValueError(
            "NBTFile.parse_file(): Need to specify either a "
            "filename or a file object"
        )
[ "def", "parse_file", "(", "self", ",", "filename", "=", "None", ",", "buffer", "=", "None", ",", "fileobj", "=", "None", ")", ":", "if", "filename", ":", "self", ".", "file", "=", "GzipFile", "(", "filename", ",", "'rb'", ")", "elif", "buffer", ":", "if", "hasattr", "(", "buffer", ",", "'name'", ")", ":", "self", ".", "filename", "=", "buffer", ".", "name", "self", ".", "file", "=", "buffer", "elif", "fileobj", ":", "if", "hasattr", "(", "fileobj", ",", "'name'", ")", ":", "self", ".", "filename", "=", "fileobj", ".", "name", "self", ".", "file", "=", "GzipFile", "(", "fileobj", "=", "fileobj", ")", "if", "self", ".", "file", ":", "try", ":", "type", "=", "TAG_Byte", "(", "buffer", "=", "self", ".", "file", ")", "if", "type", ".", "value", "==", "self", ".", "id", ":", "name", "=", "TAG_String", "(", "buffer", "=", "self", ".", "file", ")", ".", "value", "self", ".", "_parse_buffer", "(", "self", ".", "file", ")", "self", ".", "name", "=", "name", "self", ".", "file", ".", "close", "(", ")", "else", ":", "raise", "MalformedFileError", "(", "\"First record is not a Compound Tag\"", ")", "except", "StructError", "as", "e", ":", "raise", "MalformedFileError", "(", "\"Partial File Parse: file possibly truncated.\"", ")", "else", ":", "raise", "ValueError", "(", "\"NBTFile.parse_file(): Need to specify either a \"", "\"filename or a file object\"", ")" ]
Completely parse a file, extracting all tags.
[ "Completely", "parse", "a", "file", "extracting", "all", "tags", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/nbt.py#L641-L671
train
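Usage sketch; NBTFile's constructor normally invokes parse_file itself when given a source, but the method can also be called on an empty instance (file name hypothetical):

from nbt.nbt import NBTFile

nbt = NBTFile("level.dat")              # parses on construction
# or, explicitly:
nbt = NBTFile()
nbt.parse_file(filename="level.dat")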
twoolie/NBT
nbt/nbt.py
NBTFile.write_file
def write_file(self, filename=None, buffer=None, fileobj=None):
    """Write this NBT file to a file."""
    closefile = True
    if buffer:
        self.filename = None
        self.file = buffer
        closefile = False
    elif filename:
        self.filename = filename
        self.file = GzipFile(filename, "wb")
    elif fileobj:
        self.filename = None
        self.file = GzipFile(fileobj=fileobj, mode="wb")
    elif self.filename:
        self.file = GzipFile(self.filename, "wb")
    elif not self.file:
        raise ValueError(
            "NBTFile.write_file(): Need to specify either a "
            "filename or a file object"
        )
    # Render tree to file
    TAG_Byte(self.id)._render_buffer(self.file)
    TAG_String(self.name)._render_buffer(self.file)
    self._render_buffer(self.file)
    # make sure the file is complete
    try:
        self.file.flush()
    except (AttributeError, IOError):
        pass
    if closefile:
        try:
            self.file.close()
        except (AttributeError, IOError):
            pass
python
def write_file(self, filename=None, buffer=None, fileobj=None):
    """Write this NBT file to a file."""
    closefile = True
    if buffer:
        self.filename = None
        self.file = buffer
        closefile = False
    elif filename:
        self.filename = filename
        self.file = GzipFile(filename, "wb")
    elif fileobj:
        self.filename = None
        self.file = GzipFile(fileobj=fileobj, mode="wb")
    elif self.filename:
        self.file = GzipFile(self.filename, "wb")
    elif not self.file:
        raise ValueError(
            "NBTFile.write_file(): Need to specify either a "
            "filename or a file object"
        )
    # Render tree to file
    TAG_Byte(self.id)._render_buffer(self.file)
    TAG_String(self.name)._render_buffer(self.file)
    self._render_buffer(self.file)
    # make sure the file is complete
    try:
        self.file.flush()
    except (AttributeError, IOError):
        pass
    if closefile:
        try:
            self.file.close()
        except (AttributeError, IOError):
            pass
[ "def", "write_file", "(", "self", ",", "filename", "=", "None", ",", "buffer", "=", "None", ",", "fileobj", "=", "None", ")", ":", "closefile", "=", "True", "if", "buffer", ":", "self", ".", "filename", "=", "None", "self", ".", "file", "=", "buffer", "closefile", "=", "False", "elif", "filename", ":", "self", ".", "filename", "=", "filename", "self", ".", "file", "=", "GzipFile", "(", "filename", ",", "\"wb\"", ")", "elif", "fileobj", ":", "self", ".", "filename", "=", "None", "self", ".", "file", "=", "GzipFile", "(", "fileobj", "=", "fileobj", ",", "mode", "=", "\"wb\"", ")", "elif", "self", ".", "filename", ":", "self", ".", "file", "=", "GzipFile", "(", "self", ".", "filename", ",", "\"wb\"", ")", "elif", "not", "self", ".", "file", ":", "raise", "ValueError", "(", "\"NBTFile.write_file(): Need to specify either a \"", "\"filename or a file object\"", ")", "# Render tree to file", "TAG_Byte", "(", "self", ".", "id", ")", ".", "_render_buffer", "(", "self", ".", "file", ")", "TAG_String", "(", "self", ".", "name", ")", ".", "_render_buffer", "(", "self", ".", "file", ")", "self", ".", "_render_buffer", "(", "self", ".", "file", ")", "# make sure the file is complete", "try", ":", "self", ".", "file", ".", "flush", "(", ")", "except", "(", "AttributeError", ",", "IOError", ")", ":", "pass", "if", "closefile", ":", "try", ":", "self", ".", "file", ".", "close", "(", ")", "except", "(", "AttributeError", ",", "IOError", ")", ":", "pass" ]
Write this NBT file to a file.
[ "Write", "this", "NBT", "file", "to", "a", "file", "." ]
b06dd6cc8117d2788da1d8416e642d58bad45762
https://github.com/twoolie/NBT/blob/b06dd6cc8117d2788da1d8416e642d58bad45762/nbt/nbt.py#L673-L706
train
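A complementary write sketch for `write_file`, again hedged: the tag classes and keyword names come from the same library, and "out.nbt" is a placeholder path.

```python
from nbt.nbt import NBTFile, TAG_Long, TAG_String

nbtfile = NBTFile()
nbtfile.name = "hello world"  # name of the root compound tag
nbtfile.tags.append(TAG_Long(name="counter", value=42))
nbtfile.tags.append(TAG_String(name="greeting", value="hi"))
nbtfile.write_file("out.nbt")  # written gzip-compressed, per the GzipFile calls above
```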
argaen/aiocache
aiocache/serializers/serializers.py
MsgPackSerializer.loads
def loads(self, value): """ Deserialize value using ``msgpack.loads``. :param value: bytes :returns: obj """ raw = False if self.encoding == "utf-8" else True if value is None: return None return msgpack.loads(value, raw=raw, use_list=self.use_list)
python
def loads(self, value): """ Deserialize value using ``msgpack.loads``. :param value: bytes :returns: obj """ raw = False if self.encoding == "utf-8" else True if value is None: return None return msgpack.loads(value, raw=raw, use_list=self.use_list)
[ "def", "loads", "(", "self", ",", "value", ")", ":", "raw", "=", "False", "if", "self", ".", "encoding", "==", "\"utf-8\"", "else", "True", "if", "value", "is", "None", ":", "return", "None", "return", "msgpack", ".", "loads", "(", "value", ",", "raw", "=", "raw", ",", "use_list", "=", "self", ".", "use_list", ")" ]
Deserialize value using ``msgpack.loads``. :param value: bytes :returns: obj

[ "Deserialize", "value", "using", "msgpack", ".", "loads", "." ]
fdd282f37283ca04e22209f4d2ae4900f29e1688
https://github.com/argaen/aiocache/blob/fdd282f37283ca04e22209f4d2ae4900f29e1688/aiocache/serializers/serializers.py#L178-L188
train
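A round-trip sketch for the serializer above; it assumes the `msgpack` dependency is installed and that the matching `dumps` method behaves symmetrically.

```python
from aiocache.serializers import MsgPackSerializer

serializer = MsgPackSerializer()            # utf-8 by default, so raw=False
packed = serializer.dumps({"a": 1, "b": [1, 2]})
print(serializer.loads(packed))             # {'a': 1, 'b': [1, 2]}
print(serializer.loads(None))               # None short-circuits, as coded above
```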
argaen/aiocache
aiocache/base.py
API.aiocache_enabled
def aiocache_enabled(cls, fake_return=None): """ Use this decorator to be able to fake the return of the function by setting the ``AIOCACHE_DISABLE`` environment variable """ def enabled(func): @functools.wraps(func) async def _enabled(*args, **kwargs): if os.getenv("AIOCACHE_DISABLE") == "1": return fake_return return await func(*args, **kwargs) return _enabled return enabled
python
def aiocache_enabled(cls, fake_return=None): """ Use this decorator to be able to fake the return of the function by setting the ``AIOCACHE_DISABLE`` environment variable """ def enabled(func): @functools.wraps(func) async def _enabled(*args, **kwargs): if os.getenv("AIOCACHE_DISABLE") == "1": return fake_return return await func(*args, **kwargs) return _enabled return enabled
[ "def", "aiocache_enabled", "(", "cls", ",", "fake_return", "=", "None", ")", ":", "def", "enabled", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "async", "def", "_enabled", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "os", ".", "getenv", "(", "\"AIOCACHE_DISABLE\"", ")", "==", "\"1\"", ":", "return", "fake_return", "return", "await", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "_enabled", "return", "enabled" ]
Use this decorator to be able to fake the return value of the function by setting the ``AIOCACHE_DISABLE`` environment variable.
[ "Use", "this", "decorator", "to", "be", "able", "to", "fake", "the", "return", "of", "the", "function", "by", "setting", "the", "AIOCACHE_DISABLE", "environment", "variable" ]
fdd282f37283ca04e22209f4d2ae4900f29e1688
https://github.com/argaen/aiocache/blob/fdd282f37283ca04e22209f4d2ae4900f29e1688/aiocache/base.py#L50-L65
train
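A sketch of the decorator's effect, assuming the standard `Cache` factory and that set/get are wrapped with fake returns of True/None respectively (an assumption about how this codebase applies the decorator).

```python
import asyncio
import os

from aiocache import Cache

async def main():
    os.environ["AIOCACHE_DISABLE"] = "1"  # checked per call, not at import
    cache = Cache(Cache.MEMORY)
    print(await cache.set("k", "v"))      # fake True, nothing is stored
    print(await cache.get("k"))           # fake None, the backend never runs

asyncio.run(main())
```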
argaen/aiocache
aiocache/base.py
BaseCache.add
async def add(self, key, value, ttl=SENTINEL, dumps_fn=None, namespace=None, _conn=None): """ Stores the value in the given key with ttl if specified. Raises an error if the key already exists. :param key: str :param value: obj :param ttl: int the expiration time in seconds. Due to memcached restrictions if you want compatibility use int. In case you need miliseconds, redis and memory support float ttls :param dumps_fn: callable alternative to use as dumps function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True if key is inserted :raises: - ValueError if key already exists - :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() dumps = dumps_fn or self._serializer.dumps ns_key = self.build_key(key, namespace=namespace) await self._add(ns_key, dumps(value), ttl=self._get_ttl(ttl), _conn=_conn) logger.debug("ADD %s %s (%.4f)s", ns_key, True, time.monotonic() - start) return True
python
async def add(self, key, value, ttl=SENTINEL, dumps_fn=None, namespace=None, _conn=None): """ Stores the value in the given key with ttl if specified. Raises an error if the key already exists. :param key: str :param value: obj :param ttl: int the expiration time in seconds. Due to memcached restrictions if you want compatibility use int. In case you need miliseconds, redis and memory support float ttls :param dumps_fn: callable alternative to use as dumps function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True if key is inserted :raises: - ValueError if key already exists - :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() dumps = dumps_fn or self._serializer.dumps ns_key = self.build_key(key, namespace=namespace) await self._add(ns_key, dumps(value), ttl=self._get_ttl(ttl), _conn=_conn) logger.debug("ADD %s %s (%.4f)s", ns_key, True, time.monotonic() - start) return True
[ "async", "def", "add", "(", "self", ",", "key", ",", "value", ",", "ttl", "=", "SENTINEL", ",", "dumps_fn", "=", "None", ",", "namespace", "=", "None", ",", "_conn", "=", "None", ")", ":", "start", "=", "time", ".", "monotonic", "(", ")", "dumps", "=", "dumps_fn", "or", "self", ".", "_serializer", ".", "dumps", "ns_key", "=", "self", ".", "build_key", "(", "key", ",", "namespace", "=", "namespace", ")", "await", "self", ".", "_add", "(", "ns_key", ",", "dumps", "(", "value", ")", ",", "ttl", "=", "self", ".", "_get_ttl", "(", "ttl", ")", ",", "_conn", "=", "_conn", ")", "logger", ".", "debug", "(", "\"ADD %s %s (%.4f)s\"", ",", "ns_key", ",", "True", ",", "time", ".", "monotonic", "(", ")", "-", "start", ")", "return", "True" ]
Stores the value in the given key with ttl if specified. Raises an error if the key already exists. :param key: str :param value: obj :param ttl: int the expiration time in seconds. Due to memcached restrictions, use int if you want compatibility. In case you need milliseconds, redis and memory support float ttls :param dumps_fn: callable alternative to use as dumps function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True if key is inserted :raises: - ValueError if key already exists - :class:`asyncio.TimeoutError` if it lasts more than self.timeout
[ "Stores", "the", "value", "in", "the", "given", "key", "with", "ttl", "if", "specified", ".", "Raises", "an", "error", "if", "the", "key", "already", "exists", "." ]
fdd282f37283ca04e22209f4d2ae4900f29e1688
https://github.com/argaen/aiocache/blob/fdd282f37283ca04e22209f4d2ae4900f29e1688/aiocache/base.py#L140-L166
train
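A usage sketch of the `add` semantics documented above (in-memory backend assumed):

```python
import asyncio

from aiocache import Cache

async def main():
    cache = Cache(Cache.MEMORY)
    await cache.add("key", "value", ttl=10)  # succeeds: key did not exist
    try:
        await cache.add("key", "other")      # same key again
    except ValueError as exc:
        print("already exists:", exc)

asyncio.run(main())
```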
argaen/aiocache
aiocache/base.py
BaseCache.get
async def get(self, key, default=None, loads_fn=None, namespace=None, _conn=None): """ Get a value from the cache. Returns default if not found. :param key: str :param default: obj to return when key is not found :param loads_fn: callable alternative to use as loads function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: obj loaded :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() loads = loads_fn or self._serializer.loads ns_key = self.build_key(key, namespace=namespace) value = loads(await self._get(ns_key, encoding=self.serializer.encoding, _conn=_conn)) logger.debug("GET %s %s (%.4f)s", ns_key, value is not None, time.monotonic() - start) return value if value is not None else default
python
async def get(self, key, default=None, loads_fn=None, namespace=None, _conn=None): """ Get a value from the cache. Returns default if not found. :param key: str :param default: obj to return when key is not found :param loads_fn: callable alternative to use as loads function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: obj loaded :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() loads = loads_fn or self._serializer.loads ns_key = self.build_key(key, namespace=namespace) value = loads(await self._get(ns_key, encoding=self.serializer.encoding, _conn=_conn)) logger.debug("GET %s %s (%.4f)s", ns_key, value is not None, time.monotonic() - start) return value if value is not None else default
[ "async", "def", "get", "(", "self", ",", "key", ",", "default", "=", "None", ",", "loads_fn", "=", "None", ",", "namespace", "=", "None", ",", "_conn", "=", "None", ")", ":", "start", "=", "time", ".", "monotonic", "(", ")", "loads", "=", "loads_fn", "or", "self", ".", "_serializer", ".", "loads", "ns_key", "=", "self", ".", "build_key", "(", "key", ",", "namespace", "=", "namespace", ")", "value", "=", "loads", "(", "await", "self", ".", "_get", "(", "ns_key", ",", "encoding", "=", "self", ".", "serializer", ".", "encoding", ",", "_conn", "=", "_conn", ")", ")", "logger", ".", "debug", "(", "\"GET %s %s (%.4f)s\"", ",", "ns_key", ",", "value", "is", "not", "None", ",", "time", ".", "monotonic", "(", ")", "-", "start", ")", "return", "value", "if", "value", "is", "not", "None", "else", "default" ]
Get a value from the cache. Returns default if not found. :param key: str :param default: obj to return when key is not found :param loads_fn: callable alternative to use as loads function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: obj loaded :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout
[ "Get", "a", "value", "from", "the", "cache", ".", "Returns", "default", "if", "not", "found", "." ]
fdd282f37283ca04e22209f4d2ae4900f29e1688
https://github.com/argaen/aiocache/blob/fdd282f37283ca04e22209f4d2ae4900f29e1688/aiocache/base.py#L175-L195
train
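A `set`/`get` pairing that exercises the default-on-miss behaviour described above (in-memory backend assumed):

```python
import asyncio

from aiocache import Cache

async def main():
    cache = Cache(Cache.MEMORY)
    await cache.set("spam", {"eggs": 1}, ttl=60)
    print(await cache.get("spam"))                    # {'eggs': 1}
    print(await cache.get("missing", default="n/a"))  # default on a miss

asyncio.run(main())
```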
argaen/aiocache
aiocache/base.py
BaseCache.multi_get
async def multi_get(self, keys, loads_fn=None, namespace=None, _conn=None): """ Get multiple values from the cache, values not found are Nones. :param keys: list of str :param loads_fn: callable alternative to use as loads function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: list of objs :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() loads = loads_fn or self._serializer.loads ns_keys = [self.build_key(key, namespace=namespace) for key in keys] values = [ loads(value) for value in await self._multi_get( ns_keys, encoding=self.serializer.encoding, _conn=_conn ) ] logger.debug( "MULTI_GET %s %d (%.4f)s", ns_keys, len([value for value in values if value is not None]), time.monotonic() - start, ) return values
python
async def multi_get(self, keys, loads_fn=None, namespace=None, _conn=None): """ Get multiple values from the cache, values not found are Nones. :param keys: list of str :param loads_fn: callable alternative to use as loads function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: list of objs :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() loads = loads_fn or self._serializer.loads ns_keys = [self.build_key(key, namespace=namespace) for key in keys] values = [ loads(value) for value in await self._multi_get( ns_keys, encoding=self.serializer.encoding, _conn=_conn ) ] logger.debug( "MULTI_GET %s %d (%.4f)s", ns_keys, len([value for value in values if value is not None]), time.monotonic() - start, ) return values
[ "async", "def", "multi_get", "(", "self", ",", "keys", ",", "loads_fn", "=", "None", ",", "namespace", "=", "None", ",", "_conn", "=", "None", ")", ":", "start", "=", "time", ".", "monotonic", "(", ")", "loads", "=", "loads_fn", "or", "self", ".", "_serializer", ".", "loads", "ns_keys", "=", "[", "self", ".", "build_key", "(", "key", ",", "namespace", "=", "namespace", ")", "for", "key", "in", "keys", "]", "values", "=", "[", "loads", "(", "value", ")", "for", "value", "in", "await", "self", ".", "_multi_get", "(", "ns_keys", ",", "encoding", "=", "self", ".", "serializer", ".", "encoding", ",", "_conn", "=", "_conn", ")", "]", "logger", ".", "debug", "(", "\"MULTI_GET %s %d (%.4f)s\"", ",", "ns_keys", ",", "len", "(", "[", "value", "for", "value", "in", "values", "if", "value", "is", "not", "None", "]", ")", ",", "time", ".", "monotonic", "(", ")", "-", "start", ",", ")", "return", "values" ]
Get multiple values from the cache; values not found are None. :param keys: list of str :param loads_fn: callable alternative to use as loads function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: list of objs :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout
[ "Get", "multiple", "values", "from", "the", "cache", "values", "not", "found", "are", "Nones", "." ]
fdd282f37283ca04e22209f4d2ae4900f29e1688
https://github.com/argaen/aiocache/blob/fdd282f37283ca04e22209f4d2ae4900f29e1688/aiocache/base.py#L204-L233
train
argaen/aiocache
aiocache/base.py
BaseCache.set
async def set( self, key, value, ttl=SENTINEL, dumps_fn=None, namespace=None, _cas_token=None, _conn=None ): """ Stores the value in the given key with ttl if specified :param key: str :param value: obj :param ttl: int the expiration time in seconds. Due to memcached restrictions if you want compatibility use int. In case you need miliseconds, redis and memory support float ttls :param dumps_fn: callable alternative to use as dumps function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True if the value was set :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() dumps = dumps_fn or self._serializer.dumps ns_key = self.build_key(key, namespace=namespace) res = await self._set( ns_key, dumps(value), ttl=self._get_ttl(ttl), _cas_token=_cas_token, _conn=_conn ) logger.debug("SET %s %d (%.4f)s", ns_key, True, time.monotonic() - start) return res
python
async def set( self, key, value, ttl=SENTINEL, dumps_fn=None, namespace=None, _cas_token=None, _conn=None ): """ Stores the value in the given key with ttl if specified :param key: str :param value: obj :param ttl: int the expiration time in seconds. Due to memcached restrictions if you want compatibility use int. In case you need miliseconds, redis and memory support float ttls :param dumps_fn: callable alternative to use as dumps function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True if the value was set :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() dumps = dumps_fn or self._serializer.dumps ns_key = self.build_key(key, namespace=namespace) res = await self._set( ns_key, dumps(value), ttl=self._get_ttl(ttl), _cas_token=_cas_token, _conn=_conn ) logger.debug("SET %s %d (%.4f)s", ns_key, True, time.monotonic() - start) return res
[ "async", "def", "set", "(", "self", ",", "key", ",", "value", ",", "ttl", "=", "SENTINEL", ",", "dumps_fn", "=", "None", ",", "namespace", "=", "None", ",", "_cas_token", "=", "None", ",", "_conn", "=", "None", ")", ":", "start", "=", "time", ".", "monotonic", "(", ")", "dumps", "=", "dumps_fn", "or", "self", ".", "_serializer", ".", "dumps", "ns_key", "=", "self", ".", "build_key", "(", "key", ",", "namespace", "=", "namespace", ")", "res", "=", "await", "self", ".", "_set", "(", "ns_key", ",", "dumps", "(", "value", ")", ",", "ttl", "=", "self", ".", "_get_ttl", "(", "ttl", ")", ",", "_cas_token", "=", "_cas_token", ",", "_conn", "=", "_conn", ")", "logger", ".", "debug", "(", "\"SET %s %d (%.4f)s\"", ",", "ns_key", ",", "True", ",", "time", ".", "monotonic", "(", ")", "-", "start", ")", "return", "res" ]
Stores the value in the given key with ttl if specified. :param key: str :param value: obj :param ttl: int the expiration time in seconds. Due to memcached restrictions, use int if you want compatibility. In case you need milliseconds, redis and memory support float ttls :param dumps_fn: callable alternative to use as dumps function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True if the value was set :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout
[ "Stores", "the", "value", "in", "the", "given", "key", "with", "ttl", "if", "specified" ]
fdd282f37283ca04e22209f4d2ae4900f29e1688
https://github.com/argaen/aiocache/blob/fdd282f37283ca04e22209f4d2ae4900f29e1688/aiocache/base.py#L242-L269
train
argaen/aiocache
aiocache/base.py
BaseCache.multi_set
async def multi_set(self, pairs, ttl=SENTINEL, dumps_fn=None, namespace=None, _conn=None): """ Stores multiple values in the given keys. :param pairs: list of two element iterables. First is key and second is value :param ttl: int the expiration time in seconds. Due to memcached restrictions if you want compatibility use int. In case you need miliseconds, redis and memory support float ttls :param dumps_fn: callable alternative to use as dumps function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() dumps = dumps_fn or self._serializer.dumps tmp_pairs = [] for key, value in pairs: tmp_pairs.append((self.build_key(key, namespace=namespace), dumps(value))) await self._multi_set(tmp_pairs, ttl=self._get_ttl(ttl), _conn=_conn) logger.debug( "MULTI_SET %s %d (%.4f)s", [key for key, value in tmp_pairs], len(pairs), time.monotonic() - start, ) return True
python
async def multi_set(self, pairs, ttl=SENTINEL, dumps_fn=None, namespace=None, _conn=None): """ Stores multiple values in the given keys. :param pairs: list of two element iterables. First is key and second is value :param ttl: int the expiration time in seconds. Due to memcached restrictions if you want compatibility use int. In case you need miliseconds, redis and memory support float ttls :param dumps_fn: callable alternative to use as dumps function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() dumps = dumps_fn or self._serializer.dumps tmp_pairs = [] for key, value in pairs: tmp_pairs.append((self.build_key(key, namespace=namespace), dumps(value))) await self._multi_set(tmp_pairs, ttl=self._get_ttl(ttl), _conn=_conn) logger.debug( "MULTI_SET %s %d (%.4f)s", [key for key, value in tmp_pairs], len(pairs), time.monotonic() - start, ) return True
[ "async", "def", "multi_set", "(", "self", ",", "pairs", ",", "ttl", "=", "SENTINEL", ",", "dumps_fn", "=", "None", ",", "namespace", "=", "None", ",", "_conn", "=", "None", ")", ":", "start", "=", "time", ".", "monotonic", "(", ")", "dumps", "=", "dumps_fn", "or", "self", ".", "_serializer", ".", "dumps", "tmp_pairs", "=", "[", "]", "for", "key", ",", "value", "in", "pairs", ":", "tmp_pairs", ".", "append", "(", "(", "self", ".", "build_key", "(", "key", ",", "namespace", "=", "namespace", ")", ",", "dumps", "(", "value", ")", ")", ")", "await", "self", ".", "_multi_set", "(", "tmp_pairs", ",", "ttl", "=", "self", ".", "_get_ttl", "(", "ttl", ")", ",", "_conn", "=", "_conn", ")", "logger", ".", "debug", "(", "\"MULTI_SET %s %d (%.4f)s\"", ",", "[", "key", "for", "key", ",", "value", "in", "tmp_pairs", "]", ",", "len", "(", "pairs", ")", ",", "time", ".", "monotonic", "(", ")", "-", "start", ",", ")", "return", "True" ]
Stores multiple values in the given keys. :param pairs: list of two-element iterables. The first element is the key and the second is the value :param ttl: int the expiration time in seconds. Due to memcached restrictions, use int if you want compatibility. In case you need milliseconds, redis and memory support float ttls :param dumps_fn: callable alternative to use as dumps function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout
[ "Stores", "multiple", "values", "in", "the", "given", "keys", "." ]
fdd282f37283ca04e22209f4d2ae4900f29e1688
https://github.com/argaen/aiocache/blob/fdd282f37283ca04e22209f4d2ae4900f29e1688/aiocache/base.py#L278-L308
train
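The batch variants documented above pair naturally; a short sketch (in-memory backend assumed):

```python
import asyncio

from aiocache import Cache

async def main():
    cache = Cache(Cache.MEMORY)
    await cache.multi_set([("a", 1), ("b", 2)], ttl=30)
    print(await cache.multi_get(["a", "b", "c"]))  # [1, 2, None]

asyncio.run(main())
```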
argaen/aiocache
aiocache/base.py
BaseCache.delete
async def delete(self, key, namespace=None, _conn=None): """ Deletes the given key. :param key: Key to be deleted :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: int number of deleted keys :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() ns_key = self.build_key(key, namespace=namespace) ret = await self._delete(ns_key, _conn=_conn) logger.debug("DELETE %s %d (%.4f)s", ns_key, ret, time.monotonic() - start) return ret
python
async def delete(self, key, namespace=None, _conn=None): """ Deletes the given key. :param key: Key to be deleted :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: int number of deleted keys :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() ns_key = self.build_key(key, namespace=namespace) ret = await self._delete(ns_key, _conn=_conn) logger.debug("DELETE %s %d (%.4f)s", ns_key, ret, time.monotonic() - start) return ret
[ "async", "def", "delete", "(", "self", ",", "key", ",", "namespace", "=", "None", ",", "_conn", "=", "None", ")", ":", "start", "=", "time", ".", "monotonic", "(", ")", "ns_key", "=", "self", ".", "build_key", "(", "key", ",", "namespace", "=", "namespace", ")", "ret", "=", "await", "self", ".", "_delete", "(", "ns_key", ",", "_conn", "=", "_conn", ")", "logger", ".", "debug", "(", "\"DELETE %s %d (%.4f)s\"", ",", "ns_key", ",", "ret", ",", "time", ".", "monotonic", "(", ")", "-", "start", ")", "return", "ret" ]
Deletes the given key. :param key: Key to be deleted :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: int number of deleted keys :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout
[ "Deletes", "the", "given", "key", "." ]
fdd282f37283ca04e22209f4d2ae4900f29e1688
https://github.com/argaen/aiocache/blob/fdd282f37283ca04e22209f4d2ae4900f29e1688/aiocache/base.py#L317-L332
train
argaen/aiocache
aiocache/base.py
BaseCache.exists
async def exists(self, key, namespace=None, _conn=None): """ Check key exists in the cache. :param key: str key to check :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True if key exists otherwise False :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() ns_key = self.build_key(key, namespace=namespace) ret = await self._exists(ns_key, _conn=_conn) logger.debug("EXISTS %s %d (%.4f)s", ns_key, ret, time.monotonic() - start) return ret
python
async def exists(self, key, namespace=None, _conn=None): """ Check key exists in the cache. :param key: str key to check :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True if key exists otherwise False :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() ns_key = self.build_key(key, namespace=namespace) ret = await self._exists(ns_key, _conn=_conn) logger.debug("EXISTS %s %d (%.4f)s", ns_key, ret, time.monotonic() - start) return ret
[ "async", "def", "exists", "(", "self", ",", "key", ",", "namespace", "=", "None", ",", "_conn", "=", "None", ")", ":", "start", "=", "time", ".", "monotonic", "(", ")", "ns_key", "=", "self", ".", "build_key", "(", "key", ",", "namespace", "=", "namespace", ")", "ret", "=", "await", "self", ".", "_exists", "(", "ns_key", ",", "_conn", "=", "_conn", ")", "logger", ".", "debug", "(", "\"EXISTS %s %d (%.4f)s\"", ",", "ns_key", ",", "ret", ",", "time", ".", "monotonic", "(", ")", "-", "start", ")", "return", "ret" ]
Check whether the key exists in the cache. :param key: str key to check :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True if the key exists, otherwise False :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout
[ "Check", "key", "exists", "in", "the", "cache", "." ]
fdd282f37283ca04e22209f4d2ae4900f29e1688
https://github.com/argaen/aiocache/blob/fdd282f37283ca04e22209f4d2ae4900f29e1688/aiocache/base.py#L341-L356
train
argaen/aiocache
aiocache/base.py
BaseCache.expire
async def expire(self, key, ttl, namespace=None, _conn=None): """ Set the ttl to the given key. By setting it to 0, it will disable it :param key: str key to expire :param ttl: int number of seconds for expiration. If 0, ttl is disabled :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True if set, False if key is not found :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() ns_key = self.build_key(key, namespace=namespace) ret = await self._expire(ns_key, ttl, _conn=_conn) logger.debug("EXPIRE %s %d (%.4f)s", ns_key, ret, time.monotonic() - start) return ret
python
async def expire(self, key, ttl, namespace=None, _conn=None): """ Set the ttl to the given key. By setting it to 0, it will disable it :param key: str key to expire :param ttl: int number of seconds for expiration. If 0, ttl is disabled :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True if set, False if key is not found :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() ns_key = self.build_key(key, namespace=namespace) ret = await self._expire(ns_key, ttl, _conn=_conn) logger.debug("EXPIRE %s %d (%.4f)s", ns_key, ret, time.monotonic() - start) return ret
[ "async", "def", "expire", "(", "self", ",", "key", ",", "ttl", ",", "namespace", "=", "None", ",", "_conn", "=", "None", ")", ":", "start", "=", "time", ".", "monotonic", "(", ")", "ns_key", "=", "self", ".", "build_key", "(", "key", ",", "namespace", "=", "namespace", ")", "ret", "=", "await", "self", ".", "_expire", "(", "ns_key", ",", "ttl", ",", "_conn", "=", "_conn", ")", "logger", ".", "debug", "(", "\"EXPIRE %s %d (%.4f)s\"", ",", "ns_key", ",", "ret", ",", "time", ".", "monotonic", "(", ")", "-", "start", ")", "return", "ret" ]
Set the ttl for the given key. Setting it to 0 disables it. :param key: str key to expire :param ttl: int number of seconds for expiration. If 0, ttl is disabled :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True if set, False if the key is not found :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout
[ "Set", "the", "ttl", "to", "the", "given", "key", ".", "By", "setting", "it", "to", "0", "it", "will", "disable", "it" ]
fdd282f37283ca04e22209f4d2ae4900f29e1688
https://github.com/argaen/aiocache/blob/fdd282f37283ca04e22209f4d2ae4900f29e1688/aiocache/base.py#L392-L408
train
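A short sketch of `expire`, covering both the "ttl disabled" and "key not found" cases documented above (in-memory backend assumed):

```python
import asyncio

from aiocache import Cache

async def main():
    cache = Cache(Cache.MEMORY)
    await cache.set("k", "v", ttl=300)
    await cache.expire("k", 0)             # ttl of 0 disables expiration
    print(await cache.expire("nope", 10))  # False: key not found

asyncio.run(main())
```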
argaen/aiocache
aiocache/base.py
BaseCache.clear
async def clear(self, namespace=None, _conn=None): """ Clears the cache in the cache namespace. If an alternative namespace is given, it will clear those ones instead. :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() ret = await self._clear(namespace, _conn=_conn) logger.debug("CLEAR %s %d (%.4f)s", namespace, ret, time.monotonic() - start) return ret
python
async def clear(self, namespace=None, _conn=None): """ Clears the cache in the cache namespace. If an alternative namespace is given, it will clear those ones instead. :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() ret = await self._clear(namespace, _conn=_conn) logger.debug("CLEAR %s %d (%.4f)s", namespace, ret, time.monotonic() - start) return ret
[ "async", "def", "clear", "(", "self", ",", "namespace", "=", "None", ",", "_conn", "=", "None", ")", ":", "start", "=", "time", ".", "monotonic", "(", ")", "ret", "=", "await", "self", ".", "_clear", "(", "namespace", ",", "_conn", "=", "_conn", ")", "logger", ".", "debug", "(", "\"CLEAR %s %d (%.4f)s\"", ",", "namespace", ",", "ret", ",", "time", ".", "monotonic", "(", ")", "-", "start", ")", "return", "ret" ]
Clears the cache in the cache namespace. If an alternative namespace is given, it will clear that one instead. :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: True :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout
[ "Clears", "the", "cache", "in", "the", "cache", "namespace", ".", "If", "an", "alternative", "namespace", "is", "given", "it", "will", "clear", "those", "ones", "instead", "." ]
fdd282f37283ca04e22209f4d2ae4900f29e1688
https://github.com/argaen/aiocache/blob/fdd282f37283ca04e22209f4d2ae4900f29e1688/aiocache/base.py#L417-L431
train
argaen/aiocache
aiocache/base.py
BaseCache.raw
async def raw(self, command, *args, _conn=None, **kwargs): """ Send the raw command to the underlying client. Note that by using this CMD you will lose compatibility with other backends. Due to limitations with aiomcache client, args have to be provided as bytes. For rest of backends, str. :param command: str with the command. :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: whatever the underlying client returns :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() ret = await self._raw( command, *args, encoding=self.serializer.encoding, _conn=_conn, **kwargs ) logger.debug("%s (%.4f)s", command, time.monotonic() - start) return ret
python
async def raw(self, command, *args, _conn=None, **kwargs): """ Send the raw command to the underlying client. Note that by using this CMD you will lose compatibility with other backends. Due to limitations with aiomcache client, args have to be provided as bytes. For rest of backends, str. :param command: str with the command. :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: whatever the underlying client returns :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() ret = await self._raw( command, *args, encoding=self.serializer.encoding, _conn=_conn, **kwargs ) logger.debug("%s (%.4f)s", command, time.monotonic() - start) return ret
[ "async", "def", "raw", "(", "self", ",", "command", ",", "*", "args", ",", "_conn", "=", "None", ",", "*", "*", "kwargs", ")", ":", "start", "=", "time", ".", "monotonic", "(", ")", "ret", "=", "await", "self", ".", "_raw", "(", "command", ",", "*", "args", ",", "encoding", "=", "self", ".", "serializer", ".", "encoding", ",", "_conn", "=", "_conn", ",", "*", "*", "kwargs", ")", "logger", ".", "debug", "(", "\"%s (%.4f)s\"", ",", "command", ",", "time", ".", "monotonic", "(", ")", "-", "start", ")", "return", "ret" ]
Send the raw command to the underlying client. Note that by using this command you will lose compatibility with other backends. Due to limitations of the aiomcache client, args have to be provided as bytes; for the rest of the backends, as str. :param command: str with the command. :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: whatever the underlying client returns :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout
[ "Send", "the", "raw", "command", "to", "the", "underlying", "client", ".", "Note", "that", "by", "using", "this", "CMD", "you", "will", "lose", "compatibility", "with", "other", "backends", "." ]
fdd282f37283ca04e22209f4d2ae4900f29e1688
https://github.com/argaen/aiocache/blob/fdd282f37283ca04e22209f4d2ae4900f29e1688/aiocache/base.py#L440-L459
train
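`raw` speaks the backend's native dialect, so commands are backend-specific. A sketch against the in-memory backend, where (to my reading of the backend) the command name resolves to a method on the underlying dict; treat the exact proxying as an assumption.

```python
import asyncio

from aiocache import Cache

async def main():
    cache = Cache(Cache.MEMORY)
    await cache.set("k", "v")
    # For the memory backend, "get" here is dict.get on the underlying
    # store, not BaseCache.get (assumption, see lead-in).
    print(await cache.raw("get", "k"))

asyncio.run(main())
```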
argaen/aiocache
aiocache/base.py
BaseCache.close
async def close(self, *args, _conn=None, **kwargs): """ Perform any resource clean up necessary to exit the program safely. After closing, cmd execution is still possible but you will have to close again before exiting. :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() ret = await self._close(*args, _conn=_conn, **kwargs) logger.debug("CLOSE (%.4f)s", time.monotonic() - start) return ret
python
async def close(self, *args, _conn=None, **kwargs): """ Perform any resource clean up necessary to exit the program safely. After closing, cmd execution is still possible but you will have to close again before exiting. :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout """ start = time.monotonic() ret = await self._close(*args, _conn=_conn, **kwargs) logger.debug("CLOSE (%.4f)s", time.monotonic() - start) return ret
[ "async", "def", "close", "(", "self", ",", "*", "args", ",", "_conn", "=", "None", ",", "*", "*", "kwargs", ")", ":", "start", "=", "time", ".", "monotonic", "(", ")", "ret", "=", "await", "self", ".", "_close", "(", "*", "args", ",", "_conn", "=", "_conn", ",", "*", "*", "kwargs", ")", "logger", ".", "debug", "(", "\"CLOSE (%.4f)s\"", ",", "time", ".", "monotonic", "(", ")", "-", "start", ")", "return", "ret" ]
Perform any resource cleanup necessary to exit the program safely. After closing, command execution is still possible but you will have to close again before exiting. :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout
[ "Perform", "any", "resource", "clean", "up", "necessary", "to", "exit", "the", "program", "safely", ".", "After", "closing", "cmd", "execution", "is", "still", "possible", "but", "you", "will", "have", "to", "close", "again", "before", "exiting", "." ]
fdd282f37283ca04e22209f4d2ae4900f29e1688
https://github.com/argaen/aiocache/blob/fdd282f37283ca04e22209f4d2ae4900f29e1688/aiocache/base.py#L465-L476
train
argaen/aiocache
aiocache/factory.py
CacheHandler.get
def get(self, alias: str): """ Retrieve cache identified by alias. Will return always the same instance If the cache was not instantiated yet, it will do it lazily the first time this is called. :param alias: str cache alias :return: cache instance """ try: return self._caches[alias] except KeyError: pass config = self.get_alias_config(alias) cache = _create_cache(**deepcopy(config)) self._caches[alias] = cache return cache
python
def get(self, alias: str): """ Retrieve cache identified by alias. Will return always the same instance If the cache was not instantiated yet, it will do it lazily the first time this is called. :param alias: str cache alias :return: cache instance """ try: return self._caches[alias] except KeyError: pass config = self.get_alias_config(alias) cache = _create_cache(**deepcopy(config)) self._caches[alias] = cache return cache
[ "def", "get", "(", "self", ",", "alias", ":", "str", ")", ":", "try", ":", "return", "self", ".", "_caches", "[", "alias", "]", "except", "KeyError", ":", "pass", "config", "=", "self", ".", "get_alias_config", "(", "alias", ")", "cache", "=", "_create_cache", "(", "*", "*", "deepcopy", "(", "config", ")", ")", "self", ".", "_caches", "[", "alias", "]", "=", "cache", "return", "cache" ]
Retrieve the cache identified by alias. Always returns the same instance. If the cache has not been instantiated yet, it is created lazily the first time this is called. :param alias: str cache alias :return: cache instance
[ "Retrieve", "cache", "identified", "by", "alias", ".", "Will", "return", "always", "the", "same", "instance" ]
fdd282f37283ca04e22209f4d2ae4900f29e1688
https://github.com/argaen/aiocache/blob/fdd282f37283ca04e22209f4d2ae4900f29e1688/aiocache/factory.py#L158-L176
train
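A sketch of the alias workflow: configure once, then fetch the lazily created singleton. The alias name and serializer choice below are arbitrary.

```python
from aiocache import caches

caches.set_config({
    "default": {
        "cache": "aiocache.SimpleMemoryCache",
        "serializer": {"class": "aiocache.serializers.StringSerializer"},
    }
})

cache = caches.get("default")          # instantiated lazily on first use
assert cache is caches.get("default")  # same instance on every call
```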
argaen/aiocache
aiocache/factory.py
CacheHandler.create
def create(self, alias=None, cache=None, **kwargs): """ Create a new cache. Either alias or cache params are required. You can use kwargs to pass extra parameters to configure the cache. .. deprecated:: 0.11.0 Only creating a cache passing an alias is supported. If you want to create a cache passing explicit cache and kwargs use ``aiocache.Cache``. :param alias: str alias to pull configuration from :param cache: str or class cache class to use for creating the new cache (when no alias is used) :return: New cache instance """ if alias: config = self.get_alias_config(alias) elif cache: warnings.warn( "Creating a cache with an explicit config is deprecated, use 'aiocache.Cache'", DeprecationWarning, ) config = {"cache": cache} else: raise TypeError("create call needs to receive an alias or a cache") cache = _create_cache(**{**config, **kwargs}) return cache
python
def create(self, alias=None, cache=None, **kwargs): """ Create a new cache. Either alias or cache params are required. You can use kwargs to pass extra parameters to configure the cache. .. deprecated:: 0.11.0 Only creating a cache passing an alias is supported. If you want to create a cache passing explicit cache and kwargs use ``aiocache.Cache``. :param alias: str alias to pull configuration from :param cache: str or class cache class to use for creating the new cache (when no alias is used) :return: New cache instance """ if alias: config = self.get_alias_config(alias) elif cache: warnings.warn( "Creating a cache with an explicit config is deprecated, use 'aiocache.Cache'", DeprecationWarning, ) config = {"cache": cache} else: raise TypeError("create call needs to receive an alias or a cache") cache = _create_cache(**{**config, **kwargs}) return cache
[ "def", "create", "(", "self", ",", "alias", "=", "None", ",", "cache", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "alias", ":", "config", "=", "self", ".", "get_alias_config", "(", "alias", ")", "elif", "cache", ":", "warnings", ".", "warn", "(", "\"Creating a cache with an explicit config is deprecated, use 'aiocache.Cache'\"", ",", "DeprecationWarning", ",", ")", "config", "=", "{", "\"cache\"", ":", "cache", "}", "else", ":", "raise", "TypeError", "(", "\"create call needs to receive an alias or a cache\"", ")", "cache", "=", "_create_cache", "(", "*", "*", "{", "*", "*", "config", ",", "*", "*", "kwargs", "}", ")", "return", "cache" ]
Create a new cache. Either the alias or the cache param is required. You can use kwargs to pass extra parameters to configure the cache. .. deprecated:: 0.11.0 Only creating a cache by passing an alias is supported. If you want to create a cache by passing an explicit cache class and kwargs, use ``aiocache.Cache``. :param alias: str alias to pull configuration from :param cache: str or class cache class to use for creating the new cache (when no alias is used) :return: New cache instance
[ "Create", "a", "new", "cache", ".", "Either", "alias", "or", "cache", "params", "are", "required", ".", "You", "can", "use", "kwargs", "to", "pass", "extra", "parameters", "to", "configure", "the", "cache", "." ]
fdd282f37283ca04e22209f4d2ae4900f29e1688
https://github.com/argaen/aiocache/blob/fdd282f37283ca04e22209f4d2ae4900f29e1688/aiocache/factory.py#L178-L203
train
Azure/msrest-for-python
msrest/polling/async_poller.py
async_poller
async def async_poller(client, initial_response, deserialization_callback, polling_method): """Async Poller for long running operations. :param client: A msrest service client. Can be a SDK client and it will be casted to a ServiceClient. :type client: msrest.service_client.ServiceClient :param initial_response: The initial call response :type initial_response: msrest.universal_http.ClientResponse or msrest.pipeline.ClientRawResponse :param deserialization_callback: A callback that takes a Response and return a deserialized object. If a subclass of Model is given, this passes "deserialize" as callback. :type deserialization_callback: callable or msrest.serialization.Model :param polling_method: The polling strategy to adopt :type polling_method: msrest.polling.PollingMethod """ try: client = client if isinstance(client, ServiceClientAsync) else client._client except AttributeError: raise ValueError("Poller client parameter must be a low-level msrest Service Client or a SDK client.") response = initial_response.response if isinstance(initial_response, ClientRawResponse) else initial_response if isinstance(deserialization_callback, type) and issubclass(deserialization_callback, Model): deserialization_callback = deserialization_callback.deserialize # Might raise a CloudError polling_method.initialize(client, response, deserialization_callback) await polling_method.run() return polling_method.resource()
python
async def async_poller(client, initial_response, deserialization_callback, polling_method): """Async Poller for long running operations. :param client: A msrest service client. Can be a SDK client and it will be casted to a ServiceClient. :type client: msrest.service_client.ServiceClient :param initial_response: The initial call response :type initial_response: msrest.universal_http.ClientResponse or msrest.pipeline.ClientRawResponse :param deserialization_callback: A callback that takes a Response and return a deserialized object. If a subclass of Model is given, this passes "deserialize" as callback. :type deserialization_callback: callable or msrest.serialization.Model :param polling_method: The polling strategy to adopt :type polling_method: msrest.polling.PollingMethod """ try: client = client if isinstance(client, ServiceClientAsync) else client._client except AttributeError: raise ValueError("Poller client parameter must be a low-level msrest Service Client or a SDK client.") response = initial_response.response if isinstance(initial_response, ClientRawResponse) else initial_response if isinstance(deserialization_callback, type) and issubclass(deserialization_callback, Model): deserialization_callback = deserialization_callback.deserialize # Might raise a CloudError polling_method.initialize(client, response, deserialization_callback) await polling_method.run() return polling_method.resource()
[ "async", "def", "async_poller", "(", "client", ",", "initial_response", ",", "deserialization_callback", ",", "polling_method", ")", ":", "try", ":", "client", "=", "client", "if", "isinstance", "(", "client", ",", "ServiceClientAsync", ")", "else", "client", ".", "_client", "except", "AttributeError", ":", "raise", "ValueError", "(", "\"Poller client parameter must be a low-level msrest Service Client or a SDK client.\"", ")", "response", "=", "initial_response", ".", "response", "if", "isinstance", "(", "initial_response", ",", "ClientRawResponse", ")", "else", "initial_response", "if", "isinstance", "(", "deserialization_callback", ",", "type", ")", "and", "issubclass", "(", "deserialization_callback", ",", "Model", ")", ":", "deserialization_callback", "=", "deserialization_callback", ".", "deserialize", "# Might raise a CloudError", "polling_method", ".", "initialize", "(", "client", ",", "response", ",", "deserialization_callback", ")", "await", "polling_method", ".", "run", "(", ")", "return", "polling_method", ".", "resource", "(", ")" ]
Async Poller for long-running operations. :param client: A msrest service client. Can be an SDK client, in which case it will be cast to a ServiceClient. :type client: msrest.service_client.ServiceClient :param initial_response: The initial call response :type initial_response: msrest.universal_http.ClientResponse or msrest.pipeline.ClientRawResponse :param deserialization_callback: A callback that takes a Response and returns a deserialized object. If a subclass of Model is given, this passes "deserialize" as callback. :type deserialization_callback: callable or msrest.serialization.Model :param polling_method: The polling strategy to adopt :type polling_method: msrest.polling.PollingMethod
[ "Async", "Poller", "for", "long", "running", "operations", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/polling/async_poller.py#L62-L88
train
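A schematic call-site sketch for the poller above. The identity lambda stands in for a real deserialization callback, and `AsyncNoPolling` is assumed to be the no-op strategy shipped in the same module; `client` and `initial_response` come from whatever generated SDK is in use.

```python
from msrest.polling import AsyncNoPolling, async_poller

async def wait_for_operation(client, initial_response):
    # A msrest Model subclass would also work as the callback,
    # per the docstring above.
    return await async_poller(
        client,
        initial_response,
        lambda response: response,  # identity callback for the sketch
        AsyncNoPolling(),           # assumed no-op polling strategy
    )
```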
Azure/msrest-for-python
msrest/pipeline/requests.py
RequestsPatchSession.send
def send(self, request, **kwargs): """Patch the current session with Request level operation config. This is deprecated, we shouldn't patch the session with arguments at the Request, and "config" should be used. """ session = request.context.session old_max_redirects = None if 'max_redirects' in kwargs: warnings.warn("max_redirects in operation kwargs is deprecated, use config.redirect_policy instead", DeprecationWarning) old_max_redirects = session.max_redirects session.max_redirects = int(kwargs['max_redirects']) old_trust_env = None if 'use_env_proxies' in kwargs: warnings.warn("use_env_proxies in operation kwargs is deprecated, use config.proxies instead", DeprecationWarning) old_trust_env = session.trust_env session.trust_env = bool(kwargs['use_env_proxies']) old_retries = {} if 'retries' in kwargs: warnings.warn("retries in operation kwargs is deprecated, use config.retry_policy instead", DeprecationWarning) max_retries = kwargs['retries'] for protocol in self._protocols: old_retries[protocol] = session.adapters[protocol].max_retries session.adapters[protocol].max_retries = max_retries try: return self.next.send(request, **kwargs) finally: if old_max_redirects: session.max_redirects = old_max_redirects if old_trust_env: session.trust_env = old_trust_env if old_retries: for protocol in self._protocols: session.adapters[protocol].max_retries = old_retries[protocol]
python
def send(self, request, **kwargs): """Patch the current session with Request level operation config. This is deprecated, we shouldn't patch the session with arguments at the Request, and "config" should be used. """ session = request.context.session old_max_redirects = None if 'max_redirects' in kwargs: warnings.warn("max_redirects in operation kwargs is deprecated, use config.redirect_policy instead", DeprecationWarning) old_max_redirects = session.max_redirects session.max_redirects = int(kwargs['max_redirects']) old_trust_env = None if 'use_env_proxies' in kwargs: warnings.warn("use_env_proxies in operation kwargs is deprecated, use config.proxies instead", DeprecationWarning) old_trust_env = session.trust_env session.trust_env = bool(kwargs['use_env_proxies']) old_retries = {} if 'retries' in kwargs: warnings.warn("retries in operation kwargs is deprecated, use config.retry_policy instead", DeprecationWarning) max_retries = kwargs['retries'] for protocol in self._protocols: old_retries[protocol] = session.adapters[protocol].max_retries session.adapters[protocol].max_retries = max_retries try: return self.next.send(request, **kwargs) finally: if old_max_redirects: session.max_redirects = old_max_redirects if old_trust_env: session.trust_env = old_trust_env if old_retries: for protocol in self._protocols: session.adapters[protocol].max_retries = old_retries[protocol]
[ "def", "send", "(", "self", ",", "request", ",", "*", "*", "kwargs", ")", ":", "session", "=", "request", ".", "context", ".", "session", "old_max_redirects", "=", "None", "if", "'max_redirects'", "in", "kwargs", ":", "warnings", ".", "warn", "(", "\"max_redirects in operation kwargs is deprecated, use config.redirect_policy instead\"", ",", "DeprecationWarning", ")", "old_max_redirects", "=", "session", ".", "max_redirects", "session", ".", "max_redirects", "=", "int", "(", "kwargs", "[", "'max_redirects'", "]", ")", "old_trust_env", "=", "None", "if", "'use_env_proxies'", "in", "kwargs", ":", "warnings", ".", "warn", "(", "\"use_env_proxies in operation kwargs is deprecated, use config.proxies instead\"", ",", "DeprecationWarning", ")", "old_trust_env", "=", "session", ".", "trust_env", "session", ".", "trust_env", "=", "bool", "(", "kwargs", "[", "'use_env_proxies'", "]", ")", "old_retries", "=", "{", "}", "if", "'retries'", "in", "kwargs", ":", "warnings", ".", "warn", "(", "\"retries in operation kwargs is deprecated, use config.retry_policy instead\"", ",", "DeprecationWarning", ")", "max_retries", "=", "kwargs", "[", "'retries'", "]", "for", "protocol", "in", "self", ".", "_protocols", ":", "old_retries", "[", "protocol", "]", "=", "session", ".", "adapters", "[", "protocol", "]", ".", "max_retries", "session", ".", "adapters", "[", "protocol", "]", ".", "max_retries", "=", "max_retries", "try", ":", "return", "self", ".", "next", ".", "send", "(", "request", ",", "*", "*", "kwargs", ")", "finally", ":", "if", "old_max_redirects", ":", "session", ".", "max_redirects", "=", "old_max_redirects", "if", "old_trust_env", ":", "session", ".", "trust_env", "=", "old_trust_env", "if", "old_retries", ":", "for", "protocol", "in", "self", ".", "_protocols", ":", "session", ".", "adapters", "[", "protocol", "]", ".", "max_retries", "=", "old_retries", "[", "protocol", "]" ]
Patch the current session with request-level operation config. This is deprecated; we shouldn't patch the session with arguments at the request level, and "config" should be used instead.
[ "Patch", "the", "current", "session", "with", "Request", "level", "operation", "config", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/pipeline/requests.py#L105-L147
train
Azure/msrest-for-python
msrest/service_client.py
_ServiceClientCore._request
def _request(self, method, url, params, headers, content, form_content): # type: (str, str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest """Create ClientRequest object. :param str url: URL for the request. :param dict params: URL query parameters. :param dict headers: Headers :param dict form_content: Form content """ request = ClientRequest(method, self.format_url(url)) if params: request.format_parameters(params) if headers: request.headers.update(headers) # All requests should contain a Accept. # This should be done by Autorest, but wasn't in old Autorest # Force it for now, but might deprecate it later. if "Accept" not in request.headers: _LOGGER.debug("Accept header absent and forced to application/json") request.headers['Accept'] = 'application/json' if content is not None: request.add_content(content) if form_content: request.add_formdata(form_content) return request
python
def _request(self, method, url, params, headers, content, form_content): # type: (str, str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest """Create ClientRequest object. :param str url: URL for the request. :param dict params: URL query parameters. :param dict headers: Headers :param dict form_content: Form content """ request = ClientRequest(method, self.format_url(url)) if params: request.format_parameters(params) if headers: request.headers.update(headers) # All requests should contain a Accept. # This should be done by Autorest, but wasn't in old Autorest # Force it for now, but might deprecate it later. if "Accept" not in request.headers: _LOGGER.debug("Accept header absent and forced to application/json") request.headers['Accept'] = 'application/json' if content is not None: request.add_content(content) if form_content: request.add_formdata(form_content) return request
[ "def", "_request", "(", "self", ",", "method", ",", "url", ",", "params", ",", "headers", ",", "content", ",", "form_content", ")", ":", "# type: (str, str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest", "request", "=", "ClientRequest", "(", "method", ",", "self", ".", "format_url", "(", "url", ")", ")", "if", "params", ":", "request", ".", "format_parameters", "(", "params", ")", "if", "headers", ":", "request", ".", "headers", ".", "update", "(", "headers", ")", "# All requests should contain a Accept.", "# This should be done by Autorest, but wasn't in old Autorest", "# Force it for now, but might deprecate it later.", "if", "\"Accept\"", "not", "in", "request", ".", "headers", ":", "_LOGGER", ".", "debug", "(", "\"Accept header absent and forced to application/json\"", ")", "request", ".", "headers", "[", "'Accept'", "]", "=", "'application/json'", "if", "content", "is", "not", "None", ":", "request", ".", "add_content", "(", "content", ")", "if", "form_content", ":", "request", ".", "add_formdata", "(", "form_content", ")", "return", "request" ]
Create ClientRequest object. :param str url: URL for the request. :param dict params: URL query parameters. :param dict headers: Headers :param dict form_content: Form content
[ "Create", "ClientRequest", "object", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/service_client.py#L99-L128
train
Azure/msrest-for-python
msrest/service_client.py
_ServiceClientCore.format_url
def format_url(self, url, **kwargs): # type: (str, Any) -> str """Format request URL with the client base URL, unless the supplied URL is already absolute. :param str url: The request URL to be formatted if necessary. """ url = url.format(**kwargs) parsed = urlparse(url) if not parsed.scheme or not parsed.netloc: url = url.lstrip('/') base = self.config.base_url.format(**kwargs).rstrip('/') url = urljoin(base + '/', url) return url
python
def format_url(self, url, **kwargs): # type: (str, Any) -> str """Format request URL with the client base URL, unless the supplied URL is already absolute. :param str url: The request URL to be formatted if necessary. """ url = url.format(**kwargs) parsed = urlparse(url) if not parsed.scheme or not parsed.netloc: url = url.lstrip('/') base = self.config.base_url.format(**kwargs).rstrip('/') url = urljoin(base + '/', url) return url
[ "def", "format_url", "(", "self", ",", "url", ",", "*", "*", "kwargs", ")", ":", "# type: (str, Any) -> str", "url", "=", "url", ".", "format", "(", "*", "*", "kwargs", ")", "parsed", "=", "urlparse", "(", "url", ")", "if", "not", "parsed", ".", "scheme", "or", "not", "parsed", ".", "netloc", ":", "url", "=", "url", ".", "lstrip", "(", "'/'", ")", "base", "=", "self", ".", "config", ".", "base_url", ".", "format", "(", "*", "*", "kwargs", ")", ".", "rstrip", "(", "'/'", ")", "url", "=", "urljoin", "(", "base", "+", "'/'", ",", "url", ")", "return", "url" ]
Format request URL with the client base URL, unless the supplied URL is already absolute. :param str url: The request URL to be formatted if necessary.
[ "Format", "request", "URL", "with", "the", "client", "base", "URL", "unless", "the", "supplied", "URL", "is", "already", "absolute", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/service_client.py#L144-L157
train
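A standard-library-only replay of the URL-joining logic above; here base_url stands in for self.config.base_url and the account name is made up:

from urllib.parse import urljoin, urlparse

def format_url(base_url, url, **kwargs):
    url = url.format(**kwargs)  # fill in path parameters first
    parsed = urlparse(url)
    if not parsed.scheme or not parsed.netloc:
        # Relative URL: join it onto the (also formatted) base URL.
        url = urljoin(base_url.format(**kwargs).rstrip('/') + '/', url.lstrip('/'))
    return url

print(format_url('https://{account}.blob.core.windows.net', '/containers', account='demo'))
# https://demo.blob.core.windows.net/containers
print(format_url('https://example.org', 'https://other.org/abs'))
# https://other.org/abs  (absolute URLs pass through untouched)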
Azure/msrest-for-python
msrest/service_client.py
_ServiceClientCore.get
def get(self, url, params=None, headers=None, content=None, form_content=None): # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest """Create a GET request object. :param str url: The request URL. :param dict params: Request URL parameters. :param dict headers: Headers :param dict form_content: Form content """ request = self._request('GET', url, params, headers, content, form_content) request.method = 'GET' return request
python
def get(self, url, params=None, headers=None, content=None, form_content=None): # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest """Create a GET request object. :param str url: The request URL. :param dict params: Request URL parameters. :param dict headers: Headers :param dict form_content: Form content """ request = self._request('GET', url, params, headers, content, form_content) request.method = 'GET' return request
[ "def", "get", "(", "self", ",", "url", ",", "params", "=", "None", ",", "headers", "=", "None", ",", "content", "=", "None", ",", "form_content", "=", "None", ")", ":", "# type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest", "request", "=", "self", ".", "_request", "(", "'GET'", ",", "url", ",", "params", ",", "headers", ",", "content", ",", "form_content", ")", "request", ".", "method", "=", "'GET'", "return", "request" ]
Create a GET request object. :param str url: The request URL. :param dict params: Request URL parameters. :param dict headers: Headers :param dict form_content: Form content
[ "Create", "a", "GET", "request", "object", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/service_client.py#L159-L170
train
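A hypothetical call, assuming client is an already-configured service client exposing the method above; the path and query values are illustrative only:

request = client.get('/widgets', params={'api-version': '2019-01-01'})
# request.method == 'GET' and the URL now carries the query string;
# nothing has been sent yet - the request still has to go through send().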
Azure/msrest-for-python
msrest/service_client.py
_ServiceClientCore.put
def put(self, url, params=None, headers=None, content=None, form_content=None): # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest """Create a PUT request object. :param str url: The request URL. :param dict params: Request URL parameters. :param dict headers: Headers :param dict form_content: Form content """ request = self._request('PUT', url, params, headers, content, form_content) return request
python
def put(self, url, params=None, headers=None, content=None, form_content=None): # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest """Create a PUT request object. :param str url: The request URL. :param dict params: Request URL parameters. :param dict headers: Headers :param dict form_content: Form content """ request = self._request('PUT', url, params, headers, content, form_content) return request
[ "def", "put", "(", "self", ",", "url", ",", "params", "=", "None", ",", "headers", "=", "None", ",", "content", "=", "None", ",", "form_content", "=", "None", ")", ":", "# type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest", "request", "=", "self", ".", "_request", "(", "'PUT'", ",", "url", ",", "params", ",", "headers", ",", "content", ",", "form_content", ")", "return", "request" ]
Create a PUT request object. :param str url: The request URL. :param dict params: Request URL parameters. :param dict headers: Headers :param dict form_content: Form content
[ "Create", "a", "PUT", "request", "object", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/service_client.py#L172-L182
train
Azure/msrest-for-python
msrest/service_client.py
ServiceClient.send_formdata
def send_formdata(self, request, headers=None, content=None, **config):
        """Send data as a multipart form-data request.
        We only deal with file-like objects or strings at this point.
        The request is not yet streamed.

        This method is deprecated, and shouldn't be used anymore.

        :param ClientRequest request: The request object to be sent.
        :param dict headers: Any headers to add to the request.
        :param dict content: Dictionary of the fields of the formdata.
        :param config: Any specific config overrides.
        """
        request.headers = headers
        request.add_formdata(content)
        return self.send(request, **config)
python
def send_formdata(self, request, headers=None, content=None, **config):
        """Send data as a multipart form-data request.
        We only deal with file-like objects or strings at this point.
        The request is not yet streamed.

        This method is deprecated, and shouldn't be used anymore.

        :param ClientRequest request: The request object to be sent.
        :param dict headers: Any headers to add to the request.
        :param dict content: Dictionary of the fields of the formdata.
        :param config: Any specific config overrides.
        """
        request.headers = headers
        request.add_formdata(content)
        return self.send(request, **config)
[ "def", "send_formdata", "(", "self", ",", "request", ",", "headers", "=", "None", ",", "content", "=", "None", ",", "*", "*", "config", ")", ":", "request", ".", "headers", "=", "headers", "request", ".", "add_formdata", "(", "content", ")", "return", "self", ".", "send", "(", "request", ",", "*", "*", "config", ")" ]
Send data as a multipart form-data request.
We only deal with file-like objects or strings at this point.
The request is not yet streamed.

This method is deprecated, and shouldn't be used anymore.

:param ClientRequest request: The request object to be sent.
:param dict headers: Any headers to add to the request.
:param dict content: Dictionary of the fields of the formdata.
:param config: Any specific config overrides.
[ "Send", "data", "as", "a", "multipart", "form", "-", "data", "request", ".", "We", "only", "deal", "with", "file", "-", "like", "objects", "or", "strings", "at", "this", "point", ".", "The", "requests", "is", "not", "yet", "streamed", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/service_client.py#L302-L316
train
Azure/msrest-for-python
msrest/service_client.py
ServiceClient.add_header
def add_header(self, header, value): # type: (str, str) -> None """Add a persistent header - this header will be applied to all requests sent during the current client session. .. deprecated:: 0.5.0 Use config.headers instead :param str header: The header name. :param str value: The header value. """ warnings.warn("Private attribute _client.add_header is deprecated. Use config.headers instead.", DeprecationWarning) self.config.headers[header] = value
python
def add_header(self, header, value): # type: (str, str) -> None """Add a persistent header - this header will be applied to all requests sent during the current client session. .. deprecated:: 0.5.0 Use config.headers instead :param str header: The header name. :param str value: The header value. """ warnings.warn("Private attribute _client.add_header is deprecated. Use config.headers instead.", DeprecationWarning) self.config.headers[header] = value
[ "def", "add_header", "(", "self", ",", "header", ",", "value", ")", ":", "# type: (str, str) -> None", "warnings", ".", "warn", "(", "\"Private attribute _client.add_header is deprecated. Use config.headers instead.\"", ",", "DeprecationWarning", ")", "self", ".", "config", ".", "headers", "[", "header", "]", "=", "value" ]
Add a persistent header - this header will be applied to all requests sent during the current client session. .. deprecated:: 0.5.0 Use config.headers instead :param str header: The header name. :param str value: The header value.
[ "Add", "a", "persistent", "header", "-", "this", "header", "will", "be", "applied", "to", "all", "requests", "sent", "during", "the", "current", "client", "session", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/service_client.py#L375-L388
train
Azure/msrest-for-python
msrest/authentication.py
ApiKeyCredentials.signed_session
def signed_session(self, session=None): # type: (Optional[requests.Session]) -> requests.Session """Create requests session with ApiKey. If a session object is provided, configure it directly. Otherwise, create a new session and return it. :param session: The session to configure for authentication :type session: requests.Session :rtype: requests.Session """ session = super(ApiKeyCredentials, self).signed_session(session) session.headers.update(self.in_headers) try: # params is actually Union[bytes, MutableMapping[Text, Text]] session.params.update(self.in_query) # type: ignore except AttributeError: # requests.params can be bytes raise ValueError("session.params must be a dict to be used in ApiKeyCredentials") return session
python
def signed_session(self, session=None): # type: (Optional[requests.Session]) -> requests.Session """Create requests session with ApiKey. If a session object is provided, configure it directly. Otherwise, create a new session and return it. :param session: The session to configure for authentication :type session: requests.Session :rtype: requests.Session """ session = super(ApiKeyCredentials, self).signed_session(session) session.headers.update(self.in_headers) try: # params is actually Union[bytes, MutableMapping[Text, Text]] session.params.update(self.in_query) # type: ignore except AttributeError: # requests.params can be bytes raise ValueError("session.params must be a dict to be used in ApiKeyCredentials") return session
[ "def", "signed_session", "(", "self", ",", "session", "=", "None", ")", ":", "# type: (Optional[requests.Session]) -> requests.Session", "session", "=", "super", "(", "ApiKeyCredentials", ",", "self", ")", ".", "signed_session", "(", "session", ")", "session", ".", "headers", ".", "update", "(", "self", ".", "in_headers", ")", "try", ":", "# params is actually Union[bytes, MutableMapping[Text, Text]]", "session", ".", "params", ".", "update", "(", "self", ".", "in_query", ")", "# type: ignore", "except", "AttributeError", ":", "# requests.params can be bytes", "raise", "ValueError", "(", "\"session.params must be a dict to be used in ApiKeyCredentials\"", ")", "return", "session" ]
Create requests session with ApiKey. If a session object is provided, configure it directly. Otherwise, create a new session and return it. :param session: The session to configure for authentication :type session: requests.Session :rtype: requests.Session
[ "Create", "requests", "session", "with", "ApiKey", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/authentication.py#L197-L215
train
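A short usage sketch for the credential above; the header name here is only an illustration, not a required value:

from msrest.authentication import ApiKeyCredentials

creds = ApiKeyCredentials(in_headers={'Ocp-Apim-Subscription-Key': 'my-key'})
session = creds.signed_session()
# Every request sent through this session now carries the API key header.
assert session.headers['Ocp-Apim-Subscription-Key'] == 'my-key'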
Azure/msrest-for-python
msrest/pipeline/universal.py
RawDeserializer.deserialize_from_text
def deserialize_from_text(cls, data, content_type=None):
        # type: (Optional[Union[AnyStr, IO]], Optional[str]) -> Any
        """Decode data according to content-type.

        Accepts a stream of data as well, but it will be loaded at once
        into memory for now.

        If no content-type is given, the string version (not bytes, not stream)
        is returned.

        :param data: Input, could be bytes or stream (will be decoded with UTF8) or text
        :type data: str or bytes or IO
        :param str content_type: The content type.
        """
        if hasattr(data, 'read'):
            # Assume a stream
            data = cast(IO, data).read()

        if isinstance(data, bytes):
            data_as_str = data.decode(encoding='utf-8-sig')
        else:
            # Explain to mypy the correct type.
            data_as_str = cast(str, data)

            # Remove Byte Order Mark if present in string
            data_as_str = data_as_str.lstrip(_BOM)

        if content_type is None:
            return data

        if content_type in cls.JSON_MIMETYPES:
            try:
                return json.loads(data_as_str)
            except ValueError as err:
                raise DeserializationError("JSON is invalid: {}".format(err), err)
        elif "xml" in (content_type or []):
            try:
                return ET.fromstring(data_as_str)
            except ET.ParseError:
                # It might be because the server has an issue, and returned JSON with
                # content-type XML....
                # So let's try a JSON load, and if it's still broken
                # let's flow the initial exception
                def _json_attemp(data):
                    try:
                        return True, json.loads(data)
                    except ValueError:
                        return False, None # Don't care about this one
                success, json_result = _json_attemp(data)
                if success:
                    return json_result
                # If i'm here, it's not JSON, it's not XML, let's scream
                # and raise the last context in this block (the XML exception)
                # The function hack is because Py2.7 messes up with exception
                # context otherwise.
                _LOGGER.critical("Wasn't XML not JSON, failing")
                raise_with_traceback(DeserializationError, "XML is invalid")
        raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
python
def deserialize_from_text(cls, data, content_type=None):
        # type: (Optional[Union[AnyStr, IO]], Optional[str]) -> Any
        """Decode data according to content-type.

        Accepts a stream of data as well, but it will be loaded at once
        into memory for now.

        If no content-type is given, the string version (not bytes, not stream)
        is returned.

        :param data: Input, could be bytes or stream (will be decoded with UTF8) or text
        :type data: str or bytes or IO
        :param str content_type: The content type.
        """
        if hasattr(data, 'read'):
            # Assume a stream
            data = cast(IO, data).read()

        if isinstance(data, bytes):
            data_as_str = data.decode(encoding='utf-8-sig')
        else:
            # Explain to mypy the correct type.
            data_as_str = cast(str, data)

            # Remove Byte Order Mark if present in string
            data_as_str = data_as_str.lstrip(_BOM)

        if content_type is None:
            return data

        if content_type in cls.JSON_MIMETYPES:
            try:
                return json.loads(data_as_str)
            except ValueError as err:
                raise DeserializationError("JSON is invalid: {}".format(err), err)
        elif "xml" in (content_type or []):
            try:
                return ET.fromstring(data_as_str)
            except ET.ParseError:
                # It might be because the server has an issue, and returned JSON with
                # content-type XML....
                # So let's try a JSON load, and if it's still broken
                # let's flow the initial exception
                def _json_attemp(data):
                    try:
                        return True, json.loads(data)
                    except ValueError:
                        return False, None # Don't care about this one
                success, json_result = _json_attemp(data)
                if success:
                    return json_result
                # If i'm here, it's not JSON, it's not XML, let's scream
                # and raise the last context in this block (the XML exception)
                # The function hack is because Py2.7 messes up with exception
                # context otherwise.
                _LOGGER.critical("Wasn't XML not JSON, failing")
                raise_with_traceback(DeserializationError, "XML is invalid")
        raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
[ "def", "deserialize_from_text", "(", "cls", ",", "data", ",", "content_type", "=", "None", ")", ":", "# type: (Optional[Union[AnyStr, IO]], Optional[str]) -> Any", "if", "hasattr", "(", "data", ",", "'read'", ")", ":", "# Assume a stream", "data", "=", "cast", "(", "IO", ",", "data", ")", ".", "read", "(", ")", "if", "isinstance", "(", "data", ",", "bytes", ")", ":", "data_as_str", "=", "data", ".", "decode", "(", "encoding", "=", "'utf-8-sig'", ")", "else", ":", "# Explain to mypy the correct type.", "data_as_str", "=", "cast", "(", "str", ",", "data", ")", "# Remove Byte Order Mark if present in string", "data_as_str", "=", "data_as_str", ".", "lstrip", "(", "_BOM", ")", "if", "content_type", "is", "None", ":", "return", "data", "if", "content_type", "in", "cls", ".", "JSON_MIMETYPES", ":", "try", ":", "return", "json", ".", "loads", "(", "data_as_str", ")", "except", "ValueError", "as", "err", ":", "raise", "DeserializationError", "(", "\"JSON is invalid: {}\"", ".", "format", "(", "err", ")", ",", "err", ")", "elif", "\"xml\"", "in", "(", "content_type", "or", "[", "]", ")", ":", "try", ":", "return", "ET", ".", "fromstring", "(", "data_as_str", ")", "except", "ET", ".", "ParseError", ":", "# It might be because the server has an issue, and returned JSON with", "# content-type XML....", "# So let's try a JSON load, and if it's still broken", "# let's flow the initial exception", "def", "_json_attemp", "(", "data", ")", ":", "try", ":", "return", "True", ",", "json", ".", "loads", "(", "data", ")", "except", "ValueError", ":", "return", "False", ",", "None", "# Don't care about this one", "success", ",", "json_result", "=", "_json_attemp", "(", "data", ")", "if", "success", ":", "return", "json_result", "# If i'm here, it's not JSON, it's not XML, let's scream", "# and raise the last context in this block (the XML exception)", "# The function hack is because Py2.7 messes up with exception", "# context otherwise.", "_LOGGER", ".", "critical", "(", "\"Wasn't XML not JSON, failing\"", ")", "raise_with_traceback", "(", "DeserializationError", ",", "\"XML is invalid\"", ")", "raise", "DeserializationError", "(", "\"Cannot deserialize content-type: {}\"", ".", "format", "(", "content_type", ")", ")" ]
Decode data according to content-type.

Accepts a stream of data as well, but it will be loaded at once
into memory for now.

If no content-type is given, the string version (not bytes, not stream)
is returned.

:param data: Input, could be bytes or stream (will be decoded with UTF8) or text
:type data: str or bytes or IO
:param str content_type: The content type.
[ "Decode", "data", "according", "to", "content", "-", "type", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/pipeline/universal.py#L140-L195
train
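A quick demonstration of the content-type dispatch above, assuming the classmethod is importable as shown:

from msrest.pipeline.universal import RawDeserializer

# JSON mime types go through json.loads (bytes and BOM handled first):
print(RawDeserializer.deserialize_from_text(b'{"name": "demo"}', 'application/json'))
# {'name': 'demo'}

# Anything with "xml" in the content type goes through ElementTree:
root = RawDeserializer.deserialize_from_text('<a><b/></a>', 'application/xml')
print(root.tag)  # a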
Azure/msrest-for-python
msrest/pipeline/universal.py
RawDeserializer.deserialize_from_http_generics
def deserialize_from_http_generics(cls, body_bytes, headers):
        # type: (Optional[Union[AnyStr, IO]], Mapping) -> Any
        """Deserialize from HTTP response.

        Use bytes and headers so as NOT to depend on any specific
        implementation (requests/aiohttp/etc.).
        Headers will be tested for "content-type".
        """
        # Try to use content-type from headers if available
        content_type = None
        if 'content-type' in headers:
            content_type = headers['content-type'].split(";")[0].strip().lower()
        # Ouch, this server did not declare what it sent...
        # Let's guess it's JSON...
        # Also, since Autorest was considering that an empty body was a valid JSON,
        # need that test as well....
        else:
            content_type = "application/json"
        if body_bytes:
            return cls.deserialize_from_text(body_bytes, content_type)
        return None
python
def deserialize_from_http_generics(cls, body_bytes, headers):
        # type: (Optional[Union[AnyStr, IO]], Mapping) -> Any
        """Deserialize from HTTP response.

        Use bytes and headers so as NOT to depend on any specific
        implementation (requests/aiohttp/etc.).
        Headers will be tested for "content-type".
        """
        # Try to use content-type from headers if available
        content_type = None
        if 'content-type' in headers:
            content_type = headers['content-type'].split(";")[0].strip().lower()
        # Ouch, this server did not declare what it sent...
        # Let's guess it's JSON...
        # Also, since Autorest was considering that an empty body was a valid JSON,
        # need that test as well....
        else:
            content_type = "application/json"
        if body_bytes:
            return cls.deserialize_from_text(body_bytes, content_type)
        return None
[ "def", "deserialize_from_http_generics", "(", "cls", ",", "body_bytes", ",", "headers", ")", ":", "# type: (Optional[Union[AnyStr, IO]], Mapping) -> Any", "# Try to use content-type from headers if available", "content_type", "=", "None", "if", "'content-type'", "in", "headers", ":", "content_type", "=", "headers", "[", "'content-type'", "]", ".", "split", "(", "\";\"", ")", "[", "0", "]", ".", "strip", "(", ")", ".", "lower", "(", ")", "# Ouch, this server did not declare what it sent...", "# Let's guess it's JSON...", "# Also, since Autorest was considering that an empty body was a valid JSON,", "# need that test as well....", "else", ":", "content_type", "=", "\"application/json\"", "if", "body_bytes", ":", "return", "cls", ".", "deserialize_from_text", "(", "body_bytes", ",", "content_type", ")", "return", "None" ]
Deserialize from HTTP response.

Use bytes and headers so as NOT to depend on any specific
implementation (requests/aiohttp/etc.).
Headers will be tested for "content-type".
[ "Deserialize", "from", "HTTP", "response", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/pipeline/universal.py#L198-L219
train
Azure/msrest-for-python
msrest/pipeline/universal.py
RawDeserializer.on_response
def on_response(self, request, response, **kwargs): # type: (Request, Response, Any) -> None """Extract data from the body of a REST response object. This will load the entire payload in memory. Will follow Content-Type to parse. We assume everything is UTF8 (BOM acceptable). :param raw_data: Data to be processed. :param content_type: How to parse if raw_data is a string/bytes. :raises JSONDecodeError: If JSON is requested and parsing is impossible. :raises UnicodeDecodeError: If bytes is not UTF8 :raises xml.etree.ElementTree.ParseError: If bytes is not valid XML """ # If response was asked as stream, do NOT read anything and quit now if kwargs.get("stream", True): return http_response = response.http_response response.context[self.CONTEXT_NAME] = self.deserialize_from_http_generics( http_response.text(), http_response.headers )
python
def on_response(self, request, response, **kwargs): # type: (Request, Response, Any) -> None """Extract data from the body of a REST response object. This will load the entire payload in memory. Will follow Content-Type to parse. We assume everything is UTF8 (BOM acceptable). :param raw_data: Data to be processed. :param content_type: How to parse if raw_data is a string/bytes. :raises JSONDecodeError: If JSON is requested and parsing is impossible. :raises UnicodeDecodeError: If bytes is not UTF8 :raises xml.etree.ElementTree.ParseError: If bytes is not valid XML """ # If response was asked as stream, do NOT read anything and quit now if kwargs.get("stream", True): return http_response = response.http_response response.context[self.CONTEXT_NAME] = self.deserialize_from_http_generics( http_response.text(), http_response.headers )
[ "def", "on_response", "(", "self", ",", "request", ",", "response", ",", "*", "*", "kwargs", ")", ":", "# type: (Request, Response, Any) -> None", "# If response was asked as stream, do NOT read anything and quit now", "if", "kwargs", ".", "get", "(", "\"stream\"", ",", "True", ")", ":", "return", "http_response", "=", "response", ".", "http_response", "response", ".", "context", "[", "self", ".", "CONTEXT_NAME", "]", "=", "self", ".", "deserialize_from_http_generics", "(", "http_response", ".", "text", "(", ")", ",", "http_response", ".", "headers", ")" ]
Extract data from the body of a REST response object. This will load the entire payload in memory. Will follow Content-Type to parse. We assume everything is UTF8 (BOM acceptable). :param raw_data: Data to be processed. :param content_type: How to parse if raw_data is a string/bytes. :raises JSONDecodeError: If JSON is requested and parsing is impossible. :raises UnicodeDecodeError: If bytes is not UTF8 :raises xml.etree.ElementTree.ParseError: If bytes is not valid XML
[ "Extract", "data", "from", "the", "body", "of", "a", "REST", "response", "object", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/pipeline/universal.py#L221-L245
train
Azure/msrest-for-python
msrest/pipeline/__init__.py
ClientRawResponse.add_headers
def add_headers(self, header_dict): # type: (Dict[str, str]) -> None """Deserialize a specific header. :param dict header_dict: A dictionary containing the name of the header and the type to deserialize to. """ if not self.response: return for name, data_type in header_dict.items(): value = self.response.headers.get(name) value = self._deserialize(data_type, value) self.headers[name] = value
python
def add_headers(self, header_dict): # type: (Dict[str, str]) -> None """Deserialize a specific header. :param dict header_dict: A dictionary containing the name of the header and the type to deserialize to. """ if not self.response: return for name, data_type in header_dict.items(): value = self.response.headers.get(name) value = self._deserialize(data_type, value) self.headers[name] = value
[ "def", "add_headers", "(", "self", ",", "header_dict", ")", ":", "# type: (Dict[str, str]) -> None", "if", "not", "self", ".", "response", ":", "return", "for", "name", ",", "data_type", "in", "header_dict", ".", "items", "(", ")", ":", "value", "=", "self", ".", "response", ".", "headers", ".", "get", "(", "name", ")", "value", "=", "self", ".", "_deserialize", "(", "data_type", ",", "value", ")", "self", ".", "headers", "[", "name", "]", "=", "value" ]
Deserialize a specific header. :param dict header_dict: A dictionary containing the name of the header and the type to deserialize to.
[ "Deserialize", "a", "specific", "header", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/pipeline/__init__.py#L293-L305
train
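The per-header loop above, sketched standalone with a toy deserializer to show the shape of header_dict; the header name and type string are illustrative:

def deserialize_headers(response_headers, header_dict, deserialize):
    out = {}
    for name, data_type in header_dict.items():
        # A missing header comes back as None and deserializes to None.
        out[name] = deserialize(data_type, response_headers.get(name))
    return out

toy = lambda t, v: int(v) if t == 'int' and v is not None else v
print(deserialize_headers({'x-ms-retry-after': '5'}, {'x-ms-retry-after': 'int'}, toy))
# {'x-ms-retry-after': 5}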
Azure/msrest-for-python
msrest/http_logger.py
log_request
def log_request(_, request, *_args, **_kwargs): # type: (Any, ClientRequest, str, str) -> None """Log a client request. :param _: Unused in current version (will be None) :param requests.Request request: The request object. """ if not _LOGGER.isEnabledFor(logging.DEBUG): return try: _LOGGER.debug("Request URL: %r", request.url) _LOGGER.debug("Request method: %r", request.method) _LOGGER.debug("Request headers:") for header, value in request.headers.items(): if header.lower() == 'authorization': value = '*****' _LOGGER.debug(" %r: %r", header, value) _LOGGER.debug("Request body:") # We don't want to log the binary data of a file upload. if isinstance(request.body, types.GeneratorType): _LOGGER.debug("File upload") else: _LOGGER.debug(str(request.body)) except Exception as err: # pylint: disable=broad-except _LOGGER.debug("Failed to log request: %r", err)
python
def log_request(_, request, *_args, **_kwargs): # type: (Any, ClientRequest, str, str) -> None """Log a client request. :param _: Unused in current version (will be None) :param requests.Request request: The request object. """ if not _LOGGER.isEnabledFor(logging.DEBUG): return try: _LOGGER.debug("Request URL: %r", request.url) _LOGGER.debug("Request method: %r", request.method) _LOGGER.debug("Request headers:") for header, value in request.headers.items(): if header.lower() == 'authorization': value = '*****' _LOGGER.debug(" %r: %r", header, value) _LOGGER.debug("Request body:") # We don't want to log the binary data of a file upload. if isinstance(request.body, types.GeneratorType): _LOGGER.debug("File upload") else: _LOGGER.debug(str(request.body)) except Exception as err: # pylint: disable=broad-except _LOGGER.debug("Failed to log request: %r", err)
[ "def", "log_request", "(", "_", ",", "request", ",", "*", "_args", ",", "*", "*", "_kwargs", ")", ":", "# type: (Any, ClientRequest, str, str) -> None", "if", "not", "_LOGGER", ".", "isEnabledFor", "(", "logging", ".", "DEBUG", ")", ":", "return", "try", ":", "_LOGGER", ".", "debug", "(", "\"Request URL: %r\"", ",", "request", ".", "url", ")", "_LOGGER", ".", "debug", "(", "\"Request method: %r\"", ",", "request", ".", "method", ")", "_LOGGER", ".", "debug", "(", "\"Request headers:\"", ")", "for", "header", ",", "value", "in", "request", ".", "headers", ".", "items", "(", ")", ":", "if", "header", ".", "lower", "(", ")", "==", "'authorization'", ":", "value", "=", "'*****'", "_LOGGER", ".", "debug", "(", "\" %r: %r\"", ",", "header", ",", "value", ")", "_LOGGER", ".", "debug", "(", "\"Request body:\"", ")", "# We don't want to log the binary data of a file upload.", "if", "isinstance", "(", "request", ".", "body", ",", "types", ".", "GeneratorType", ")", ":", "_LOGGER", ".", "debug", "(", "\"File upload\"", ")", "else", ":", "_LOGGER", ".", "debug", "(", "str", "(", "request", ".", "body", ")", ")", "except", "Exception", "as", "err", ":", "# pylint: disable=broad-except", "_LOGGER", ".", "debug", "(", "\"Failed to log request: %r\"", ",", "err", ")" ]
Log a client request. :param _: Unused in current version (will be None) :param requests.Request request: The request object.
[ "Log", "a", "client", "request", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/http_logger.py#L39-L65
train
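The hook above is a no-op unless DEBUG is enabled on the module logger; enabling it (the logger name follows the logging.getLogger(__name__) convention used in this module):

import logging

logging.basicConfig(level=logging.DEBUG)
logging.getLogger('msrest.http_logger').setLevel(logging.DEBUG)
# Requests sent from now on dump URL, method, headers (Authorization
# masked as '*****') and body to the debug log.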
Azure/msrest-for-python
msrest/http_logger.py
log_response
def log_response(_, _request, response, *_args, **kwargs): # type: (Any, ClientRequest, ClientResponse, str, Any) -> Optional[ClientResponse] """Log a server response. :param _: Unused in current version (will be None) :param requests.Request request: The request object. :param requests.Response response: The response object. """ if not _LOGGER.isEnabledFor(logging.DEBUG): return None try: _LOGGER.debug("Response status: %r", response.status_code) _LOGGER.debug("Response headers:") for res_header, value in response.headers.items(): _LOGGER.debug(" %r: %r", res_header, value) # We don't want to log binary data if the response is a file. _LOGGER.debug("Response content:") pattern = re.compile(r'attachment; ?filename=["\w.]+', re.IGNORECASE) header = response.headers.get('content-disposition') if header and pattern.match(header): filename = header.partition('=')[2] _LOGGER.debug("File attachments: %s", filename) elif response.headers.get("content-type", "").endswith("octet-stream"): _LOGGER.debug("Body contains binary data.") elif response.headers.get("content-type", "").startswith("image"): _LOGGER.debug("Body contains image data.") else: if kwargs.get('stream', False): _LOGGER.debug("Body is streamable") else: _LOGGER.debug(response.text()) return response except Exception as err: # pylint: disable=broad-except _LOGGER.debug("Failed to log response: %s", repr(err)) return response
python
def log_response(_, _request, response, *_args, **kwargs): # type: (Any, ClientRequest, ClientResponse, str, Any) -> Optional[ClientResponse] """Log a server response. :param _: Unused in current version (will be None) :param requests.Request request: The request object. :param requests.Response response: The response object. """ if not _LOGGER.isEnabledFor(logging.DEBUG): return None try: _LOGGER.debug("Response status: %r", response.status_code) _LOGGER.debug("Response headers:") for res_header, value in response.headers.items(): _LOGGER.debug(" %r: %r", res_header, value) # We don't want to log binary data if the response is a file. _LOGGER.debug("Response content:") pattern = re.compile(r'attachment; ?filename=["\w.]+', re.IGNORECASE) header = response.headers.get('content-disposition') if header and pattern.match(header): filename = header.partition('=')[2] _LOGGER.debug("File attachments: %s", filename) elif response.headers.get("content-type", "").endswith("octet-stream"): _LOGGER.debug("Body contains binary data.") elif response.headers.get("content-type", "").startswith("image"): _LOGGER.debug("Body contains image data.") else: if kwargs.get('stream', False): _LOGGER.debug("Body is streamable") else: _LOGGER.debug(response.text()) return response except Exception as err: # pylint: disable=broad-except _LOGGER.debug("Failed to log response: %s", repr(err)) return response
[ "def", "log_response", "(", "_", ",", "_request", ",", "response", ",", "*", "_args", ",", "*", "*", "kwargs", ")", ":", "# type: (Any, ClientRequest, ClientResponse, str, Any) -> Optional[ClientResponse]", "if", "not", "_LOGGER", ".", "isEnabledFor", "(", "logging", ".", "DEBUG", ")", ":", "return", "None", "try", ":", "_LOGGER", ".", "debug", "(", "\"Response status: %r\"", ",", "response", ".", "status_code", ")", "_LOGGER", ".", "debug", "(", "\"Response headers:\"", ")", "for", "res_header", ",", "value", "in", "response", ".", "headers", ".", "items", "(", ")", ":", "_LOGGER", ".", "debug", "(", "\" %r: %r\"", ",", "res_header", ",", "value", ")", "# We don't want to log binary data if the response is a file.", "_LOGGER", ".", "debug", "(", "\"Response content:\"", ")", "pattern", "=", "re", ".", "compile", "(", "r'attachment; ?filename=[\"\\w.]+'", ",", "re", ".", "IGNORECASE", ")", "header", "=", "response", ".", "headers", ".", "get", "(", "'content-disposition'", ")", "if", "header", "and", "pattern", ".", "match", "(", "header", ")", ":", "filename", "=", "header", ".", "partition", "(", "'='", ")", "[", "2", "]", "_LOGGER", ".", "debug", "(", "\"File attachments: %s\"", ",", "filename", ")", "elif", "response", ".", "headers", ".", "get", "(", "\"content-type\"", ",", "\"\"", ")", ".", "endswith", "(", "\"octet-stream\"", ")", ":", "_LOGGER", ".", "debug", "(", "\"Body contains binary data.\"", ")", "elif", "response", ".", "headers", ".", "get", "(", "\"content-type\"", ",", "\"\"", ")", ".", "startswith", "(", "\"image\"", ")", ":", "_LOGGER", ".", "debug", "(", "\"Body contains image data.\"", ")", "else", ":", "if", "kwargs", ".", "get", "(", "'stream'", ",", "False", ")", ":", "_LOGGER", ".", "debug", "(", "\"Body is streamable\"", ")", "else", ":", "_LOGGER", ".", "debug", "(", "response", ".", "text", "(", ")", ")", "return", "response", "except", "Exception", "as", "err", ":", "# pylint: disable=broad-except", "_LOGGER", ".", "debug", "(", "\"Failed to log response: %s\"", ",", "repr", "(", "err", ")", ")", "return", "response" ]
Log a server response. :param _: Unused in current version (will be None) :param requests.Request request: The request object. :param requests.Response response: The response object.
[ "Log", "a", "server", "response", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/http_logger.py#L68-L105
train
Azure/msrest-for-python
msrest/universal_http/__init__.py
HTTPSenderConfiguration._clear_config
def _clear_config(self):
        # type: () -> None
        """Clear out the config object in memory."""
        for section in self._config.sections():
            self._config.remove_section(section)
python
def _clear_config(self):
        # type: () -> None
        """Clear out the config object in memory."""
        for section in self._config.sections():
            self._config.remove_section(section)
[ "def", "_clear_config", "(", "self", ")", ":", "# type: () -> None", "for", "section", "in", "self", ".", "_config", ".", "sections", "(", ")", ":", "self", ".", "_config", ".", "remove_section", "(", "section", ")" ]
Clear out the config object in memory.
[ "Clearout", "config", "object", "in", "memory", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/universal_http/__init__.py#L120-L124
train
Azure/msrest-for-python
msrest/universal_http/__init__.py
ClientRequest.format_parameters
def format_parameters(self, params): # type: (Dict[str, str]) -> None """Format parameters into a valid query string. It's assumed all parameters have already been quoted as valid URL strings. :param dict params: A dictionary of parameters. """ query = urlparse(self.url).query if query: self.url = self.url.partition('?')[0] existing_params = { p[0]: p[-1] for p in [p.partition('=') for p in query.split('&')] } params.update(existing_params) query_params = ["{}={}".format(k, v) for k, v in params.items()] query = '?' + '&'.join(query_params) self.url = self.url + query
python
def format_parameters(self, params): # type: (Dict[str, str]) -> None """Format parameters into a valid query string. It's assumed all parameters have already been quoted as valid URL strings. :param dict params: A dictionary of parameters. """ query = urlparse(self.url).query if query: self.url = self.url.partition('?')[0] existing_params = { p[0]: p[-1] for p in [p.partition('=') for p in query.split('&')] } params.update(existing_params) query_params = ["{}={}".format(k, v) for k, v in params.items()] query = '?' + '&'.join(query_params) self.url = self.url + query
[ "def", "format_parameters", "(", "self", ",", "params", ")", ":", "# type: (Dict[str, str]) -> None", "query", "=", "urlparse", "(", "self", ".", "url", ")", ".", "query", "if", "query", ":", "self", ".", "url", "=", "self", ".", "url", ".", "partition", "(", "'?'", ")", "[", "0", "]", "existing_params", "=", "{", "p", "[", "0", "]", ":", "p", "[", "-", "1", "]", "for", "p", "in", "[", "p", ".", "partition", "(", "'='", ")", "for", "p", "in", "query", ".", "split", "(", "'&'", ")", "]", "}", "params", ".", "update", "(", "existing_params", ")", "query_params", "=", "[", "\"{}={}\"", ".", "format", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "params", ".", "items", "(", ")", "]", "query", "=", "'?'", "+", "'&'", ".", "join", "(", "query_params", ")", "self", ".", "url", "=", "self", ".", "url", "+", "query" ]
Format parameters into a valid query string. It's assumed all parameters have already been quoted as valid URL strings. :param dict params: A dictionary of parameters.
[ "Format", "parameters", "into", "a", "valid", "query", "string", ".", "It", "s", "assumed", "all", "parameters", "have", "already", "been", "quoted", "as", "valid", "URL", "strings", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/universal_http/__init__.py#L231-L249
train
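The merge behaviour above is worth spelling out: parameters already present in the URL win over the new dict. A standalone replay of the logic:

from urllib.parse import urlparse

def format_parameters(url, params):
    query = urlparse(url).query
    if query:
        url = url.partition('?')[0]
        existing = {p[0]: p[-1] for p in (p.partition('=') for p in query.split('&'))}
        params.update(existing)  # existing query values overwrite new ones
    return url + '?' + '&'.join('{}={}'.format(k, v) for k, v in params.items())

print(format_parameters('https://host/path?a=1', {'a': '2', 'b': '3'}))
# https://host/path?a=1&b=3  -- 'a' keeps its original value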
Azure/msrest-for-python
msrest/universal_http/__init__.py
ClientRequest._format_data
def _format_data(data): # type: (Union[str, IO]) -> Union[Tuple[None, str], Tuple[Optional[str], IO, str]] """Format field data according to whether it is a stream or a string for a form-data request. :param data: The request field data. :type data: str or file-like object. """ if hasattr(data, 'read'): data = cast(IO, data) data_name = None try: if data.name[0] != '<' and data.name[-1] != '>': data_name = os.path.basename(data.name) except (AttributeError, TypeError): pass return (data_name, data, "application/octet-stream") return (None, cast(str, data))
python
def _format_data(data): # type: (Union[str, IO]) -> Union[Tuple[None, str], Tuple[Optional[str], IO, str]] """Format field data according to whether it is a stream or a string for a form-data request. :param data: The request field data. :type data: str or file-like object. """ if hasattr(data, 'read'): data = cast(IO, data) data_name = None try: if data.name[0] != '<' and data.name[-1] != '>': data_name = os.path.basename(data.name) except (AttributeError, TypeError): pass return (data_name, data, "application/octet-stream") return (None, cast(str, data))
[ "def", "_format_data", "(", "data", ")", ":", "# type: (Union[str, IO]) -> Union[Tuple[None, str], Tuple[Optional[str], IO, str]]", "if", "hasattr", "(", "data", ",", "'read'", ")", ":", "data", "=", "cast", "(", "IO", ",", "data", ")", "data_name", "=", "None", "try", ":", "if", "data", ".", "name", "[", "0", "]", "!=", "'<'", "and", "data", ".", "name", "[", "-", "1", "]", "!=", "'>'", ":", "data_name", "=", "os", ".", "path", ".", "basename", "(", "data", ".", "name", ")", "except", "(", "AttributeError", ",", "TypeError", ")", ":", "pass", "return", "(", "data_name", ",", "data", ",", "\"application/octet-stream\"", ")", "return", "(", "None", ",", "cast", "(", "str", ",", "data", ")", ")" ]
Format field data according to whether it is a stream or a string for a form-data request. :param data: The request field data. :type data: str or file-like object.
[ "Format", "field", "data", "according", "to", "whether", "it", "is", "a", "stream", "or", "a", "string", "for", "a", "form", "-", "data", "request", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/universal_http/__init__.py#L275-L292
train
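A close sketch of the branching above, showing the two tuple shapes that form-data fields end up in:

import io
import os

def format_data(data):
    if hasattr(data, 'read'):
        data_name = None
        name = getattr(data, 'name', None)
        # Pseudo-files such as '<stdin>' have bracketed names and stay unnamed.
        if isinstance(name, str) and name[:1] != '<' and name[-1:] != '>':
            data_name = os.path.basename(name)
        return (data_name, data, 'application/octet-stream')
    return (None, data)

print(format_data('plain value'))           # (None, 'plain value')
print(format_data(io.BytesIO(b'payload')))  # (None, <stream>, 'application/octet-stream')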
Azure/msrest-for-python
msrest/universal_http/__init__.py
ClientRequest.add_formdata
def add_formdata(self, content=None):
        # type: (Optional[Dict[str, str]]) -> None
        """Add data as a multipart form-data request to the request.

        We only deal with file-like objects or strings at this point.
        The request is not yet streamed.

        :param dict content: Dictionary of the fields of the formdata.
        """
        if content is None:
            content = {}
        content_type = self.headers.pop('Content-Type', None) if self.headers else None

        if content_type and content_type.lower() == 'application/x-www-form-urlencoded':
            # Do NOT use "add_content" that assumes input is JSON
            self.data = {f: d for f, d in content.items() if d is not None}
        else: # Assume "multipart/form-data"
            self.files = {f: self._format_data(d) for f, d in content.items() if d is not None}
python
def add_formdata(self, content=None):
        # type: (Optional[Dict[str, str]]) -> None
        """Add data as a multipart form-data request to the request.

        We only deal with file-like objects or strings at this point.
        The request is not yet streamed.

        :param dict content: Dictionary of the fields of the formdata.
        """
        if content is None:
            content = {}
        content_type = self.headers.pop('Content-Type', None) if self.headers else None

        if content_type and content_type.lower() == 'application/x-www-form-urlencoded':
            # Do NOT use "add_content" that assumes input is JSON
            self.data = {f: d for f, d in content.items() if d is not None}
        else: # Assume "multipart/form-data"
            self.files = {f: self._format_data(d) for f, d in content.items() if d is not None}
[ "def", "add_formdata", "(", "self", ",", "content", "=", "None", ")", ":", "# type: (Optional[Dict[str, str]]) -> None", "if", "content", "is", "None", ":", "content", "=", "{", "}", "content_type", "=", "self", ".", "headers", ".", "pop", "(", "'Content-Type'", ",", "None", ")", "if", "self", ".", "headers", "else", "None", "if", "content_type", "and", "content_type", ".", "lower", "(", ")", "==", "'application/x-www-form-urlencoded'", ":", "# Do NOT use \"add_content\" that assumes input is JSON", "self", ".", "data", "=", "{", "f", ":", "d", "for", "f", ",", "d", "in", "content", ".", "items", "(", ")", "if", "d", "is", "not", "None", "}", "else", ":", "# Assume \"multipart/form-data\"", "self", ".", "files", "=", "{", "f", ":", "self", ".", "_format_data", "(", "d", ")", "for", "f", ",", "d", "in", "content", ".", "items", "(", ")", "if", "d", "is", "not", "None", "}" ]
Add data as a multipart form-data request to the request.

We only deal with file-like objects or strings at this point.
The request is not yet streamed.

:param dict content: Dictionary of the fields of the formdata.
[ "Add", "data", "as", "a", "multipart", "form", "-", "data", "request", "to", "the", "request", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/universal_http/__init__.py#L294-L312
train
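The Content-Type branch above decides between the two requests upload styles; sketched with plain dicts (the real code also wraps multipart values through _format_data):

def split_formdata(headers, content):
    fields = {f: d for f, d in (content or {}).items() if d is not None}
    content_type = headers.pop('Content-Type', None) if headers else None
    if content_type and content_type.lower() == 'application/x-www-form-urlencoded':
        return {'data': fields}   # classic urlencoded body
    return {'files': fields}      # multipart/form-data body

print(split_formdata({'Content-Type': 'application/x-www-form-urlencoded'}, {'a': '1'}))
print(split_formdata({}, {'f': 'file-content', 'skipped': None}))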
Azure/msrest-for-python
msrest/exceptions.py
raise_with_traceback
def raise_with_traceback(exception, message="", *args, **kwargs):
    # type: (Callable, str, Any, Any) -> None
    """Raise exception with a specified traceback.

    This MUST be called inside an "except" clause.

    :param Exception exception: Error type to be raised.
    :param str message: Message to include with error, empty by default.
    :param args: Any additional args to be included with exception.
    """
    exc_type, exc_value, exc_traceback = sys.exc_info()
    # If not called inside an "except", exc_type will be None. Assume it will not happen
    exc_msg = "{}, {}: {}".format(message, exc_type.__name__, exc_value)  # type: ignore
    error = exception(exc_msg, *args, **kwargs)
    try:
        raise error.with_traceback(exc_traceback)
    except AttributeError:
        error.__traceback__ = exc_traceback
        raise error
python
def raise_with_traceback(exception, message="", *args, **kwargs):
    # type: (Callable, str, Any, Any) -> None
    """Raise exception with a specified traceback.

    This MUST be called inside an "except" clause.

    :param Exception exception: Error type to be raised.
    :param str message: Message to include with error, empty by default.
    :param args: Any additional args to be included with exception.
    """
    exc_type, exc_value, exc_traceback = sys.exc_info()
    # If not called inside an "except", exc_type will be None. Assume it will not happen
    exc_msg = "{}, {}: {}".format(message, exc_type.__name__, exc_value)  # type: ignore
    error = exception(exc_msg, *args, **kwargs)
    try:
        raise error.with_traceback(exc_traceback)
    except AttributeError:
        error.__traceback__ = exc_traceback
        raise error
[ "def", "raise_with_traceback", "(", "exception", ",", "message", "=", "\"\"", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# type: (Callable, str, Any, Any) -> None", "exc_type", ",", "exc_value", ",", "exc_traceback", "=", "sys", ".", "exc_info", "(", ")", "# If not called inside a \"except\", exc_type will be None. Assume it will not happen", "exc_msg", "=", "\"{}, {}: {}\"", ".", "format", "(", "message", ",", "exc_type", ".", "__name__", ",", "exc_value", ")", "# type: ignore", "error", "=", "exception", "(", "exc_msg", ",", "*", "args", ",", "*", "*", "kwargs", ")", "try", ":", "raise", "error", ".", "with_traceback", "(", "exc_traceback", ")", "except", "AttributeError", ":", "error", ".", "__traceback__", "=", "exc_traceback", "raise", "error" ]
Raise exception with a specified traceback.

This MUST be called inside an "except" clause.

:param Exception exception: Error type to be raised.
:param str message: Message to include with error, empty by default.
:param args: Any additional args to be included with exception.
[ "Raise", "exception", "with", "a", "specified", "traceback", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/exceptions.py#L36-L54
train
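Typical usage, re-raising a low-level error as a library error while keeping the original traceback; the message is illustrative:

from msrest.exceptions import DeserializationError, raise_with_traceback

try:
    int('not-a-number')
except ValueError:
    # Must run inside the except clause so sys.exc_info() still has
    # the original traceback to attach.
    raise_with_traceback(DeserializationError, "Could not parse count")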
Azure/msrest-for-python
msrest/universal_http/requests.py
_patch_redirect
def _patch_redirect(session):
    # type: (requests.Session) -> None
    """Whether redirect policy should be applied based on status code.

    The HTTP spec says that a 301/302 response to a request other than
    HEAD/GET should NOT be redirected automatically. But requests does
    redirect, following browsers rather than the spec:
    https://github.com/requests/requests/blob/f6e13ccfc4b50dc458ee374e5dba347205b9a2da/requests/sessions.py#L305-L314

    This patches "requests" to be more HTTP compliant.

    Note that this is super dangerous, since technically this is not public API.
    """
    def enforce_http_spec(resp, request):
        if resp.status_code in (301, 302) and \
                request.method not in ['GET', 'HEAD']:
            return False
        return True

    redirect_logic = session.resolve_redirects

    def wrapped_redirect(resp, req, **kwargs):
        attempt = enforce_http_spec(resp, req)
        return redirect_logic(resp, req, **kwargs) if attempt else []
    wrapped_redirect.is_msrest_patched = True  # type: ignore

    session.resolve_redirects = wrapped_redirect
python
def _patch_redirect(session):
    # type: (requests.Session) -> None
    """Whether redirect policy should be applied based on status code.

    The HTTP spec says that a 301/302 response to a request other than
    HEAD/GET should NOT be redirected automatically. But requests does
    redirect, following browsers rather than the spec:
    https://github.com/requests/requests/blob/f6e13ccfc4b50dc458ee374e5dba347205b9a2da/requests/sessions.py#L305-L314

    This patches "requests" to be more HTTP compliant.

    Note that this is super dangerous, since technically this is not public API.
    """
    def enforce_http_spec(resp, request):
        if resp.status_code in (301, 302) and \
                request.method not in ['GET', 'HEAD']:
            return False
        return True

    redirect_logic = session.resolve_redirects

    def wrapped_redirect(resp, req, **kwargs):
        attempt = enforce_http_spec(resp, req)
        return redirect_logic(resp, req, **kwargs) if attempt else []
    wrapped_redirect.is_msrest_patched = True  # type: ignore

    session.resolve_redirects = wrapped_redirect
[ "def", "_patch_redirect", "(", "session", ")", ":", "# type: (requests.Session) -> None", "def", "enforce_http_spec", "(", "resp", ",", "request", ")", ":", "if", "resp", ".", "status_code", "in", "(", "301", ",", "302", ")", "and", "request", ".", "method", "not", "in", "[", "'GET'", ",", "'HEAD'", "]", ":", "return", "False", "return", "True", "redirect_logic", "=", "session", ".", "resolve_redirects", "def", "wrapped_redirect", "(", "resp", ",", "req", ",", "*", "*", "kwargs", ")", ":", "attempt", "=", "enforce_http_spec", "(", "resp", ",", "req", ")", "return", "redirect_logic", "(", "resp", ",", "req", ",", "*", "*", "kwargs", ")", "if", "attempt", "else", "[", "]", "wrapped_redirect", ".", "is_msrest_patched", "=", "True", "# type: ignore", "session", ".", "resolve_redirects", "=", "wrapped_redirect" ]
Whether redirect policy should be applied based on status code.

The HTTP spec says that a 301/302 response to a request other than
HEAD/GET should NOT be redirected automatically. But requests does
redirect, following browsers rather than the spec:
https://github.com/requests/requests/blob/f6e13ccfc4b50dc458ee374e5dba347205b9a2da/requests/sessions.py#L305-L314

This patches "requests" to be more HTTP compliant.

Note that this is super dangerous, since technically this is not public API.
[ "Whether", "redirect", "policy", "should", "be", "applied", "based", "on", "status", "code", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/universal_http/requests.py#L145-L170
train
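Applying the patch above to a fresh session; the marker attribute makes the patch detectable afterwards:

import requests

session = requests.Session()
_patch_redirect(session)  # the function defined above

assert getattr(session.resolve_redirects, 'is_msrest_patched', False)
# A 301/302 answer to a POST through this session now yields no
# redirects instead of being replayed as a GET.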
Azure/msrest-for-python
msrest/universal_http/requests.py
RequestsHTTPSender._init_session
def _init_session(self, session): # type: (requests.Session) -> None """Init session level configuration of requests. This is initialization I want to do once only on a session. """ _patch_redirect(session) # Change max_retries in current all installed adapters max_retries = self.config.retry_policy() for protocol in self._protocols: session.adapters[protocol].max_retries = max_retries
python
def _init_session(self, session): # type: (requests.Session) -> None """Init session level configuration of requests. This is initialization I want to do once only on a session. """ _patch_redirect(session) # Change max_retries in current all installed adapters max_retries = self.config.retry_policy() for protocol in self._protocols: session.adapters[protocol].max_retries = max_retries
[ "def", "_init_session", "(", "self", ",", "session", ")", ":", "# type: (requests.Session) -> None", "_patch_redirect", "(", "session", ")", "# Change max_retries in current all installed adapters", "max_retries", "=", "self", ".", "config", ".", "retry_policy", "(", ")", "for", "protocol", "in", "self", ".", "_protocols", ":", "session", ".", "adapters", "[", "protocol", "]", ".", "max_retries", "=", "max_retries" ]
Init session level configuration of requests. This is initialization I want to do once only on a session.
[ "Init", "session", "level", "configuration", "of", "requests", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/universal_http/requests.py#L218-L229
train
Azure/msrest-for-python
msrest/universal_http/requests.py
RequestsHTTPSender._configure_send
def _configure_send(self, request, **kwargs):
        # type: (ClientRequest, Any) -> Dict[str, str]
        """Configure the kwargs to use with requests.

        See "send" for kwargs details.

        :param ClientRequest request: The request object to be sent.
        :returns: The requests.Session.request kwargs
        :rtype: dict[str,str]
        """
        requests_kwargs = {}  # type: Any
        session = kwargs.pop('session', self.session)

        # If custom session was not created here
        if session is not self.session:
            self._init_session(session)

        session.max_redirects = int(self.config.redirect_policy())
        session.trust_env = bool(self.config.proxies.use_env_settings)

        # Initialize requests_kwargs with "config" value
        requests_kwargs.update(self.config.connection())
        requests_kwargs['allow_redirects'] = bool(self.config.redirect_policy)
        requests_kwargs['headers'] = self.config.headers.copy()

        proxies = self.config.proxies()
        if proxies:
            requests_kwargs['proxies'] = proxies

        # Replace by operation level kwargs
        # We allow some of them, since some like stream or json are controlled by msrest
        for key in kwargs:
            if key in self._REQUESTS_KWARGS:
                requests_kwargs[key] = kwargs[key]

        # Hooks. Deprecated, should be a policy
        def make_user_hook_cb(user_hook, session):
            def user_hook_cb(r, *args, **kwargs):
                kwargs.setdefault("msrest", {})['session'] = session
                return user_hook(r, *args, **kwargs)
            return user_hook_cb

        hooks = []
        for user_hook in self.config.hooks:
            hooks.append(make_user_hook_cb(user_hook, self.session))
        if hooks:
            requests_kwargs['hooks'] = {'response': hooks}

        # Configuration callback. Deprecated, should be a policy
        output_kwargs = self.config.session_configuration_callback(
            session,
            self.config,
            kwargs,
            **requests_kwargs
        )
        if output_kwargs is not None:
            requests_kwargs = output_kwargs

        # If custom session was not created here
        if session is not self.session:
            requests_kwargs['session'] = session

        ### Autorest forced kwargs now ###

        # If Autorest needs this response to be streamable. True for compat.
        requests_kwargs['stream'] = kwargs.get('stream', True)

        if request.files:
            requests_kwargs['files'] = request.files
        elif request.data:
            requests_kwargs['data'] = request.data
        requests_kwargs['headers'].update(request.headers)

        return requests_kwargs
python
def _configure_send(self, request, **kwargs):
        # type: (ClientRequest, Any) -> Dict[str, str]
        """Configure the kwargs to use with requests.

        See "send" for kwargs details.

        :param ClientRequest request: The request object to be sent.
        :returns: The requests.Session.request kwargs
        :rtype: dict[str,str]
        """
        requests_kwargs = {}  # type: Any
        session = kwargs.pop('session', self.session)

        # If custom session was not created here
        if session is not self.session:
            self._init_session(session)

        session.max_redirects = int(self.config.redirect_policy())
        session.trust_env = bool(self.config.proxies.use_env_settings)

        # Initialize requests_kwargs with "config" value
        requests_kwargs.update(self.config.connection())
        requests_kwargs['allow_redirects'] = bool(self.config.redirect_policy)
        requests_kwargs['headers'] = self.config.headers.copy()

        proxies = self.config.proxies()
        if proxies:
            requests_kwargs['proxies'] = proxies

        # Replace by operation level kwargs
        # We allow some of them, since some like stream or json are controlled by msrest
        for key in kwargs:
            if key in self._REQUESTS_KWARGS:
                requests_kwargs[key] = kwargs[key]

        # Hooks. Deprecated, should be a policy
        def make_user_hook_cb(user_hook, session):
            def user_hook_cb(r, *args, **kwargs):
                kwargs.setdefault("msrest", {})['session'] = session
                return user_hook(r, *args, **kwargs)
            return user_hook_cb

        hooks = []
        for user_hook in self.config.hooks:
            hooks.append(make_user_hook_cb(user_hook, self.session))
        if hooks:
            requests_kwargs['hooks'] = {'response': hooks}

        # Configuration callback. Deprecated, should be a policy
        output_kwargs = self.config.session_configuration_callback(
            session,
            self.config,
            kwargs,
            **requests_kwargs
        )
        if output_kwargs is not None:
            requests_kwargs = output_kwargs

        # If custom session was not created here
        if session is not self.session:
            requests_kwargs['session'] = session

        ### Autorest forced kwargs now ###

        # If Autorest needs this response to be streamable. True for compat.
        requests_kwargs['stream'] = kwargs.get('stream', True)

        if request.files:
            requests_kwargs['files'] = request.files
        elif request.data:
            requests_kwargs['data'] = request.data
        requests_kwargs['headers'].update(request.headers)

        return requests_kwargs
[ "def", "_configure_send", "(", "self", ",", "request", ",", "*", "*", "kwargs", ")", ":", "# type: (ClientRequest, Any) -> Dict[str, str]", "requests_kwargs", "=", "{", "}", "# type: Any", "session", "=", "kwargs", ".", "pop", "(", "'session'", ",", "self", ".", "session", ")", "# If custom session was not create here", "if", "session", "is", "not", "self", ".", "session", ":", "self", ".", "_init_session", "(", "session", ")", "session", ".", "max_redirects", "=", "int", "(", "self", ".", "config", ".", "redirect_policy", "(", ")", ")", "session", ".", "trust_env", "=", "bool", "(", "self", ".", "config", ".", "proxies", ".", "use_env_settings", ")", "# Initialize requests_kwargs with \"config\" value", "requests_kwargs", ".", "update", "(", "self", ".", "config", ".", "connection", "(", ")", ")", "requests_kwargs", "[", "'allow_redirects'", "]", "=", "bool", "(", "self", ".", "config", ".", "redirect_policy", ")", "requests_kwargs", "[", "'headers'", "]", "=", "self", ".", "config", ".", "headers", ".", "copy", "(", ")", "proxies", "=", "self", ".", "config", ".", "proxies", "(", ")", "if", "proxies", ":", "requests_kwargs", "[", "'proxies'", "]", "=", "proxies", "# Replace by operation level kwargs", "# We allow some of them, since some like stream or json are controled by msrest", "for", "key", "in", "kwargs", ":", "if", "key", "in", "self", ".", "_REQUESTS_KWARGS", ":", "requests_kwargs", "[", "key", "]", "=", "kwargs", "[", "key", "]", "# Hooks. Deprecated, should be a policy", "def", "make_user_hook_cb", "(", "user_hook", ",", "session", ")", ":", "def", "user_hook_cb", "(", "r", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "kwargs", ".", "setdefault", "(", "\"msrest\"", ",", "{", "}", ")", "[", "'session'", "]", "=", "session", "return", "user_hook", "(", "r", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "user_hook_cb", "hooks", "=", "[", "]", "for", "user_hook", "in", "self", ".", "config", ".", "hooks", ":", "hooks", ".", "append", "(", "make_user_hook_cb", "(", "user_hook", ",", "self", ".", "session", ")", ")", "if", "hooks", ":", "requests_kwargs", "[", "'hooks'", "]", "=", "{", "'response'", ":", "hooks", "}", "# Configuration callback. Deprecated, should be a policy", "output_kwargs", "=", "self", ".", "config", ".", "session_configuration_callback", "(", "session", ",", "self", ".", "config", ",", "kwargs", ",", "*", "*", "requests_kwargs", ")", "if", "output_kwargs", "is", "not", "None", ":", "requests_kwargs", "=", "output_kwargs", "# If custom session was not create here", "if", "session", "is", "not", "self", ".", "session", ":", "requests_kwargs", "[", "'session'", "]", "=", "session", "### Autorest forced kwargs now ###", "# If Autorest needs this response to be streamable. True for compat.", "requests_kwargs", "[", "'stream'", "]", "=", "kwargs", ".", "get", "(", "'stream'", ",", "True", ")", "if", "request", ".", "files", ":", "requests_kwargs", "[", "'files'", "]", "=", "request", ".", "files", "elif", "request", ".", "data", ":", "requests_kwargs", "[", "'data'", "]", "=", "request", ".", "data", "requests_kwargs", "[", "'headers'", "]", ".", "update", "(", "request", ".", "headers", ")", "return", "requests_kwargs" ]
Configure the kwargs to use with requests.

See "send" for kwargs details.

:param ClientRequest request: The request object to be sent.
:returns: The requests.Session.request kwargs
:rtype: dict[str,str]
[ "Configure", "the", "kwargs", "to", "use", "with", "requests", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/universal_http/requests.py#L231-L305
train
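A minimal sketch of how the kwargs built by _configure_send would be handed to requests; the driver function below is hypothetical (not part of msrest's public API) and assumes a client object exposing this method plus an already-prepared ClientRequest:

def send_prepared(client, request, **op_kwargs):
    # Hypothetical driver: build the per-call kwargs, pull out any
    # custom session, and pass the rest straight to requests.
    requests_kwargs = client._configure_send(request, **op_kwargs)
    session = requests_kwargs.pop('session', client.session)
    # ClientRequest carries the verb and URL; everything else is kwargs.
    return session.request(request.method, request.url, **requests_kwargs)

Note that 'stream' defaults to True in the returned kwargs, so a caller that wants the body downloaded eagerly must pass stream=False.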
Azure/msrest-for-python
msrest/serialization.py
full_restapi_key_transformer
def full_restapi_key_transformer(key, attr_desc, value):
    """A key transformer that returns the full RestAPI key path.

    :param str key: The attribute name
    :param dict attr_desc: The attribute metadata
    :param object value: The value
    :returns: A list of keys using RestAPI syntax.
    """
    keys = _FLATTEN.split(attr_desc['key'])
    return ([_decode_attribute_map_key(k) for k in keys], value)
python
def full_restapi_key_transformer(key, attr_desc, value):
    """A key transformer that returns the full RestAPI key path.

    :param str key: The attribute name
    :param dict attr_desc: The attribute metadata
    :param object value: The value
    :returns: A list of keys using RestAPI syntax.
    """
    keys = _FLATTEN.split(attr_desc['key'])
    return ([_decode_attribute_map_key(k) for k in keys], value)
[ "def", "full_restapi_key_transformer", "(", "key", ",", "attr_desc", ",", "value", ")", ":", "keys", "=", "_FLATTEN", ".", "split", "(", "attr_desc", "[", "'key'", "]", ")", "return", "(", "[", "_decode_attribute_map_key", "(", "k", ")", "for", "k", "in", "keys", "]", ",", "value", ")" ]
A key transformer that returns the full RestAPI key path.

:param str key: The attribute name
:param dict attr_desc: The attribute metadata
:param object value: The value
:returns: A list of keys using RestAPI syntax.
[ "A", "key", "transformer", "that", "returns", "the", "full", "RestAPI", "key", "path", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L98-L107
train
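A quick worked example of the full-path transformer; the attribute-map entry below is illustrative, and _FLATTEN splits the flattened key on unescaped dots before _decode_attribute_map_key unescapes each piece:

attr_desc = {'key': 'properties.name', 'type': 'str'}
keys, value = full_restapi_key_transformer('name', attr_desc, 'example')
print(keys, value)   # ['properties', 'name'] example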
Azure/msrest-for-python
msrest/serialization.py
last_restapi_key_transformer
def last_restapi_key_transformer(key, attr_desc, value):
    """A key transformer that returns the last RestAPI key.

    :param str key: The attribute name
    :param dict attr_desc: The attribute metadata
    :param object value: The value
    :returns: The last RestAPI key.
    """
    key, value = full_restapi_key_transformer(key, attr_desc, value)
    return (key[-1], value)
python
def last_restapi_key_transformer(key, attr_desc, value):
    """A key transformer that returns the last RestAPI key.

    :param str key: The attribute name
    :param dict attr_desc: The attribute metadata
    :param object value: The value
    :returns: The last RestAPI key.
    """
    key, value = full_restapi_key_transformer(key, attr_desc, value)
    return (key[-1], value)
[ "def", "last_restapi_key_transformer", "(", "key", ",", "attr_desc", ",", "value", ")", ":", "key", ",", "value", "=", "full_restapi_key_transformer", "(", "key", ",", "attr_desc", ",", "value", ")", "return", "(", "key", "[", "-", "1", "]", ",", "value", ")" ]
A key transformer that returns the last RestAPI key.

:param str key: The attribute name
:param dict attr_desc: The attribute metadata
:param object value: The value
:returns: The last RestAPI key.
[ "A", "key", "transformer", "that", "returns", "the", "last", "RestAPI", "key", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L109-L118
train
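The same illustrative attribute map run through the last-key variant, which delegates to full_restapi_key_transformer and keeps only the tail of the path:

attr_desc = {'key': 'properties.provisioningState', 'type': 'str'}
key, value = last_restapi_key_transformer('provisioning_state', attr_desc, 'Succeeded')
print(key, value)    # provisioningState Succeeded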
Azure/msrest-for-python
msrest/serialization.py
_create_xml_node
def _create_xml_node(tag, prefix=None, ns=None):
    """Create an XML node."""
    if prefix and ns:
        ET.register_namespace(prefix, ns)
    if ns:
        return ET.Element("{"+ns+"}"+tag)
    else:
        return ET.Element(tag)
python
def _create_xml_node(tag, prefix=None, ns=None):
    """Create an XML node."""
    if prefix and ns:
        ET.register_namespace(prefix, ns)
    if ns:
        return ET.Element("{"+ns+"}"+tag)
    else:
        return ET.Element(tag)
[ "def", "_create_xml_node", "(", "tag", ",", "prefix", "=", "None", ",", "ns", "=", "None", ")", ":", "if", "prefix", "and", "ns", ":", "ET", ".", "register_namespace", "(", "prefix", ",", "ns", ")", "if", "ns", ":", "return", "ET", ".", "Element", "(", "\"{\"", "+", "ns", "+", "\"}\"", "+", "tag", ")", "else", ":", "return", "ET", ".", "Element", "(", "tag", ")" ]
Create an XML node.
[ "Create", "a", "XML", "node", "." ]
0732bc90bdb290e5f58c675ffdd7dbfa9acefc93
https://github.com/Azure/msrest-for-python/blob/0732bc90bdb290e5f58c675ffdd7dbfa9acefc93/msrest/serialization.py#L134-L141
train
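A short sketch of the node helper using an invented namespace; with both a prefix and a namespace given, the prefix is registered process-wide in ElementTree and the returned tag uses Clark notation:

import xml.etree.ElementTree as ET

node = _create_xml_node('Error', prefix='s', ns='http://example.com/ns')
print(node.tag)           # {http://example.com/ns}Error
print(ET.tostring(node))  # b'<s:Error xmlns:s="http://example.com/ns" />'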