repo (string, 7-55 chars) | path (string, 4-127 chars) | func_name (string, 1-88 chars) | original_string (string, 75-19.8k chars) | language (1 class: python) | code (string, 75-19.8k chars) | code_tokens (list) | docstring (string, 3-17.3k chars) | docstring_tokens (list) | sha (string, 40 chars) | url (string, 87-242 chars) | partition (1 class: train) |
---|---|---|---|---|---|---|---|---|---|---|---|
metacloud/gilt | gilt/config.py | config | def config(filename):
"""
Construct `Config` object and return a list.
:param filename: A string containing the path to YAML file.
:return: list
"""
Config = collections.namedtuple('Config', [
'git',
'lock_file',
'version',
'name',
'src',
'dst',
'files',
'post_commands',
])
return [Config(**d) for d in _get_config_generator(filename)] | python | def config(filename):
"""
Construct `Config` object and return a list.
:param filename: A string containing the path to YAML file.
:return: list
"""
Config = collections.namedtuple('Config', [
'git',
'lock_file',
'version',
'name',
'src',
'dst',
'files',
'post_commands',
])
return [Config(**d) for d in _get_config_generator(filename)] | [
"def",
"config",
"(",
"filename",
")",
":",
"Config",
"=",
"collections",
".",
"namedtuple",
"(",
"'Config'",
",",
"[",
"'git'",
",",
"'lock_file'",
",",
"'version'",
",",
"'name'",
",",
"'src'",
",",
"'dst'",
",",
"'files'",
",",
"'post_commands'",
",",
"]",
")",
"return",
"[",
"Config",
"(",
"*",
"*",
"d",
")",
"for",
"d",
"in",
"_get_config_generator",
"(",
"filename",
")",
"]"
]
| Construct `Config` object and return a list.
:param filename: A string containing the path to YAML file.
:return: list | [
"Construct",
"Config",
"object",
"and",
"return",
"a",
"list",
"."
]
| 234eec23fe2f8144369d0ec3b35ad2fef508b8d1 | https://github.com/metacloud/gilt/blob/234eec23fe2f8144369d0ec3b35ad2fef508b8d1/gilt/config.py#L41-L59 | train |
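A minimal, hypothetical use of the `config()` helper from the record above (a sketch only: the `gilt.yml` filename is assumed, and the printed attributes are simply the `Config` namedtuple fields shown in the record):

```python
# Sketch: not part of the dataset or the gilt source. Assumes a gilt.yml
# exists in the working directory in whatever layout _get_config_generator accepts.
from gilt import config as gilt_config

configs = gilt_config.config("gilt.yml")  # -> list of Config namedtuples
for c in configs:
    # each entry describes one repository to overlay
    print(c.name, c.git, c.version, c.dst)
```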
metacloud/gilt | gilt/config.py | _get_files_config | def _get_files_config(src_dir, files_list):
"""
Construct `FileConfig` object and return a list.
:param src_dir: A string containing the source directory.
:param files_list: A list of dicts containing the src/dst mapping of files
to overlay.
:return: list
"""
FilesConfig = collections.namedtuple('FilesConfig',
['src', 'dst', 'post_commands'])
return [
FilesConfig(**d) for d in _get_files_generator(src_dir, files_list)
] | python | def _get_files_config(src_dir, files_list):
"""
Construct `FileConfig` object and return a list.
:param src_dir: A string containing the source directory.
:param files_list: A list of dicts containing the src/dst mapping of files
to overlay.
:return: list
"""
FilesConfig = collections.namedtuple('FilesConfig',
['src', 'dst', 'post_commands'])
return [
FilesConfig(**d) for d in _get_files_generator(src_dir, files_list)
] | [
"def",
"_get_files_config",
"(",
"src_dir",
",",
"files_list",
")",
":",
"FilesConfig",
"=",
"collections",
".",
"namedtuple",
"(",
"'FilesConfig'",
",",
"[",
"'src'",
",",
"'dst'",
",",
"'post_commands'",
"]",
")",
"return",
"[",
"FilesConfig",
"(",
"*",
"*",
"d",
")",
"for",
"d",
"in",
"_get_files_generator",
"(",
"src_dir",
",",
"files_list",
")",
"]"
]
| Construct `FileConfig` object and return a list.
:param src_dir: A string containing the source directory.
:param files_list: A list of dicts containing the src/dst mapping of files
to overlay.
:return: list | [
"Construct",
"FileConfig",
"object",
"and",
"return",
"a",
"list",
"."
]
| 234eec23fe2f8144369d0ec3b35ad2fef508b8d1 | https://github.com/metacloud/gilt/blob/234eec23fe2f8144369d0ec3b35ad2fef508b8d1/gilt/config.py#L62-L76 | train |
metacloud/gilt | gilt/config.py | _get_config | def _get_config(filename):
"""
Parse the provided YAML file and return a dict.
:param filename: A string containing the path to YAML file.
:return: dict
"""
i = interpolation.Interpolator(interpolation.TemplateWithDefaults,
os.environ)
with open(filename, 'r') as stream:
try:
interpolated_config = i.interpolate(stream.read())
return yaml.safe_load(interpolated_config)
except yaml.parser.ParserError as e:
msg = 'Error parsing gilt config: {0}'.format(e)
raise ParseError(msg) | python | def _get_config(filename):
"""
Parse the provided YAML file and return a dict.
:param filename: A string containing the path to YAML file.
:return: dict
"""
i = interpolation.Interpolator(interpolation.TemplateWithDefaults,
os.environ)
with open(filename, 'r') as stream:
try:
interpolated_config = i.interpolate(stream.read())
return yaml.safe_load(interpolated_config)
except yaml.parser.ParserError as e:
msg = 'Error parsing gilt config: {0}'.format(e)
raise ParseError(msg) | [
"def",
"_get_config",
"(",
"filename",
")",
":",
"i",
"=",
"interpolation",
".",
"Interpolator",
"(",
"interpolation",
".",
"TemplateWithDefaults",
",",
"os",
".",
"environ",
")",
"with",
"open",
"(",
"filename",
",",
"'r'",
")",
"as",
"stream",
":",
"try",
":",
"interpolated_config",
"=",
"i",
".",
"interpolate",
"(",
"stream",
".",
"read",
"(",
")",
")",
"return",
"yaml",
".",
"safe_load",
"(",
"interpolated_config",
")",
"except",
"yaml",
".",
"parser",
".",
"ParserError",
"as",
"e",
":",
"msg",
"=",
"'Error parsing gilt config: {0}'",
".",
"format",
"(",
"e",
")",
"raise",
"ParseError",
"(",
"msg",
")"
]
| Parse the provided YAML file and return a dict.
:param filename: A string containing the path to YAML file.
:return: dict | [
"Parse",
"the",
"provided",
"YAML",
"file",
"and",
"return",
"a",
"dict",
"."
]
| 234eec23fe2f8144369d0ec3b35ad2fef508b8d1 | https://github.com/metacloud/gilt/blob/234eec23fe2f8144369d0ec3b35ad2fef508b8d1/gilt/config.py#L126-L142 | train |
metacloud/gilt | gilt/config.py | _get_dst_dir | def _get_dst_dir(dst_dir):
"""
Prefix the provided string with working directory and return a
str.
:param dst_dir: A string to be prefixed with the working dir.
:return: str
"""
wd = os.getcwd()
_makedirs(dst_dir)
return os.path.join(wd, dst_dir) | python | def _get_dst_dir(dst_dir):
"""
Prefix the provided string with working directory and return a
str.
:param dst_dir: A string to be prefixed with the working dir.
:return: str
"""
wd = os.getcwd()
_makedirs(dst_dir)
return os.path.join(wd, dst_dir) | [
"def",
"_get_dst_dir",
"(",
"dst_dir",
")",
":",
"wd",
"=",
"os",
".",
"getcwd",
"(",
")",
"_makedirs",
"(",
"dst_dir",
")",
"return",
"os",
".",
"path",
".",
"join",
"(",
"wd",
",",
"dst_dir",
")"
]
| Prefix the provided string with working directory and return a
str.
:param dst_dir: A string to be prefixed with the working dir.
:return: str | [
"Prefix",
"the",
"provided",
"string",
"with",
"working",
"directory",
"and",
"return",
"a",
"str",
"."
]
| 234eec23fe2f8144369d0ec3b35ad2fef508b8d1 | https://github.com/metacloud/gilt/blob/234eec23fe2f8144369d0ec3b35ad2fef508b8d1/gilt/config.py#L145-L156 | train |
metacloud/gilt | gilt/config.py | _makedirs | def _makedirs(path):
"""
Create a base directory of the provided path and return None.
:param path: A string containing a path to be deconstructed and basedir
created.
:return: None
"""
dirname, _ = os.path.split(path)
try:
os.makedirs(dirname)
except OSError as exc:
if exc.errno == errno.EEXIST:
pass
else:
raise | python | def _makedirs(path):
"""
Create a base directory of the provided path and return None.
:param path: A string containing a path to be deconstructed and basedir
created.
:return: None
"""
dirname, _ = os.path.split(path)
try:
os.makedirs(dirname)
except OSError as exc:
if exc.errno == errno.EEXIST:
pass
else:
raise | [
"def",
"_makedirs",
"(",
"path",
")",
":",
"dirname",
",",
"_",
"=",
"os",
".",
"path",
".",
"split",
"(",
"path",
")",
"try",
":",
"os",
".",
"makedirs",
"(",
"dirname",
")",
"except",
"OSError",
"as",
"exc",
":",
"if",
"exc",
".",
"errno",
"==",
"errno",
".",
"EEXIST",
":",
"pass",
"else",
":",
"raise"
]
| Create a base directory of the provided path and return None.
:param path: A string containing a path to be deconstructed and basedir
created.
:return: None | [
"Create",
"a",
"base",
"directory",
"of",
"the",
"provided",
"path",
"and",
"return",
"None",
"."
]
| 234eec23fe2f8144369d0ec3b35ad2fef508b8d1 | https://github.com/metacloud/gilt/blob/234eec23fe2f8144369d0ec3b35ad2fef508b8d1/gilt/config.py#L193-L208 | train |
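The `_makedirs()` helper above ignores only `EEXIST` and re-raises everything else; on Python 3 the same "create the basedir, tolerate it already existing" behaviour can be sketched with `exist_ok` (an illustration, not gilt code):

```python
# Python 3 sketch equivalent to _makedirs(); hypothetical helper name.
import os

def makedirs_basedir(path):
    dirname, _ = os.path.split(path)
    if dirname:  # skip paths that have no directory component
        os.makedirs(dirname, exist_ok=True)  # no error if the basedir already exists
```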
metacloud/gilt | gilt/shell.py | main | def main(ctx, config, debug): # pragma: no cover
""" gilt - A GIT layering tool. """
ctx.obj = {}
ctx.obj['args'] = {}
ctx.obj['args']['debug'] = debug
ctx.obj['args']['config'] = config | python | def main(ctx, config, debug): # pragma: no cover
""" gilt - A GIT layering tool. """
ctx.obj = {}
ctx.obj['args'] = {}
ctx.obj['args']['debug'] = debug
ctx.obj['args']['config'] = config | [
"def",
"main",
"(",
"ctx",
",",
"config",
",",
"debug",
")",
":",
"# pragma: no cover",
"ctx",
".",
"obj",
"=",
"{",
"}",
"ctx",
".",
"obj",
"[",
"'args'",
"]",
"=",
"{",
"}",
"ctx",
".",
"obj",
"[",
"'args'",
"]",
"[",
"'debug'",
"]",
"=",
"debug",
"ctx",
".",
"obj",
"[",
"'args'",
"]",
"[",
"'config'",
"]",
"=",
"config"
]
| gilt - A GIT layering tool. | [
"gilt",
"-",
"A",
"GIT",
"layering",
"tool",
"."
]
| 234eec23fe2f8144369d0ec3b35ad2fef508b8d1 | https://github.com/metacloud/gilt/blob/234eec23fe2f8144369d0ec3b35ad2fef508b8d1/gilt/shell.py#L50-L55 | train |
metacloud/gilt | gilt/shell.py | overlay | def overlay(ctx): # pragma: no cover
""" Install gilt dependencies """
args = ctx.obj.get('args')
filename = args.get('config')
debug = args.get('debug')
_setup(filename)
for c in config.config(filename):
with fasteners.InterProcessLock(c.lock_file):
util.print_info('{}:'.format(c.name))
if not os.path.exists(c.src):
git.clone(c.name, c.git, c.src, debug=debug)
if c.dst:
git.extract(c.src, c.dst, c.version, debug=debug)
post_commands = {c.dst: c.post_commands}
else:
git.overlay(c.src, c.files, c.version, debug=debug)
post_commands = {
conf.dst: conf.post_commands
for conf in c.files
}
# Run post commands if any.
for dst, commands in post_commands.items():
for command in commands:
msg = ' - running `{}` in {}'.format(command, dst)
util.print_info(msg)
cmd = util.build_sh_cmd(command, cwd=dst)
util.run_command(cmd, debug=debug) | python | def overlay(ctx): # pragma: no cover
""" Install gilt dependencies """
args = ctx.obj.get('args')
filename = args.get('config')
debug = args.get('debug')
_setup(filename)
for c in config.config(filename):
with fasteners.InterProcessLock(c.lock_file):
util.print_info('{}:'.format(c.name))
if not os.path.exists(c.src):
git.clone(c.name, c.git, c.src, debug=debug)
if c.dst:
git.extract(c.src, c.dst, c.version, debug=debug)
post_commands = {c.dst: c.post_commands}
else:
git.overlay(c.src, c.files, c.version, debug=debug)
post_commands = {
conf.dst: conf.post_commands
for conf in c.files
}
# Run post commands if any.
for dst, commands in post_commands.items():
for command in commands:
msg = ' - running `{}` in {}'.format(command, dst)
util.print_info(msg)
cmd = util.build_sh_cmd(command, cwd=dst)
util.run_command(cmd, debug=debug) | [
"def",
"overlay",
"(",
"ctx",
")",
":",
"# pragma: no cover",
"args",
"=",
"ctx",
".",
"obj",
".",
"get",
"(",
"'args'",
")",
"filename",
"=",
"args",
".",
"get",
"(",
"'config'",
")",
"debug",
"=",
"args",
".",
"get",
"(",
"'debug'",
")",
"_setup",
"(",
"filename",
")",
"for",
"c",
"in",
"config",
".",
"config",
"(",
"filename",
")",
":",
"with",
"fasteners",
".",
"InterProcessLock",
"(",
"c",
".",
"lock_file",
")",
":",
"util",
".",
"print_info",
"(",
"'{}:'",
".",
"format",
"(",
"c",
".",
"name",
")",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"c",
".",
"src",
")",
":",
"git",
".",
"clone",
"(",
"c",
".",
"name",
",",
"c",
".",
"git",
",",
"c",
".",
"src",
",",
"debug",
"=",
"debug",
")",
"if",
"c",
".",
"dst",
":",
"git",
".",
"extract",
"(",
"c",
".",
"src",
",",
"c",
".",
"dst",
",",
"c",
".",
"version",
",",
"debug",
"=",
"debug",
")",
"post_commands",
"=",
"{",
"c",
".",
"dst",
":",
"c",
".",
"post_commands",
"}",
"else",
":",
"git",
".",
"overlay",
"(",
"c",
".",
"src",
",",
"c",
".",
"files",
",",
"c",
".",
"version",
",",
"debug",
"=",
"debug",
")",
"post_commands",
"=",
"{",
"conf",
".",
"dst",
":",
"conf",
".",
"post_commands",
"for",
"conf",
"in",
"c",
".",
"files",
"}",
"# Run post commands if any.",
"for",
"dst",
",",
"commands",
"in",
"post_commands",
".",
"items",
"(",
")",
":",
"for",
"command",
"in",
"commands",
":",
"msg",
"=",
"' - running `{}` in {}'",
".",
"format",
"(",
"command",
",",
"dst",
")",
"util",
".",
"print_info",
"(",
"msg",
")",
"cmd",
"=",
"util",
".",
"build_sh_cmd",
"(",
"command",
",",
"cwd",
"=",
"dst",
")",
"util",
".",
"run_command",
"(",
"cmd",
",",
"debug",
"=",
"debug",
")"
]
| Install gilt dependencies | [
"Install",
"gilt",
"dependencies"
]
| 234eec23fe2f8144369d0ec3b35ad2fef508b8d1 | https://github.com/metacloud/gilt/blob/234eec23fe2f8144369d0ec3b35ad2fef508b8d1/gilt/shell.py#L60-L87 | train |
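The `main` and `overlay` entries above are click commands, so they can be exercised in-process with click's test runner. The sketch below assumes `--config` and `--debug` are the option names and that `overlay` is registered as a sub-command of `main`; neither detail is visible in the records because the decorators are not included:

```python
# Sketch: driving gilt's CLI programmatically. Option names and the
# group/sub-command wiring are assumptions (decorators are not shown above).
from click.testing import CliRunner

from gilt import shell

runner = CliRunner()
result = runner.invoke(shell.main, ["--config", "gilt.yml", "overlay"])
print(result.exit_code)
print(result.output)
```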
metacloud/gilt | gilt/git.py | clone | def clone(name, repository, destination, debug=False):
"""
Clone the specified repository into a temporary directory and return None.
:param name: A string containing the name of the repository being cloned.
:param repository: A string containing the repository to clone.
:param destination: A string containing the directory to clone the
repository into.
:param debug: An optional bool to toggle debug output.
:return: None
"""
msg = ' - cloning {} to {}'.format(name, destination)
util.print_info(msg)
cmd = sh.git.bake('clone', repository, destination)
util.run_command(cmd, debug=debug) | python | def clone(name, repository, destination, debug=False):
"""
Clone the specified repository into a temporary directory and return None.
:param name: A string containing the name of the repository being cloned.
:param repository: A string containing the repository to clone.
:param destination: A string containing the directory to clone the
repository into.
:param debug: An optional bool to toggle debug output.
:return: None
"""
msg = ' - cloning {} to {}'.format(name, destination)
util.print_info(msg)
cmd = sh.git.bake('clone', repository, destination)
util.run_command(cmd, debug=debug) | [
"def",
"clone",
"(",
"name",
",",
"repository",
",",
"destination",
",",
"debug",
"=",
"False",
")",
":",
"msg",
"=",
"' - cloning {} to {}'",
".",
"format",
"(",
"name",
",",
"destination",
")",
"util",
".",
"print_info",
"(",
"msg",
")",
"cmd",
"=",
"sh",
".",
"git",
".",
"bake",
"(",
"'clone'",
",",
"repository",
",",
"destination",
")",
"util",
".",
"run_command",
"(",
"cmd",
",",
"debug",
"=",
"debug",
")"
]
| Clone the specified repository into a temporary directory and return None.
:param name: A string containing the name of the repository being cloned.
:param repository: A string containing the repository to clone.
:param destination: A string containing the directory to clone the
repository into.
:param debug: An optional bool to toggle debug output.
:return: None | [
"Clone",
"the",
"specified",
"repository",
"into",
"a",
"temporary",
"directory",
"and",
"return",
"None",
"."
]
| 234eec23fe2f8144369d0ec3b35ad2fef508b8d1 | https://github.com/metacloud/gilt/blob/234eec23fe2f8144369d0ec3b35ad2fef508b8d1/gilt/git.py#L32-L46 | train |
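An illustrative call of `clone()` as defined above; the repository URL and destination path are placeholders:

```python
# Placeholder values only; clone() shells out to `git clone <repo> <dest>`.
from gilt import git as gilt_git

gilt_git.clone(
    name="example-role",                               # label used in log output
    repository="https://github.com/example/role.git",  # made-up repository
    destination="/tmp/gilt/example-role",              # made-up clone target
    debug=True,
)
```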
metacloud/gilt | gilt/git.py | _get_version | def _get_version(version, debug=False):
"""
Handle switching to the specified version and return None.
1. Fetch the origin.
2. Checkout the specified version.
3. Clean the repository before we begin.
4. Pull the origin when a branch; _not_ a commit id.
:param version: A string containing the branch/tag/sha to be exported.
:param debug: An optional bool to toggle debug output.
:return: None
"""
if not any(
(_has_branch(version, debug), _has_tag(version, debug), _has_commit(
version, debug))):
cmd = sh.git.bake('fetch')
util.run_command(cmd, debug=debug)
cmd = sh.git.bake('checkout', version)
util.run_command(cmd, debug=debug)
cmd = sh.git.bake('clean', '-d', '-x', '-f')
util.run_command(cmd, debug=debug)
if _has_branch(version, debug):
cmd = sh.git.bake('pull', rebase=True, ff_only=True)
util.run_command(cmd, debug=debug) | python | def _get_version(version, debug=False):
"""
Handle switching to the specified version and return None.
1. Fetch the origin.
2. Checkout the specified version.
3. Clean the repository before we begin.
4. Pull the origin when a branch; _not_ a commit id.
:param version: A string containing the branch/tag/sha to be exported.
:param debug: An optional bool to toggle debug output.
:return: None
"""
if not any(
(_has_branch(version, debug), _has_tag(version, debug), _has_commit(
version, debug))):
cmd = sh.git.bake('fetch')
util.run_command(cmd, debug=debug)
cmd = sh.git.bake('checkout', version)
util.run_command(cmd, debug=debug)
cmd = sh.git.bake('clean', '-d', '-x', '-f')
util.run_command(cmd, debug=debug)
if _has_branch(version, debug):
cmd = sh.git.bake('pull', rebase=True, ff_only=True)
util.run_command(cmd, debug=debug) | [
"def",
"_get_version",
"(",
"version",
",",
"debug",
"=",
"False",
")",
":",
"if",
"not",
"any",
"(",
"(",
"_has_branch",
"(",
"version",
",",
"debug",
")",
",",
"_has_tag",
"(",
"version",
",",
"debug",
")",
",",
"_has_commit",
"(",
"version",
",",
"debug",
")",
")",
")",
":",
"cmd",
"=",
"sh",
".",
"git",
".",
"bake",
"(",
"'fetch'",
")",
"util",
".",
"run_command",
"(",
"cmd",
",",
"debug",
"=",
"debug",
")",
"cmd",
"=",
"sh",
".",
"git",
".",
"bake",
"(",
"'checkout'",
",",
"version",
")",
"util",
".",
"run_command",
"(",
"cmd",
",",
"debug",
"=",
"debug",
")",
"cmd",
"=",
"sh",
".",
"git",
".",
"bake",
"(",
"'clean'",
",",
"'-d'",
",",
"'-x'",
",",
"'-f'",
")",
"util",
".",
"run_command",
"(",
"cmd",
",",
"debug",
"=",
"debug",
")",
"if",
"_has_branch",
"(",
"version",
",",
"debug",
")",
":",
"cmd",
"=",
"sh",
".",
"git",
".",
"bake",
"(",
"'pull'",
",",
"rebase",
"=",
"True",
",",
"ff_only",
"=",
"True",
")",
"util",
".",
"run_command",
"(",
"cmd",
",",
"debug",
"=",
"debug",
")"
]
| Handle switching to the specified version and return None.
1. Fetch the origin.
2. Checkout the specified version.
3. Clean the repository before we begin.
4. Pull the origin when a branch; _not_ a commit id.
:param version: A string containing the branch/tag/sha to be exported.
:param debug: An optional bool to toggle debug output.
:return: None | [
"Handle",
"switching",
"to",
"the",
"specified",
"version",
"and",
"return",
"None",
"."
]
| 234eec23fe2f8144369d0ec3b35ad2fef508b8d1 | https://github.com/metacloud/gilt/blob/234eec23fe2f8144369d0ec3b35ad2fef508b8d1/gilt/git.py#L109-L133 | train |
metacloud/gilt | gilt/git.py | _has_commit | def _has_commit(version, debug=False):
"""
Determine a version is a local git commit sha or not.
:param version: A string containing the branch/tag/sha to be determined.
:param debug: An optional bool to toggle debug output.
:return: bool
"""
if _has_tag(version, debug) or _has_branch(version, debug):
return False
cmd = sh.git.bake('cat-file', '-e', version)
try:
util.run_command(cmd, debug=debug)
return True
except sh.ErrorReturnCode:
return False | python | def _has_commit(version, debug=False):
"""
Determine a version is a local git commit sha or not.
:param version: A string containing the branch/tag/sha to be determined.
:param debug: An optional bool to toggle debug output.
:return: bool
"""
if _has_tag(version, debug) or _has_branch(version, debug):
return False
cmd = sh.git.bake('cat-file', '-e', version)
try:
util.run_command(cmd, debug=debug)
return True
except sh.ErrorReturnCode:
return False | [
"def",
"_has_commit",
"(",
"version",
",",
"debug",
"=",
"False",
")",
":",
"if",
"_has_tag",
"(",
"version",
",",
"debug",
")",
"or",
"_has_branch",
"(",
"version",
",",
"debug",
")",
":",
"return",
"False",
"cmd",
"=",
"sh",
".",
"git",
".",
"bake",
"(",
"'cat-file'",
",",
"'-e'",
",",
"version",
")",
"try",
":",
"util",
".",
"run_command",
"(",
"cmd",
",",
"debug",
"=",
"debug",
")",
"return",
"True",
"except",
"sh",
".",
"ErrorReturnCode",
":",
"return",
"False"
]
| Determine a version is a local git commit sha or not.
:param version: A string containing the branch/tag/sha to be determined.
:param debug: An optional bool to toggle debug output.
:return: bool | [
"Determine",
"a",
"version",
"is",
"a",
"local",
"git",
"commit",
"sha",
"or",
"not",
"."
]
| 234eec23fe2f8144369d0ec3b35ad2fef508b8d1 | https://github.com/metacloud/gilt/blob/234eec23fe2f8144369d0ec3b35ad2fef508b8d1/gilt/git.py#L136-L151 | train |
metacloud/gilt | gilt/git.py | _has_tag | def _has_tag(version, debug=False):
"""
Determine a version is a local git tag name or not.
:param version: A string containing the branch/tag/sha to be determined.
:param debug: An optional bool to toggle debug output.
:return: bool
"""
cmd = sh.git.bake('show-ref', '--verify', '--quiet',
"refs/tags/{}".format(version))
try:
util.run_command(cmd, debug=debug)
return True
except sh.ErrorReturnCode:
return False | python | def _has_tag(version, debug=False):
"""
Determine a version is a local git tag name or not.
:param version: A string containing the branch/tag/sha to be determined.
:param debug: An optional bool to toggle debug output.
:return: bool
"""
cmd = sh.git.bake('show-ref', '--verify', '--quiet',
"refs/tags/{}".format(version))
try:
util.run_command(cmd, debug=debug)
return True
except sh.ErrorReturnCode:
return False | [
"def",
"_has_tag",
"(",
"version",
",",
"debug",
"=",
"False",
")",
":",
"cmd",
"=",
"sh",
".",
"git",
".",
"bake",
"(",
"'show-ref'",
",",
"'--verify'",
",",
"'--quiet'",
",",
"\"refs/tags/{}\"",
".",
"format",
"(",
"version",
")",
")",
"try",
":",
"util",
".",
"run_command",
"(",
"cmd",
",",
"debug",
"=",
"debug",
")",
"return",
"True",
"except",
"sh",
".",
"ErrorReturnCode",
":",
"return",
"False"
]
| Determine a version is a local git tag name or not.
:param version: A string containing the branch/tag/sha to be determined.
:param debug: An optional bool to toggle debug output.
:return: bool | [
"Determine",
"a",
"version",
"is",
"a",
"local",
"git",
"tag",
"name",
"or",
"not",
"."
]
| 234eec23fe2f8144369d0ec3b35ad2fef508b8d1 | https://github.com/metacloud/gilt/blob/234eec23fe2f8144369d0ec3b35ad2fef508b8d1/gilt/git.py#L154-L168 | train |
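Both `_has_tag()` and `_has_commit()` above boil down to checking a git command's exit status. A rough subprocess-based equivalent of the tag check (a sketch, not gilt code) looks like this:

```python
# Sketch: the same tag check without the sh library.
import subprocess

def has_tag(version):
    rc = subprocess.call(
        ["git", "show-ref", "--verify", "--quiet", "refs/tags/{}".format(version)]
    )
    return rc == 0  # git exits 0 only when the tag ref exists
```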
metacloud/gilt | gilt/util.py | run_command | def run_command(cmd, debug=False):
"""
Execute the given command and return None.
:param cmd: A `sh.Command` object to execute.
:param debug: An optional bool to toggle debug output.
:return: None
"""
if debug:
msg = ' PWD: {}'.format(os.getcwd())
print_warn(msg)
msg = ' COMMAND: {}'.format(cmd)
print_warn(msg)
cmd() | python | def run_command(cmd, debug=False):
"""
Execute the given command and return None.
:param cmd: A `sh.Command` object to execute.
:param debug: An optional bool to toggle debug output.
:return: None
"""
if debug:
msg = ' PWD: {}'.format(os.getcwd())
print_warn(msg)
msg = ' COMMAND: {}'.format(cmd)
print_warn(msg)
cmd() | [
"def",
"run_command",
"(",
"cmd",
",",
"debug",
"=",
"False",
")",
":",
"if",
"debug",
":",
"msg",
"=",
"' PWD: {}'",
".",
"format",
"(",
"os",
".",
"getcwd",
"(",
")",
")",
"print_warn",
"(",
"msg",
")",
"msg",
"=",
"' COMMAND: {}'",
".",
"format",
"(",
"cmd",
")",
"print_warn",
"(",
"msg",
")",
"cmd",
"(",
")"
]
| Execute the given command and return None.
:param cmd: A `sh.Command` object to execute.
:param debug: An optional bool to toggle debug output.
:return: None | [
"Execute",
"the",
"given",
"command",
"and",
"return",
"None",
"."
]
| 234eec23fe2f8144369d0ec3b35ad2fef508b8d1 | https://github.com/metacloud/gilt/blob/234eec23fe2f8144369d0ec3b35ad2fef508b8d1/gilt/util.py#L46-L59 | train |
metacloud/gilt | gilt/util.py | build_sh_cmd | def build_sh_cmd(cmd, cwd=None):
"""Build a `sh.Command` from a string.
:param cmd: String with the command to convert.
:param cwd: Optional path to use as working directory.
:return: `sh.Command`
"""
args = cmd.split()
return getattr(sh, args[0]).bake(_cwd=cwd, *args[1:]) | python | def build_sh_cmd(cmd, cwd=None):
"""Build a `sh.Command` from a string.
:param cmd: String with the command to convert.
:param cwd: Optional path to use as working directory.
:return: `sh.Command`
"""
args = cmd.split()
return getattr(sh, args[0]).bake(_cwd=cwd, *args[1:]) | [
"def",
"build_sh_cmd",
"(",
"cmd",
",",
"cwd",
"=",
"None",
")",
":",
"args",
"=",
"cmd",
".",
"split",
"(",
")",
"return",
"getattr",
"(",
"sh",
",",
"args",
"[",
"0",
"]",
")",
".",
"bake",
"(",
"_cwd",
"=",
"cwd",
",",
"*",
"args",
"[",
"1",
":",
"]",
")"
]
| Build a `sh.Command` from a string.
:param cmd: String with the command to convert.
:param cwd: Optional path to use as working directory.
:return: `sh.Command` | [
"Build",
"a",
"sh",
".",
"Command",
"from",
"a",
"string",
"."
]
| 234eec23fe2f8144369d0ec3b35ad2fef508b8d1 | https://github.com/metacloud/gilt/blob/234eec23fe2f8144369d0ec3b35ad2fef508b8d1/gilt/util.py#L62-L70 | train |
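An illustrative pairing of `build_sh_cmd()` with `run_command()` from the records above; the command string and directory are arbitrary. Because the helper splits on whitespace, arguments that contain spaces would not survive the split:

```python
# Arbitrary command: build_sh_cmd("ls -la", cwd="/tmp") resolves to sh.ls
# baked with _cwd="/tmp" and the single argument "-la".
from gilt import util

cmd = util.build_sh_cmd("ls -la", cwd="/tmp")
util.run_command(cmd, debug=True)  # prints PWD and COMMAND first when debug=True
```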
metacloud/gilt | gilt/util.py | copy | def copy(src, dst):
"""
Handle the copying of a file or directory.
The destination basedir _must_ exist.
:param src: A string containing the path of the source to copy. If the
source ends with a '/', will become a recursive directory copy of source.
:param dst: A string containing the path to the destination. If the
destination ends with a '/', will copy into the target directory.
:return: None
"""
try:
shutil.copytree(src, dst)
except OSError as exc:
if exc.errno == errno.ENOTDIR:
shutil.copy(src, dst)
else:
raise | python | def copy(src, dst):
"""
Handle the copying of a file or directory.
The destination basedir _must_ exist.
:param src: A string containing the path of the source to copy. If the
source ends with a '/', will become a recursive directory copy of source.
:param dst: A string containing the path to the destination. If the
destination ends with a '/', will copy into the target directory.
:return: None
"""
try:
shutil.copytree(src, dst)
except OSError as exc:
if exc.errno == errno.ENOTDIR:
shutil.copy(src, dst)
else:
raise | [
"def",
"copy",
"(",
"src",
",",
"dst",
")",
":",
"try",
":",
"shutil",
".",
"copytree",
"(",
"src",
",",
"dst",
")",
"except",
"OSError",
"as",
"exc",
":",
"if",
"exc",
".",
"errno",
"==",
"errno",
".",
"ENOTDIR",
":",
"shutil",
".",
"copy",
"(",
"src",
",",
"dst",
")",
"else",
":",
"raise"
]
| Handle the copying of a file or directory.
The destination basedir _must_ exist.
:param src: A string containing the path of the source to copy. If the
source ends with a '/', will become a recursive directory copy of source.
:param dst: A string containing the path to the destination. If the
destination ends with a '/', will copy into the target directory.
:return: None | [
"Handle",
"the",
"copying",
"of",
"a",
"file",
"or",
"directory",
"."
]
| 234eec23fe2f8144369d0ec3b35ad2fef508b8d1 | https://github.com/metacloud/gilt/blob/234eec23fe2f8144369d0ec3b35ad2fef508b8d1/gilt/util.py#L83-L101 | train |
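The `copy()` helper above relies on the `ENOTDIR` fallback to handle both a single file and a directory tree; an explicit Python 3 sketch of the same behaviour:

```python
# Sketch only: equivalent "file or directory" copy without inspecting errno.
import shutil
from pathlib import Path

def copy_path(src, dst):
    if Path(src).is_dir():
        shutil.copytree(src, dst)  # recursive directory copy
    else:
        shutil.copy(src, dst)      # single-file copy
```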
yhat/db.py | db/table.py | Table.to_dict | def to_dict(self):
"""Serialize representation of the table for local caching."""
return {'schema': self.schema, 'name': self.name, 'columns': [col.to_dict() for col in self._columns],
'foreign_keys': self.foreign_keys.to_dict(), 'ref_keys': self.ref_keys.to_dict()} | python | def to_dict(self):
"""Serialize representation of the table for local caching."""
return {'schema': self.schema, 'name': self.name, 'columns': [col.to_dict() for col in self._columns],
'foreign_keys': self.foreign_keys.to_dict(), 'ref_keys': self.ref_keys.to_dict()} | [
"def",
"to_dict",
"(",
"self",
")",
":",
"return",
"{",
"'schema'",
":",
"self",
".",
"schema",
",",
"'name'",
":",
"self",
".",
"name",
",",
"'columns'",
":",
"[",
"col",
".",
"to_dict",
"(",
")",
"for",
"col",
"in",
"self",
".",
"_columns",
"]",
",",
"'foreign_keys'",
":",
"self",
".",
"foreign_keys",
".",
"to_dict",
"(",
")",
",",
"'ref_keys'",
":",
"self",
".",
"ref_keys",
".",
"to_dict",
"(",
")",
"}"
]
| Serialize representation of the table for local caching. | [
"Serialize",
"representation",
"of",
"the",
"table",
"for",
"local",
"caching",
"."
]
| df2dbb8ef947c2d4253d31f29eb58c4084daffc5 | https://github.com/yhat/db.py/blob/df2dbb8ef947c2d4253d31f29eb58c4084daffc5/db/table.py#L348-L351 | train |
yhat/db.py | db/db.py | list_profiles | def list_profiles():
"""
Lists all of the database profiles available
Examples
--------
No doctest, covered by unittest
list_profiles()
{'demo': {u'dbname': None,
u'dbtype': u'sqlite',
u'filename': u'/Users/glamp/repos/yhat/opensource/db.py/db/data/chinook.sqlite',
u'hostname': u'localhost',
u'password': None,
u'port': 5432,
u'username': None},
'muppets': {u'dbname': u'muppetdb',
u'dbtype': u'postgres',
u'filename': None,
u'hostname': u'muppets.yhathq.com',
u'password': None,
u'port': 5432,
u'username': u'kermit'}}
"""
profiles = {}
user = os.path.expanduser("~")
for f in os.listdir(user):
if f.startswith(".db.py_"):
profile = load_from_json(os.path.join(user, f))
tables = profile.pop('tables', None)
if tables:
profile['metadata'] = True
else:
profile['metadata'] = False
profiles[f[7:]] = profile
return profiles | python | def list_profiles():
"""
Lists all of the database profiles available
Examples
--------
No doctest, covered by unittest
list_profiles()
{'demo': {u'dbname': None,
u'dbtype': u'sqlite',
u'filename': u'/Users/glamp/repos/yhat/opensource/db.py/db/data/chinook.sqlite',
u'hostname': u'localhost',
u'password': None,
u'port': 5432,
u'username': None},
'muppets': {u'dbname': u'muppetdb',
u'dbtype': u'postgres',
u'filename': None,
u'hostname': u'muppets.yhathq.com',
u'password': None,
u'port': 5432,
u'username': u'kermit'}}
"""
profiles = {}
user = os.path.expanduser("~")
for f in os.listdir(user):
if f.startswith(".db.py_"):
profile = load_from_json(os.path.join(user, f))
tables = profile.pop('tables', None)
if tables:
profile['metadata'] = True
else:
profile['metadata'] = False
profiles[f[7:]] = profile
return profiles | [
"def",
"list_profiles",
"(",
")",
":",
"profiles",
"=",
"{",
"}",
"user",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"\"~\"",
")",
"for",
"f",
"in",
"os",
".",
"listdir",
"(",
"user",
")",
":",
"if",
"f",
".",
"startswith",
"(",
"\".db.py_\"",
")",
":",
"profile",
"=",
"load_from_json",
"(",
"os",
".",
"path",
".",
"join",
"(",
"user",
",",
"f",
")",
")",
"tables",
"=",
"profile",
".",
"pop",
"(",
"'tables'",
",",
"None",
")",
"if",
"tables",
":",
"profile",
"[",
"'metadata'",
"]",
"=",
"True",
"else",
":",
"profile",
"[",
"'metadata'",
"]",
"=",
"False",
"profiles",
"[",
"f",
"[",
"7",
":",
"]",
"]",
"=",
"profile",
"return",
"profiles"
]
| Lists all of the database profiles available
Examples
--------
No doctest, covered by unittest
list_profiles()
{'demo': {u'dbname': None,
u'dbtype': u'sqlite',
u'filename': u'/Users/glamp/repos/yhat/opensource/db.py/db/data/chinook.sqlite',
u'hostname': u'localhost',
u'password': None,
u'port': 5432,
u'username': None},
'muppets': {u'dbname': u'muppetdb',
u'dbtype': u'postgres',
u'filename': None,
u'hostname': u'muppets.yhathq.com',
u'password': None,
u'port': 5432,
u'username': u'kermit'}} | [
"Lists",
"all",
"of",
"the",
"database",
"profiles",
"available"
]
| df2dbb8ef947c2d4253d31f29eb58c4084daffc5 | https://github.com/yhat/db.py/blob/df2dbb8ef947c2d4253d31f29eb58c4084daffc5/db/db.py#L1059-L1094 | train |
yhat/db.py | db/db.py | remove_profile | def remove_profile(name, s3=False):
"""
Removes a profile from your config
"""
user = os.path.expanduser("~")
if s3:
f = os.path.join(user, S3_PROFILE_ID + name)
else:
f = os.path.join(user, DBPY_PROFILE_ID + name)
try:
try:
open(f)
except:
raise Exception("Profile '{0}' does not exist. Could not find file {1}".format(name, f))
os.remove(f)
except Exception as e:
raise Exception("Could not remove profile {0}! Exception: {1}".format(name, e)) | python | def remove_profile(name, s3=False):
"""
Removes a profile from your config
"""
user = os.path.expanduser("~")
if s3:
f = os.path.join(user, S3_PROFILE_ID + name)
else:
f = os.path.join(user, DBPY_PROFILE_ID + name)
try:
try:
open(f)
except:
raise Exception("Profile '{0}' does not exist. Could not find file {1}".format(name, f))
os.remove(f)
except Exception as e:
raise Exception("Could not remove profile {0}! Exception: {1}".format(name, e)) | [
"def",
"remove_profile",
"(",
"name",
",",
"s3",
"=",
"False",
")",
":",
"user",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"\"~\"",
")",
"if",
"s3",
":",
"f",
"=",
"os",
".",
"path",
".",
"join",
"(",
"user",
",",
"S3_PROFILE_ID",
"+",
"name",
")",
"else",
":",
"f",
"=",
"os",
".",
"path",
".",
"join",
"(",
"user",
",",
"DBPY_PROFILE_ID",
"+",
"name",
")",
"try",
":",
"try",
":",
"open",
"(",
"f",
")",
"except",
":",
"raise",
"Exception",
"(",
"\"Profile '{0}' does not exist. Could not find file {1}\"",
".",
"format",
"(",
"name",
",",
"f",
")",
")",
"os",
".",
"remove",
"(",
"f",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"Exception",
"(",
"\"Could not remove profile {0}! Excpetion: {1}\"",
".",
"format",
"(",
"name",
",",
"e",
")",
")"
]
| Removes a profile from your config | [
"Removes",
"a",
"profile",
"from",
"your",
"config"
]
| df2dbb8ef947c2d4253d31f29eb58c4084daffc5 | https://github.com/yhat/db.py/blob/df2dbb8ef947c2d4253d31f29eb58c4084daffc5/db/db.py#L1097-L1114 | train |
yhat/db.py | db/db.py | DB.tables | def tables(self):
"""A lazy loaded reference to the table metadata for the DB."""
if len(self._tables) == 0:
self.refresh_schema(self._exclude_system_tables, self._use_cache)
return self._tables | python | def tables(self):
"""A lazy loaded reference to the table metadata for the DB."""
if len(self._tables) == 0:
self.refresh_schema(self._exclude_system_tables, self._use_cache)
return self._tables | [
"def",
"tables",
"(",
"self",
")",
":",
"if",
"len",
"(",
"self",
".",
"_tables",
")",
"==",
"0",
":",
"self",
".",
"refresh_schema",
"(",
"self",
".",
"_exclude_system_tables",
",",
"self",
".",
"_use_cache",
")",
"return",
"self",
".",
"_tables"
]
| A lazy loaded reference to the table metadata for the DB. | [
"A",
"lazy",
"loaded",
"reference",
"to",
"the",
"table",
"metadata",
"for",
"the",
"DB",
"."
]
| df2dbb8ef947c2d4253d31f29eb58c4084daffc5 | https://github.com/yhat/db.py/blob/df2dbb8ef947c2d4253d31f29eb58c4084daffc5/db/db.py#L262-L266 | train |
yhat/db.py | db/db.py | DB.save_credentials | def save_credentials(self, profile="default"):
"""
Save your database credentials so you don't have to save them in script.
Parameters
----------
profile: str
(optional) identifier/name for your database (i.e. "dw", "prod")
from db import DB
import pymysql
db = DB(username="hank", password="foo", hostname="prod.mardukas.com", dbname="bar", dbtype="mysql")
db.save_credentials(profile="production")
db = DB(username="hank", password="foo", hostname="staging.mardukas.com", dbname="bar", dbtype="mysql")
db.save_credentials(profile="staging")
db = DB(profile="staging")
>>> from db import DemoDB
>>> db = DemoDB()
>>> db.save_credentials(profile='test')
"""
f = profile_path(DBPY_PROFILE_ID, profile)
dump_to_json(f, self.credentials) | python | def save_credentials(self, profile="default"):
"""
Save your database credentials so you don't have to save them in script.
Parameters
----------
profile: str
(optional) identifier/name for your database (i.e. "dw", "prod")
from db import DB
import pymysql
db = DB(username="hank", password="foo", hostname="prod.mardukas.com", dbname="bar", dbtype="mysql")
db.save_credentials(profile="production")
db = DB(username="hank", password="foo", hostname="staging.mardukas.com", dbname="bar", dbtype="mysql")
db.save_credentials(profile="staging")
db = DB(profile="staging")
>>> from db import DemoDB
>>> db = DemoDB()
>>> db.save_credentials(profile='test')
"""
f = profile_path(DBPY_PROFILE_ID, profile)
dump_to_json(f, self.credentials) | [
"def",
"save_credentials",
"(",
"self",
",",
"profile",
"=",
"\"default\"",
")",
":",
"f",
"=",
"profile_path",
"(",
"DBPY_PROFILE_ID",
",",
"profile",
")",
"dump_to_json",
"(",
"f",
",",
"self",
".",
"credentials",
")"
]
| Save your database credentials so you don't have to save them in script.
Parameters
----------
profile: str
(optional) identifier/name for your database (i.e. "dw", "prod")
from db import DB
import pymysql
db = DB(username="hank", password="foo", hostname="prod.mardukas.com", dbname="bar", dbtype="mysql")
db.save_credentials(profile="production")
db = DB(username="hank", password="foo", hostname="staging.mardukas.com", dbname="bar", dbtype="mysql")
db.save_credentials(profile="staging")
db = DB(profile="staging")
>>> from db import DemoDB
>>> db = DemoDB()
>>> db.save_credentials(profile='test') | [
"Save",
"your",
"database",
"credentials",
"so",
"you",
"don",
"t",
"have",
"to",
"save",
"them",
"in",
"script",
"."
]
| df2dbb8ef947c2d4253d31f29eb58c4084daffc5 | https://github.com/yhat/db.py/blob/df2dbb8ef947c2d4253d31f29eb58c4084daffc5/db/db.py#L307-L329 | train |
yhat/db.py | db/db.py | DB.save_metadata | def save_metadata(self, profile="default"):
"""Save the database credentials, plus the database properties to your db.py profile."""
if len(self.tables) > 0:
f = profile_path(DBPY_PROFILE_ID, profile)
dump_to_json(f, self.to_dict()) | python | def save_metadata(self, profile="default"):
"""Save the database credentials, plus the database properties to your db.py profile."""
if len(self.tables) > 0:
f = profile_path(DBPY_PROFILE_ID, profile)
dump_to_json(f, self.to_dict()) | [
"def",
"save_metadata",
"(",
"self",
",",
"profile",
"=",
"\"default\"",
")",
":",
"if",
"len",
"(",
"self",
".",
"tables",
")",
">",
"0",
":",
"f",
"=",
"profile_path",
"(",
"DBPY_PROFILE_ID",
",",
"profile",
")",
"dump_to_json",
"(",
"f",
",",
"self",
".",
"to_dict",
"(",
")",
")"
]
| Save the database credentials, plus the database properties to your db.py profile. | [
"Save",
"the",
"database",
"credentials",
"plus",
"the",
"database",
"properties",
"to",
"your",
"db",
".",
"py",
"profile",
"."
]
| df2dbb8ef947c2d4253d31f29eb58c4084daffc5 | https://github.com/yhat/db.py/blob/df2dbb8ef947c2d4253d31f29eb58c4084daffc5/db/db.py#L338-L342 | train |
yhat/db.py | db/db.py | DB.credentials | def credentials(self):
"""Dict representation of all credentials for the database."""
if self.filename:
db_filename = os.path.join(os.getcwd(), self.filename)
else:
db_filename = None
return {
"username": self.username,
"password": self.password,
"hostname": self.hostname,
"port": self.port,
"filename": db_filename,
"dbname": self.dbname,
"dbtype": self.dbtype,
"schemas": self.schemas,
"limit": self.limit,
"keys_per_column": self.keys_per_column,
} | python | def credentials(self):
"""Dict representation of all credentials for the database."""
if self.filename:
db_filename = os.path.join(os.getcwd(), self.filename)
else:
db_filename = None
return {
"username": self.username,
"password": self.password,
"hostname": self.hostname,
"port": self.port,
"filename": db_filename,
"dbname": self.dbname,
"dbtype": self.dbtype,
"schemas": self.schemas,
"limit": self.limit,
"keys_per_column": self.keys_per_column,
} | [
"def",
"credentials",
"(",
"self",
")",
":",
"if",
"self",
".",
"filename",
":",
"db_filename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"getcwd",
"(",
")",
",",
"self",
".",
"filename",
")",
"else",
":",
"db_filename",
"=",
"None",
"return",
"{",
"\"username\"",
":",
"self",
".",
"username",
",",
"\"password\"",
":",
"self",
".",
"password",
",",
"\"hostname\"",
":",
"self",
".",
"hostname",
",",
"\"port\"",
":",
"self",
".",
"port",
",",
"\"filename\"",
":",
"db_filename",
",",
"\"dbname\"",
":",
"self",
".",
"dbname",
",",
"\"dbtype\"",
":",
"self",
".",
"dbtype",
",",
"\"schemas\"",
":",
"self",
".",
"schemas",
",",
"\"limit\"",
":",
"self",
".",
"limit",
",",
"\"keys_per_column\"",
":",
"self",
".",
"keys_per_column",
",",
"}"
]
| Dict representation of all credentials for the database. | [
"Dict",
"representation",
"of",
"all",
"credentials",
"for",
"the",
"database",
"."
]
| df2dbb8ef947c2d4253d31f29eb58c4084daffc5 | https://github.com/yhat/db.py/blob/df2dbb8ef947c2d4253d31f29eb58c4084daffc5/db/db.py#L345-L363 | train |
yhat/db.py | db/db.py | DB.find_table | def find_table(self, search):
"""
Aggressively search through your database's schema for a table.
Parameters
-----------
search: str
glob pattern for what you're looking for
Examples
----------
>>> from db import DemoDB
>>> db = DemoDB()
>>> db.find_table("A*")
+--------+--------------------------+
| Table | Columns |
+--------+--------------------------+
| Album | AlbumId, Title, ArtistId |
| Artist | ArtistId, Name |
+--------+--------------------------+
>>> results = db.find_table("tmp*") # returns all tables prefixed w/ tmp
>>> results = db.find_table("prod_*") # returns all tables prefixed w/ prod_
>>> results = db.find_table("*Invoice*") # returns all tables containing trans
>>> results = db.find_table("*") # returns everything
"""
tables = []
for table in self.tables:
if glob.fnmatch.fnmatch(table.name, search):
tables.append(table)
return TableSet(tables) | python | def find_table(self, search):
"""
Aggressively search through your database's schema for a table.
Parameters
-----------
search: str
glob pattern for what you're looking for
Examples
----------
>>> from db import DemoDB
>>> db = DemoDB()
>>> db.find_table("A*")
+--------+--------------------------+
| Table | Columns |
+--------+--------------------------+
| Album | AlbumId, Title, ArtistId |
| Artist | ArtistId, Name |
+--------+--------------------------+
>>> results = db.find_table("tmp*") # returns all tables prefixed w/ tmp
>>> results = db.find_table("prod_*") # returns all tables prefixed w/ prod_
>>> results = db.find_table("*Invoice*") # returns all tables containing trans
>>> results = db.find_table("*") # returns everything
"""
tables = []
for table in self.tables:
if glob.fnmatch.fnmatch(table.name, search):
tables.append(table)
return TableSet(tables) | [
"def",
"find_table",
"(",
"self",
",",
"search",
")",
":",
"tables",
"=",
"[",
"]",
"for",
"table",
"in",
"self",
".",
"tables",
":",
"if",
"glob",
".",
"fnmatch",
".",
"fnmatch",
"(",
"table",
".",
"name",
",",
"search",
")",
":",
"tables",
".",
"append",
"(",
"table",
")",
"return",
"TableSet",
"(",
"tables",
")"
]
| Aggressively search through your database's schema for a table.
Parameters
-----------
search: str
glob pattern for what you're looking for
Examples
----------
>>> from db import DemoDB
>>> db = DemoDB()
>>> db.find_table("A*")
+--------+--------------------------+
| Table | Columns |
+--------+--------------------------+
| Album | AlbumId, Title, ArtistId |
| Artist | ArtistId, Name |
+--------+--------------------------+
>>> results = db.find_table("tmp*") # returns all tables prefixed w/ tmp
>>> results = db.find_table("prod_*") # returns all tables prefixed w/ prod_
>>> results = db.find_table("*Invoice*") # returns all tables containing trans
>>> results = db.find_table("*") # returns everything | [
"Aggresively",
"search",
"through",
"your",
"database",
"s",
"schema",
"for",
"a",
"table",
"."
]
| df2dbb8ef947c2d4253d31f29eb58c4084daffc5 | https://github.com/yhat/db.py/blob/df2dbb8ef947c2d4253d31f29eb58c4084daffc5/db/db.py#L365-L394 | train |
yhat/db.py | db/db.py | DB.find_column | def find_column(self, search, data_type=None):
"""
Aggressively search through your database's schema for a column.
Parameters
-----------
search: str
glob pattern for what you're looking for
data_type: str, list
(optional) specify which data type(s) you want to return
Examples
----------
>>> from db import DemoDB
>>> db = DemoDB()
>>> len(db.find_column("Name").columns)
5
>>> len(db.find_column("*Id").columns)
20
>>> len(db.find_column("*Address*").columns)
3
>>> len(db.find_column("*Address*", data_type="NVARCHAR(70)").columns)
3
>>> len(db.find_column("*e*", data_type=["NVARCHAR(70)", "INTEGER"]).columns)
17
-= Should sort in some way for all those doctests to be viable...
-= if not, there's always a random issue where rows are not in the same order, making doctest fail.
db.find_column("Name") # returns all columns named "Name"
+-----------+-------------+---------------+
| Table | Column Name | Type |
+-----------+-------------+---------------+
| Artist | Name | NVARCHAR(120) |
| Genre | Name | NVARCHAR(120) |
| MediaType | Name | NVARCHAR(120) |
| Playlist | Name | NVARCHAR(120) |
| Track | Name | NVARCHAR(200) |
+-----------+-------------+---------------+
db.find_column("*Id") # returns all columns ending w/ Id
+---------------+---------------+---------+
| Table | Column Name | Type |
+---------------+---------------+---------+
| Album | AlbumId | INTEGER |
| Album | ArtistId | INTEGER |
| Artist | ArtistId | INTEGER |
| Customer | SupportRepId | INTEGER |
| Customer | CustomerId | INTEGER |
| Employee | EmployeeId | INTEGER |
| Genre | GenreId | INTEGER |
| Invoice | InvoiceId | INTEGER |
| Invoice | CustomerId | INTEGER |
| InvoiceLine | TrackId | INTEGER |
| InvoiceLine | InvoiceLineId | INTEGER |
| InvoiceLine | InvoiceId | INTEGER |
| MediaType | MediaTypeId | INTEGER |
| Playlist | PlaylistId | INTEGER |
| PlaylistTrack | TrackId | INTEGER |
| PlaylistTrack | PlaylistId | INTEGER |
| Track | TrackId | INTEGER |
| Track | AlbumId | INTEGER |
| Track | MediaTypeId | INTEGER |
| Track | GenreId | INTEGER |
+---------------+---------------+---------+
db.find_column("*Address*") # returns all columns containing Address
+----------+----------------+--------------+
| Table | Column Name | Type |
+----------+----------------+--------------+
| Customer | Address | NVARCHAR(70) |
| Employee | Address | NVARCHAR(70) |
| Invoice | BillingAddress | NVARCHAR(70) |
+----------+----------------+--------------+
db.find_column("*Address*", data_type="NVARCHAR(70)") # returns all columns containing Address that are varchars
+----------+----------------+--------------+
| Table | Column Name | Type |
+----------+----------------+--------------+
| Customer | Address | NVARCHAR(70) |
| Employee | Address | NVARCHAR(70) |
| Invoice | BillingAddress | NVARCHAR(70) |
+----------+----------------+--------------+
db.find_column("*e*", data_type=["NVARCHAR(70)", "INTEGER"]) # returns all columns have an "e" and are NVARCHAR(70)S or INTEGERS
+-------------+----------------+--------------+
| Table | Column Name | Type |
+-------------+----------------+--------------+
| Customer | Address | NVARCHAR(70) |
| Customer | SupportRepId | INTEGER |
| Customer | CustomerId | INTEGER |
| Employee | ReportsTo | INTEGER |
| Employee | EmployeeId | INTEGER |
| Employee | Address | NVARCHAR(70) |
| Genre | GenreId | INTEGER |
| Invoice | InvoiceId | INTEGER |
| Invoice | CustomerId | INTEGER |
| Invoice | BillingAddress | NVARCHAR(70) |
| InvoiceLine | InvoiceLineId | INTEGER |
| InvoiceLine | InvoiceId | INTEGER |
| MediaType | MediaTypeId | INTEGER |
| Track | MediaTypeId | INTEGER |
| Track | Milliseconds | INTEGER |
| Track | GenreId | INTEGER |
| Track | Bytes | INTEGER |
+-------------+----------------+--------------+
"""
if isinstance(data_type, str):
data_type = [data_type]
cols = []
for table in self.tables:
for col in vars(table):
if glob.fnmatch.fnmatch(col, search):
if data_type and isinstance(getattr(table, col), Column) and getattr(table, col).type not in data_type:
continue
if isinstance(getattr(table, col), Column):
cols.append(getattr(table, col))
return ColumnSet(cols) | python | def find_column(self, search, data_type=None):
"""
Aggressively search through your database's schema for a column.
Parameters
-----------
search: str
glob pattern for what you're looking for
data_type: str, list
(optional) specify which data type(s) you want to return
Examples
----------
>>> from db import DemoDB
>>> db = DemoDB()
>>> len(db.find_column("Name").columns)
5
>>> len(db.find_column("*Id").columns)
20
>>> len(db.find_column("*Address*").columns)
3
>>> len(db.find_column("*Address*", data_type="NVARCHAR(70)").columns)
3
>>> len(db.find_column("*e*", data_type=["NVARCHAR(70)", "INTEGER"]).columns)
17
-= Should sort in some way for all those doctests to be viable...
-= if not, there's always a random issue where rows are not in the same order, making doctest fail.
db.find_column("Name") # returns all columns named "Name"
+-----------+-------------+---------------+
| Table | Column Name | Type |
+-----------+-------------+---------------+
| Artist | Name | NVARCHAR(120) |
| Genre | Name | NVARCHAR(120) |
| MediaType | Name | NVARCHAR(120) |
| Playlist | Name | NVARCHAR(120) |
| Track | Name | NVARCHAR(200) |
+-----------+-------------+---------------+
db.find_column("*Id") # returns all columns ending w/ Id
+---------------+---------------+---------+
| Table | Column Name | Type |
+---------------+---------------+---------+
| Album | AlbumId | INTEGER |
| Album | ArtistId | INTEGER |
| Artist | ArtistId | INTEGER |
| Customer | SupportRepId | INTEGER |
| Customer | CustomerId | INTEGER |
| Employee | EmployeeId | INTEGER |
| Genre | GenreId | INTEGER |
| Invoice | InvoiceId | INTEGER |
| Invoice | CustomerId | INTEGER |
| InvoiceLine | TrackId | INTEGER |
| InvoiceLine | InvoiceLineId | INTEGER |
| InvoiceLine | InvoiceId | INTEGER |
| MediaType | MediaTypeId | INTEGER |
| Playlist | PlaylistId | INTEGER |
| PlaylistTrack | TrackId | INTEGER |
| PlaylistTrack | PlaylistId | INTEGER |
| Track | TrackId | INTEGER |
| Track | AlbumId | INTEGER |
| Track | MediaTypeId | INTEGER |
| Track | GenreId | INTEGER |
+---------------+---------------+---------+
db.find_column("*Address*") # returns all columns containing Address
+----------+----------------+--------------+
| Table | Column Name | Type |
+----------+----------------+--------------+
| Customer | Address | NVARCHAR(70) |
| Employee | Address | NVARCHAR(70) |
| Invoice | BillingAddress | NVARCHAR(70) |
+----------+----------------+--------------+
db.find_column("*Address*", data_type="NVARCHAR(70)") # returns all columns containing Address that are varchars
+----------+----------------+--------------+
| Table | Column Name | Type |
+----------+----------------+--------------+
| Customer | Address | NVARCHAR(70) |
| Employee | Address | NVARCHAR(70) |
| Invoice | BillingAddress | NVARCHAR(70) |
+----------+----------------+--------------+
db.find_column("*e*", data_type=["NVARCHAR(70)", "INTEGER"]) # returns all columns have an "e" and are NVARCHAR(70)S or INTEGERS
+-------------+----------------+--------------+
| Table | Column Name | Type |
+-------------+----------------+--------------+
| Customer | Address | NVARCHAR(70) |
| Customer | SupportRepId | INTEGER |
| Customer | CustomerId | INTEGER |
| Employee | ReportsTo | INTEGER |
| Employee | EmployeeId | INTEGER |
| Employee | Address | NVARCHAR(70) |
| Genre | GenreId | INTEGER |
| Invoice | InvoiceId | INTEGER |
| Invoice | CustomerId | INTEGER |
| Invoice | BillingAddress | NVARCHAR(70) |
| InvoiceLine | InvoiceLineId | INTEGER |
| InvoiceLine | InvoiceId | INTEGER |
| MediaType | MediaTypeId | INTEGER |
| Track | MediaTypeId | INTEGER |
| Track | Milliseconds | INTEGER |
| Track | GenreId | INTEGER |
| Track | Bytes | INTEGER |
+-------------+----------------+--------------+
"""
if isinstance(data_type, str):
data_type = [data_type]
cols = []
for table in self.tables:
for col in vars(table):
if glob.fnmatch.fnmatch(col, search):
if data_type and isinstance(getattr(table, col), Column) and getattr(table, col).type not in data_type:
continue
if isinstance(getattr(table, col), Column):
cols.append(getattr(table, col))
return ColumnSet(cols) | [
"def",
"find_column",
"(",
"self",
",",
"search",
",",
"data_type",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"data_type",
",",
"str",
")",
":",
"data_type",
"=",
"[",
"data_type",
"]",
"cols",
"=",
"[",
"]",
"for",
"table",
"in",
"self",
".",
"tables",
":",
"for",
"col",
"in",
"vars",
"(",
"table",
")",
":",
"if",
"glob",
".",
"fnmatch",
".",
"fnmatch",
"(",
"col",
",",
"search",
")",
":",
"if",
"data_type",
"and",
"isinstance",
"(",
"getattr",
"(",
"table",
",",
"col",
")",
",",
"Column",
")",
"and",
"getattr",
"(",
"table",
",",
"col",
")",
".",
"type",
"not",
"in",
"data_type",
":",
"continue",
"if",
"isinstance",
"(",
"getattr",
"(",
"table",
",",
"col",
")",
",",
"Column",
")",
":",
"cols",
".",
"append",
"(",
"getattr",
"(",
"table",
",",
"col",
")",
")",
"return",
"ColumnSet",
"(",
"cols",
")"
]
| Aggressively search through your database's schema for a column.
Parameters
-----------
search: str
glob pattern for what you're looking for
data_type: str, list
(optional) specify which data type(s) you want to return
Examples
----------
>>> from db import DemoDB
>>> db = DemoDB()
>>> len(db.find_column("Name").columns)
5
>>> len(db.find_column("*Id").columns)
20
>>> len(db.find_column("*Address*").columns)
3
>>> len(db.find_column("*Address*", data_type="NVARCHAR(70)").columns)
3
>>> len(db.find_column("*e*", data_type=["NVARCHAR(70)", "INTEGER"]).columns)
17
-= Should sort in some way for all those doctests to be viable...
-= if not, there's always a random issue where rows are not in the same order, making doctest fail.
db.find_column("Name") # returns all columns named "Name"
+-----------+-------------+---------------+
| Table | Column Name | Type |
+-----------+-------------+---------------+
| Artist | Name | NVARCHAR(120) |
| Genre | Name | NVARCHAR(120) |
| MediaType | Name | NVARCHAR(120) |
| Playlist | Name | NVARCHAR(120) |
| Track | Name | NVARCHAR(200) |
+-----------+-------------+---------------+
db.find_column("*Id") # returns all columns ending w/ Id
+---------------+---------------+---------+
| Table | Column Name | Type |
+---------------+---------------+---------+
| Album | AlbumId | INTEGER |
| Album | ArtistId | INTEGER |
| Artist | ArtistId | INTEGER |
| Customer | SupportRepId | INTEGER |
| Customer | CustomerId | INTEGER |
| Employee | EmployeeId | INTEGER |
| Genre | GenreId | INTEGER |
| Invoice | InvoiceId | INTEGER |
| Invoice | CustomerId | INTEGER |
| InvoiceLine | TrackId | INTEGER |
| InvoiceLine | InvoiceLineId | INTEGER |
| InvoiceLine | InvoiceId | INTEGER |
| MediaType | MediaTypeId | INTEGER |
| Playlist | PlaylistId | INTEGER |
| PlaylistTrack | TrackId | INTEGER |
| PlaylistTrack | PlaylistId | INTEGER |
| Track | TrackId | INTEGER |
| Track | AlbumId | INTEGER |
| Track | MediaTypeId | INTEGER |
| Track | GenreId | INTEGER |
+---------------+---------------+---------+
db.find_column("*Address*") # returns all columns containing Address
+----------+----------------+--------------+
| Table | Column Name | Type |
+----------+----------------+--------------+
| Customer | Address | NVARCHAR(70) |
| Employee | Address | NVARCHAR(70) |
| Invoice | BillingAddress | NVARCHAR(70) |
+----------+----------------+--------------+
db.find_column("*Address*", data_type="NVARCHAR(70)") # returns all columns containing Address that are varchars
+----------+----------------+--------------+
| Table | Column Name | Type |
+----------+----------------+--------------+
| Customer | Address | NVARCHAR(70) |
| Employee | Address | NVARCHAR(70) |
| Invoice | BillingAddress | NVARCHAR(70) |
+----------+----------------+--------------+
db.find_column("*e*", data_type=["NVARCHAR(70)", "INTEGER"]) # returns all columns have an "e" and are NVARCHAR(70)S or INTEGERS
+-------------+----------------+--------------+
| Table | Column Name | Type |
+-------------+----------------+--------------+
| Customer | Address | NVARCHAR(70) |
| Customer | SupportRepId | INTEGER |
| Customer | CustomerId | INTEGER |
| Employee | ReportsTo | INTEGER |
| Employee | EmployeeId | INTEGER |
| Employee | Address | NVARCHAR(70) |
| Genre | GenreId | INTEGER |
| Invoice | InvoiceId | INTEGER |
| Invoice | CustomerId | INTEGER |
| Invoice | BillingAddress | NVARCHAR(70) |
| InvoiceLine | InvoiceLineId | INTEGER |
| InvoiceLine | InvoiceId | INTEGER |
| MediaType | MediaTypeId | INTEGER |
| Track | MediaTypeId | INTEGER |
| Track | Milliseconds | INTEGER |
| Track | GenreId | INTEGER |
| Track | Bytes | INTEGER |
+-------------+----------------+--------------+ | [
"Aggresively",
"search",
"through",
"your",
"database",
"s",
"schema",
"for",
"a",
"column",
"."
]
| df2dbb8ef947c2d4253d31f29eb58c4084daffc5 | https://github.com/yhat/db.py/blob/df2dbb8ef947c2d4253d31f29eb58c4084daffc5/db/db.py#L396-L508 | train |
yhat/db.py | db/db.py | DB.query | def query(self, q, data=None, union=True, limit=None):
"""
Query your database with a raw string.
Parameters
----------
q: str
Query string to execute
data: list, dict
Optional argument for handlebars-queries. Data will be passed to the
template and rendered using handlebars.
union: bool
Whether or not "UNION ALL" handlebars templates. This will return
any handlebars queries as a single data frame.
limit: int
Number of records to return
Examples
--------
>>> from db import DemoDB
>>> db = DemoDB()
db.query("select * from Track").head(2)
TrackId Name AlbumId MediaTypeId \\\r
0 1 For Those About To Rock (We Salute You) 1 1
1 2 Balls to the Wall 2 2
<BLANKLINE>
GenreId Composer Milliseconds Bytes \\\r
0 1 Angus Young, Malcolm Young, Brian Johnson 343719 11170334
1 1 None 342562 5510424
<BLANKLINE>
UnitPrice
0 0.99
1 0.99
db.query("select * from Track", limit=10)
TrackId Name AlbumId MediaTypeId \
0 1 For Those About To Rock (We Salute You) 1 1
1 2 Balls to the Wall 2 2
2 3 Fast As a Shark 3 2
3 4 Restless and Wild 3 2
4 5 Princess of the Dawn 3 2
5 6 Put The Finger On You 1 1
6 7 Let's Get It Up 1 1
7 8 Inject The Venom 1 1
8 9 Snowballed 1 1
9 10 Evil Walks 1 1
GenreId Composer Milliseconds \
0 1 Angus Young, Malcolm Young, Brian Johnson 343719
1 1 None 342562
2 1 F. Baltes, S. Kaufman, U. Dirkscneider & W. Ho... 230619
3 1 F. Baltes, R.A. Smith-Diesel, S. Kaufman, U. D... 252051
4 1 Deaffy & R.A. Smith-Diesel 375418
5 1 Angus Young, Malcolm Young, Brian Johnson 205662
6 1 Angus Young, Malcolm Young, Brian Johnson 233926
7 1 Angus Young, Malcolm Young, Brian Johnson 210834
8 1 Angus Young, Malcolm Young, Brian Johnson 203102
9 1 Angus Young, Malcolm Young, Brian Johnson 263497
Bytes UnitPrice
0 11170334 0.99
1 5510424 0.99
2 3990994 0.99
3 4331779 0.99
4 6290521 0.99
5 6713451 0.99
6 7636561 0.99
7 6852860 0.99
8 6599424 0.99
9 8611245 0.99
>>> q = '''
... SELECT
... a.Title,
... t.Name,
... t.UnitPrice
... FROM
... Album a
... INNER JOIN
... Track t
... on a.AlbumId = t.AlbumId;
... '''
>>> len(db.query(q))
3503
db.query(q, limit=10)
Title \
0 For Those About To Rock We Salute You
1 Balls to the Wall
2 Restless and Wild
3 Restless and Wild
4 Restless and Wild
5 For Those About To Rock We Salute You
6 For Those About To Rock We Salute You
7 For Those About To Rock We Salute You
8 For Those About To Rock We Salute You
9 For Those About To Rock We Salute You
Name UnitPrice
0 For Those About To Rock (We Salute You) 0.99
1 Balls to the Wall 0.99
2 Fast As a Shark 0.99
3 Restless and Wild 0.99
4 Princess of the Dawn 0.99
5 Put The Finger On You 0.99
6 Let's Get It Up 0.99
7 Inject The Venom 0.99
8 Snowballed 0.99
9 Evil Walks 0.99
>>> template = '''
... SELECT
... '{{ name }}' as table_name,
... COUNT(*) as cnt
... FROM
... {{ name }}
... GROUP BY
... table_name
... '''
>>> data = [
... {"name": "Album"},
... {"name": "Artist"},
... {"name": "Track"}
... ]
>>>
db.query(template, data=data)
table_name cnt
0 Album 347
1 Artist 275
2 Track 3503
>>> q = '''
... SELECT
... {{#cols}}
... {{#if @last}}
... {{ . }}
... {{else}}
... {{ . }} ,
... {{/if}}
... {{/cols}}
... FROM
... Album;
... '''
>>> data = {"cols": ["AlbumId", "Title", "ArtistId"]}
>>> len(db.query(q, data=data, union=False))
347
db.query(q, data=data, union=False)
AlbumId Title ArtistId
0 1 For Those About To Rock We Salute You 1
1 2 Balls to the Wall 2
2 3 Restless and Wild 2
3 4 Let There Be Rock 1
4 5 Big Ones 3
"""
if data:
q = self._apply_handlebars(q, data, union)
if limit:
q = self._assign_limit(q, limit)
return pd.read_sql(q, self.con) | python | def query(self, q, data=None, union=True, limit=None):
"""
Query your database with a raw string.
Parameters
----------
q: str
Query string to execute
data: list, dict
Optional argument for handlebars-queries. Data will be passed to the
template and rendered using handlebars.
union: bool
Whether or not "UNION ALL" handlebars templates. This will return
any handlebars queries as a single data frame.
limit: int
Number of records to return
Examples
--------
>>> from db import DemoDB
>>> db = DemoDB()
db.query("select * from Track").head(2)
TrackId Name AlbumId MediaTypeId \\\r
0 1 For Those About To Rock (We Salute You) 1 1
1 2 Balls to the Wall 2 2
<BLANKLINE>
GenreId Composer Milliseconds Bytes \\\r
0 1 Angus Young, Malcolm Young, Brian Johnson 343719 11170334
1 1 None 342562 5510424
<BLANKLINE>
UnitPrice
0 0.99
1 0.99
db.query("select * from Track", limit=10)
TrackId Name AlbumId MediaTypeId \
0 1 For Those About To Rock (We Salute You) 1 1
1 2 Balls to the Wall 2 2
2 3 Fast As a Shark 3 2
3 4 Restless and Wild 3 2
4 5 Princess of the Dawn 3 2
5 6 Put The Finger On You 1 1
6 7 Let's Get It Up 1 1
7 8 Inject The Venom 1 1
8 9 Snowballed 1 1
9 10 Evil Walks 1 1
GenreId Composer Milliseconds \
0 1 Angus Young, Malcolm Young, Brian Johnson 343719
1 1 None 342562
2 1 F. Baltes, S. Kaufman, U. Dirkscneider & W. Ho... 230619
3 1 F. Baltes, R.A. Smith-Diesel, S. Kaufman, U. D... 252051
4 1 Deaffy & R.A. Smith-Diesel 375418
5 1 Angus Young, Malcolm Young, Brian Johnson 205662
6 1 Angus Young, Malcolm Young, Brian Johnson 233926
7 1 Angus Young, Malcolm Young, Brian Johnson 210834
8 1 Angus Young, Malcolm Young, Brian Johnson 203102
9 1 Angus Young, Malcolm Young, Brian Johnson 263497
Bytes UnitPrice
0 11170334 0.99
1 5510424 0.99
2 3990994 0.99
3 4331779 0.99
4 6290521 0.99
5 6713451 0.99
6 7636561 0.99
7 6852860 0.99
8 6599424 0.99
9 8611245 0.99
>>> q = '''
... SELECT
... a.Title,
... t.Name,
... t.UnitPrice
... FROM
... Album a
... INNER JOIN
... Track t
... on a.AlbumId = t.AlbumId;
... '''
>>> len(db.query(q))
3503
db.query(q, limit=10)
Title \
0 For Those About To Rock We Salute You
1 Balls to the Wall
2 Restless and Wild
3 Restless and Wild
4 Restless and Wild
5 For Those About To Rock We Salute You
6 For Those About To Rock We Salute You
7 For Those About To Rock We Salute You
8 For Those About To Rock We Salute You
9 For Those About To Rock We Salute You
Name UnitPrice
0 For Those About To Rock (We Salute You) 0.99
1 Balls to the Wall 0.99
2 Fast As a Shark 0.99
3 Restless and Wild 0.99
4 Princess of the Dawn 0.99
5 Put The Finger On You 0.99
6 Let's Get It Up 0.99
7 Inject The Venom 0.99
8 Snowballed 0.99
9 Evil Walks 0.99
>>> template = '''
... SELECT
... '{{ name }}' as table_name,
... COUNT(*) as cnt
... FROM
... {{ name }}
... GROUP BY
... table_name
... '''
>>> data = [
... {"name": "Album"},
... {"name": "Artist"},
... {"name": "Track"}
... ]
>>>
db.query(template, data=data)
table_name cnt
0 Album 347
1 Artist 275
2 Track 3503
>>> q = '''
... SELECT
... {{#cols}}
... {{#if @last}}
... {{ . }}
... {{else}}
... {{ . }} ,
... {{/if}}
... {{/cols}}
... FROM
... Album;
... '''
>>> data = {"cols": ["AlbumId", "Title", "ArtistId"]}
>>> len(db.query(q, data=data, union=False))
347
db.query(q, data=data, union=False)
AlbumId Title ArtistId
0 1 For Those About To Rock We Salute You 1
1 2 Balls to the Wall 2
2 3 Restless and Wild 2
3 4 Let There Be Rock 1
4 5 Big Ones 3
"""
if data:
q = self._apply_handlebars(q, data, union)
if limit:
q = self._assign_limit(q, limit)
return pd.read_sql(q, self.con) | [
"def",
"query",
"(",
"self",
",",
"q",
",",
"data",
"=",
"None",
",",
"union",
"=",
"True",
",",
"limit",
"=",
"None",
")",
":",
"if",
"data",
":",
"q",
"=",
"self",
".",
"_apply_handlebars",
"(",
"q",
",",
"data",
",",
"union",
")",
"if",
"limit",
":",
"q",
"=",
"self",
".",
"_assign_limit",
"(",
"q",
",",
"limit",
")",
"return",
"pd",
".",
"read_sql",
"(",
"q",
",",
"self",
".",
"con",
")"
]
| Query your database with a raw string.
Parameters
----------
q: str
Query string to execute
data: list, dict
Optional argument for handlebars-queries. Data will be passed to the
template and rendered using handlebars.
union: bool
Whether or not "UNION ALL" handlebars templates. This will return
any handlebars queries as a single data frame.
limit: int
Number of records to return
Examples
--------
>>> from db import DemoDB
>>> db = DemoDB()
db.query("select * from Track").head(2)
TrackId Name AlbumId MediaTypeId \\\r
0 1 For Those About To Rock (We Salute You) 1 1
1 2 Balls to the Wall 2 2
<BLANKLINE>
GenreId Composer Milliseconds Bytes \\\r
0 1 Angus Young, Malcolm Young, Brian Johnson 343719 11170334
1 1 None 342562 5510424
<BLANKLINE>
UnitPrice
0 0.99
1 0.99
db.query("select * from Track", limit=10)
TrackId Name AlbumId MediaTypeId \
0 1 For Those About To Rock (We Salute You) 1 1
1 2 Balls to the Wall 2 2
2 3 Fast As a Shark 3 2
3 4 Restless and Wild 3 2
4 5 Princess of the Dawn 3 2
5 6 Put The Finger On You 1 1
6 7 Let's Get It Up 1 1
7 8 Inject The Venom 1 1
8 9 Snowballed 1 1
9 10 Evil Walks 1 1
GenreId Composer Milliseconds \
0 1 Angus Young, Malcolm Young, Brian Johnson 343719
1 1 None 342562
2 1 F. Baltes, S. Kaufman, U. Dirkscneider & W. Ho... 230619
3 1 F. Baltes, R.A. Smith-Diesel, S. Kaufman, U. D... 252051
4 1 Deaffy & R.A. Smith-Diesel 375418
5 1 Angus Young, Malcolm Young, Brian Johnson 205662
6 1 Angus Young, Malcolm Young, Brian Johnson 233926
7 1 Angus Young, Malcolm Young, Brian Johnson 210834
8 1 Angus Young, Malcolm Young, Brian Johnson 203102
9 1 Angus Young, Malcolm Young, Brian Johnson 263497
Bytes UnitPrice
0 11170334 0.99
1 5510424 0.99
2 3990994 0.99
3 4331779 0.99
4 6290521 0.99
5 6713451 0.99
6 7636561 0.99
7 6852860 0.99
8 6599424 0.99
9 8611245 0.99
>>> q = '''
... SELECT
... a.Title,
... t.Name,
... t.UnitPrice
... FROM
... Album a
... INNER JOIN
... Track t
... on a.AlbumId = t.AlbumId;
... '''
>>> len(db.query(q))
3503
db.query(q, limit=10)
Title \
0 For Those About To Rock We Salute You
1 Balls to the Wall
2 Restless and Wild
3 Restless and Wild
4 Restless and Wild
5 For Those About To Rock We Salute You
6 For Those About To Rock We Salute You
7 For Those About To Rock We Salute You
8 For Those About To Rock We Salute You
9 For Those About To Rock We Salute You
Name UnitPrice
0 For Those About To Rock (We Salute You) 0.99
1 Balls to the Wall 0.99
2 Fast As a Shark 0.99
3 Restless and Wild 0.99
4 Princess of the Dawn 0.99
5 Put The Finger On You 0.99
6 Let's Get It Up 0.99
7 Inject The Venom 0.99
8 Snowballed 0.99
9 Evil Walks 0.99
>>> template = '''
... SELECT
... '{{ name }}' as table_name,
... COUNT(*) as cnt
... FROM
... {{ name }}
... GROUP BY
... table_name
... '''
>>> data = [
... {"name": "Album"},
... {"name": "Artist"},
... {"name": "Track"}
... ]
>>>
db.query(template, data=data)
table_name cnt
0 Album 347
1 Artist 275
2 Track 3503
>>> q = '''
... SELECT
... {{#cols}}
... {{#if @last}}
... {{ . }}
... {{else}}
... {{ . }} ,
... {{/if}}
... {{/cols}}
... FROM
... Album;
... '''
>>> data = {"cols": ["AlbumId", "Title", "ArtistId"]}
>>> len(db.query(q, data=data, union=False))
347
db.query(q, data=data, union=False)
AlbumId Title ArtistId
0 1 For Those About To Rock We Salute You 1
1 2 Balls to the Wall 2
2 3 Restless and Wild 2
3 4 Let There Be Rock 1
4 5 Big Ones 3 | [
"Query",
"your",
"database",
"with",
"a",
"raw",
"string",
"."
]
| df2dbb8ef947c2d4253d31f29eb58c4084daffc5 | https://github.com/yhat/db.py/blob/df2dbb8ef947c2d4253d31f29eb58c4084daffc5/db/db.py#L541-L702 | train |
yhat/db.py | db/db.py | DB.query_from_file | def query_from_file(self, filename, data=None, union=True, limit=None):
"""
Query your database from a file.
Parameters
----------
filename: str
A SQL script
data: list, dict
Optional argument for handlebars-queries. Data will be passed to the
template and rendered using handlebars.
union: bool
Whether or not "UNION ALL" handlebars templates. This will return
any handlebars queries as a single data frame.
limit: int
Number of records to return
Examples
--------
>>> from db import DemoDB
>>> db = DemoDB()
>>> q = '''
... SELECT
... a.Title,
... t.Name,
... t.UnitPrice
... FROM
... Album a
... INNER JOIN
... Track t
... on a.AlbumId = t.AlbumId;
... '''
>>> with open("db/tests/myscript.sql", "w") as f:
... f.write(q)
109
>>> len(db.query_from_file("db/tests/myscript.sql", limit=10))
10
db.query_from_file("db/tests/myscript.sql", limit=10)
Title \
0 For Those About To Rock We Salute You
1 Balls to the Wall
2 Restless and Wild
3 Restless and Wild
4 Restless and Wild
5 For Those About To Rock We Salute You
6 For Those About To Rock We Salute You
7 For Those About To Rock We Salute You
8 For Those About To Rock We Salute You
9 For Those About To Rock We Salute You
Name UnitPrice
0 For Those About To Rock (We Salute You) 0.99
1 Balls to the Wall 0.99
2 Fast As a Shark 0.99
3 Restless and Wild 0.99
4 Princess of the Dawn 0.99
5 Put The Finger On You 0.99
6 Let's Get It Up 0.99
7 Inject The Venom 0.99
8 Snowballed 0.99
9 Evil Walks 0.99
"""
with open(filename) as fp:
q = fp.read()
return self.query(q, data=data, union=union, limit=limit) | python | def query_from_file(self, filename, data=None, union=True, limit=None):
"""
Query your database from a file.
Parameters
----------
filename: str
A SQL script
data: list, dict
Optional argument for handlebars-queries. Data will be passed to the
template and rendered using handlebars.
union: bool
Whether or not "UNION ALL" handlebars templates. This will return
any handlebars queries as a single data frame.
limit: int
Number of records to return
Examples
--------
>>> from db import DemoDB
>>> db = DemoDB()
>>> q = '''
... SELECT
... a.Title,
... t.Name,
... t.UnitPrice
... FROM
... Album a
... INNER JOIN
... Track t
... on a.AlbumId = t.AlbumId;
... '''
>>> with open("db/tests/myscript.sql", "w") as f:
... f.write(q)
109
>>> len(db.query_from_file("db/tests/myscript.sql", limit=10))
10
db.query_from_file("db/tests/myscript.sql", limit=10)
Title \
0 For Those About To Rock We Salute You
1 Balls to the Wall
2 Restless and Wild
3 Restless and Wild
4 Restless and Wild
5 For Those About To Rock We Salute You
6 For Those About To Rock We Salute You
7 For Those About To Rock We Salute You
8 For Those About To Rock We Salute You
9 For Those About To Rock We Salute You
Name UnitPrice
0 For Those About To Rock (We Salute You) 0.99
1 Balls to the Wall 0.99
2 Fast As a Shark 0.99
3 Restless and Wild 0.99
4 Princess of the Dawn 0.99
5 Put The Finger On You 0.99
6 Let's Get It Up 0.99
7 Inject The Venom 0.99
8 Snowballed 0.99
9 Evil Walks 0.99
"""
with open(filename) as fp:
q = fp.read()
return self.query(q, data=data, union=union, limit=limit) | [
"def",
"query_from_file",
"(",
"self",
",",
"filename",
",",
"data",
"=",
"None",
",",
"union",
"=",
"True",
",",
"limit",
"=",
"None",
")",
":",
"with",
"open",
"(",
"filename",
")",
"as",
"fp",
":",
"q",
"=",
"fp",
".",
"read",
"(",
")",
"return",
"self",
".",
"query",
"(",
"q",
",",
"data",
"=",
"data",
",",
"union",
"=",
"union",
",",
"limit",
"=",
"limit",
")"
]
| Query your database from a file.
Parameters
----------
filename: str
A SQL script
data: list, dict
Optional argument for handlebars-queries. Data will be passed to the
template and rendered using handlebars.
union: bool
Whether or not "UNION ALL" handlebars templates. This will return
any handlebars queries as a single data frame.
limit: int
Number of records to return
Examples
--------
>>> from db import DemoDB
>>> db = DemoDB()
>>> q = '''
... SELECT
... a.Title,
... t.Name,
... t.UnitPrice
... FROM
... Album a
... INNER JOIN
... Track t
... on a.AlbumId = t.AlbumId;
... '''
>>> with open("db/tests/myscript.sql", "w") as f:
... f.write(q)
109
>>> len(db.query_from_file("db/tests/myscript.sql", limit=10))
10
db.query_from_file("db/tests/myscript.sql", limit=10)
Title \
0 For Those About To Rock We Salute You
1 Balls to the Wall
2 Restless and Wild
3 Restless and Wild
4 Restless and Wild
5 For Those About To Rock We Salute You
6 For Those About To Rock We Salute You
7 For Those About To Rock We Salute You
8 For Those About To Rock We Salute You
9 For Those About To Rock We Salute You
Name UnitPrice
0 For Those About To Rock (We Salute You) 0.99
1 Balls to the Wall 0.99
2 Fast As a Shark 0.99
3 Restless and Wild 0.99
4 Princess of the Dawn 0.99
5 Put The Finger On You 0.99
6 Let's Get It Up 0.99
7 Inject The Venom 0.99
8 Snowballed 0.99
9 Evil Walks 0.99 | [
"Query",
"your",
"database",
"from",
"a",
"file",
"."
]
| df2dbb8ef947c2d4253d31f29eb58c4084daffc5 | https://github.com/yhat/db.py/blob/df2dbb8ef947c2d4253d31f29eb58c4084daffc5/db/db.py#L704-L771 | train |
yhat/db.py | db/db.py | DB.refresh_schema | def refresh_schema(self, exclude_system_tables=True, use_cache=False):
"""
Pulls your database's schema again and looks for any new tables and
columns.
"""
col_meta, table_meta = self._get_db_metadata(exclude_system_tables, use_cache)
tables = self._gen_tables_from_col_tuples(col_meta)
# Three modes for refreshing schema
# 1. load directly from cache
# 2. use a single query for getting all key relationships
# 3. use the naive approach
if use_cache:
# generate our Tables, and load them into a TableSet
self._tables = TableSet([Table(self.con, self._query_templates, table_meta[t]['schema'], t, tables[t],
keys_per_column=self.keys_per_column,
foreign_keys=table_meta[t]['foreign_keys']['columns'],
ref_keys=table_meta[t]['ref_keys']['columns'])
for t in sorted(tables.keys())])
# optimize the foreign/ref key query by doing it one time, database-wide, if query is available
elif not use_cache and isinstance(self._query_templates.get('system', {}).get('foreign_keys_for_db', None), str):
self.cur.execute(self._query_templates['system']['foreign_keys_for_db'])
table_db_foreign_keys = defaultdict(list)
for rel in self.cur:
# second value in relationship tuple is the table name
table_db_foreign_keys[rel[1]].append(rel)
self.cur.execute(self._query_templates['system']['ref_keys_for_db'])
table_db_ref_keys = defaultdict(list)
for rel in self.cur:
# second value in relationship tuple is the table name
table_db_ref_keys[rel[1]].append(rel)
# generate our Tables, and load them into a TableSet
self._tables = TableSet([Table(self.con, self._query_templates, tables[t][0].schema, t, tables[t],
keys_per_column=self.keys_per_column,
foreign_keys=table_db_foreign_keys[t], ref_keys=table_db_ref_keys[t])
for t in sorted(tables.keys())])
elif not use_cache:
self._tables = TableSet([Table(self.con, self._query_templates, tables[t][0].schema, t, tables[t],
keys_per_column=self.keys_per_column) for t in sorted(tables.keys())])
sys.stderr.write("done!\n") | python | def refresh_schema(self, exclude_system_tables=True, use_cache=False):
"""
Pulls your database's schema again and looks for any new tables and
columns.
"""
col_meta, table_meta = self._get_db_metadata(exclude_system_tables, use_cache)
tables = self._gen_tables_from_col_tuples(col_meta)
# Three modes for refreshing schema
# 1. load directly from cache
# 2. use a single query for getting all key relationships
# 3. use the naive approach
if use_cache:
# generate our Tables, and load them into a TableSet
self._tables = TableSet([Table(self.con, self._query_templates, table_meta[t]['schema'], t, tables[t],
keys_per_column=self.keys_per_column,
foreign_keys=table_meta[t]['foreign_keys']['columns'],
ref_keys=table_meta[t]['ref_keys']['columns'])
for t in sorted(tables.keys())])
# optimize the foreign/ref key query by doing it one time, database-wide, if query is available
elif not use_cache and isinstance(self._query_templates.get('system', {}).get('foreign_keys_for_db', None), str):
self.cur.execute(self._query_templates['system']['foreign_keys_for_db'])
table_db_foreign_keys = defaultdict(list)
for rel in self.cur:
# second value in relationship tuple is the table name
table_db_foreign_keys[rel[1]].append(rel)
self.cur.execute(self._query_templates['system']['ref_keys_for_db'])
table_db_ref_keys = defaultdict(list)
for rel in self.cur:
# second value in relationship tuple is the table name
table_db_ref_keys[rel[1]].append(rel)
# generate our Tables, and load them into a TableSet
self._tables = TableSet([Table(self.con, self._query_templates, tables[t][0].schema, t, tables[t],
keys_per_column=self.keys_per_column,
foreign_keys=table_db_foreign_keys[t], ref_keys=table_db_ref_keys[t])
for t in sorted(tables.keys())])
elif not use_cache:
self._tables = TableSet([Table(self.con, self._query_templates, tables[t][0].schema, t, tables[t],
keys_per_column=self.keys_per_column) for t in sorted(tables.keys())])
sys.stderr.write("done!\n") | [
"def",
"refresh_schema",
"(",
"self",
",",
"exclude_system_tables",
"=",
"True",
",",
"use_cache",
"=",
"False",
")",
":",
"col_meta",
",",
"table_meta",
"=",
"self",
".",
"_get_db_metadata",
"(",
"exclude_system_tables",
",",
"use_cache",
")",
"tables",
"=",
"self",
".",
"_gen_tables_from_col_tuples",
"(",
"col_meta",
")",
"# Three modes for refreshing schema",
"# 1. load directly from cache",
"# 2. use a single query for getting all key relationships",
"# 3. use the naive approach",
"if",
"use_cache",
":",
"# generate our Tables, and load them into a TableSet",
"self",
".",
"_tables",
"=",
"TableSet",
"(",
"[",
"Table",
"(",
"self",
".",
"con",
",",
"self",
".",
"_query_templates",
",",
"table_meta",
"[",
"t",
"]",
"[",
"'schema'",
"]",
",",
"t",
",",
"tables",
"[",
"t",
"]",
",",
"keys_per_column",
"=",
"self",
".",
"keys_per_column",
",",
"foreign_keys",
"=",
"table_meta",
"[",
"t",
"]",
"[",
"'foreign_keys'",
"]",
"[",
"'columns'",
"]",
",",
"ref_keys",
"=",
"table_meta",
"[",
"t",
"]",
"[",
"'ref_keys'",
"]",
"[",
"'columns'",
"]",
")",
"for",
"t",
"in",
"sorted",
"(",
"tables",
".",
"keys",
"(",
")",
")",
"]",
")",
"# optimize the foreign/ref key query by doing it one time, database-wide, if query is available",
"elif",
"not",
"use_cache",
"and",
"isinstance",
"(",
"self",
".",
"_query_templates",
".",
"get",
"(",
"'system'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'foreign_keys_for_db'",
",",
"None",
")",
",",
"str",
")",
":",
"self",
".",
"cur",
".",
"execute",
"(",
"self",
".",
"_query_templates",
"[",
"'system'",
"]",
"[",
"'foreign_keys_for_db'",
"]",
")",
"table_db_foreign_keys",
"=",
"defaultdict",
"(",
"list",
")",
"for",
"rel",
"in",
"self",
".",
"cur",
":",
"# second value in relationship tuple is the table name",
"table_db_foreign_keys",
"[",
"rel",
"[",
"1",
"]",
"]",
".",
"append",
"(",
"rel",
")",
"self",
".",
"cur",
".",
"execute",
"(",
"self",
".",
"_query_templates",
"[",
"'system'",
"]",
"[",
"'ref_keys_for_db'",
"]",
")",
"table_db_ref_keys",
"=",
"defaultdict",
"(",
"list",
")",
"for",
"rel",
"in",
"self",
".",
"cur",
":",
"# second value in relationship tuple is the table name",
"table_db_ref_keys",
"[",
"rel",
"[",
"1",
"]",
"]",
".",
"append",
"(",
"rel",
")",
"# generate our Tables, and load them into a TableSet",
"self",
".",
"_tables",
"=",
"TableSet",
"(",
"[",
"Table",
"(",
"self",
".",
"con",
",",
"self",
".",
"_query_templates",
",",
"tables",
"[",
"t",
"]",
"[",
"0",
"]",
".",
"schema",
",",
"t",
",",
"tables",
"[",
"t",
"]",
",",
"keys_per_column",
"=",
"self",
".",
"keys_per_column",
",",
"foreign_keys",
"=",
"table_db_foreign_keys",
"[",
"t",
"]",
",",
"ref_keys",
"=",
"table_db_ref_keys",
"[",
"t",
"]",
")",
"for",
"t",
"in",
"sorted",
"(",
"tables",
".",
"keys",
"(",
")",
")",
"]",
")",
"elif",
"not",
"use_cache",
":",
"self",
".",
"_tables",
"=",
"TableSet",
"(",
"[",
"Table",
"(",
"self",
".",
"con",
",",
"self",
".",
"_query_templates",
",",
"tables",
"[",
"t",
"]",
"[",
"0",
"]",
".",
"schema",
",",
"t",
",",
"tables",
"[",
"t",
"]",
",",
"keys_per_column",
"=",
"self",
".",
"keys_per_column",
")",
"for",
"t",
"in",
"sorted",
"(",
"tables",
".",
"keys",
"(",
")",
")",
"]",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"done!\\n\"",
")"
]
| Pulls your database's schema again and looks for any new tables and
columns. | [
"Pulls",
"your",
"database",
"s",
"schema",
"again",
"and",
"looks",
"for",
"any",
"new",
"tables",
"and",
"columns",
"."
]
| df2dbb8ef947c2d4253d31f29eb58c4084daffc5 | https://github.com/yhat/db.py/blob/df2dbb8ef947c2d4253d31f29eb58c4084daffc5/db/db.py#L809-L854 | train |
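A minimal usage sketch for refresh_schema, assuming the DemoDB helper that the other docstrings in this file import; the call signature matches the code above, everything else here is illustrative.

    from db import DemoDB

    db = DemoDB()
    # pull the schema again, skipping any cached profile, after the underlying
    # database has gained new tables or columns
    db.refresh_schema(exclude_system_tables=True, use_cache=False)
    print(db.tables)  # the TableSet now reflects the refreshed schema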
yhat/db.py | db/db.py | DB.to_dict | def to_dict(self):
"""Dict representation of the database as credentials plus tables dict representation."""
db_dict = self.credentials
db_dict.update(self.tables.to_dict())
return db_dict | python | def to_dict(self):
"""Dict representation of the database as credentials plus tables dict representation."""
db_dict = self.credentials
db_dict.update(self.tables.to_dict())
return db_dict | [
"def",
"to_dict",
"(",
"self",
")",
":",
"db_dict",
"=",
"self",
".",
"credentials",
"db_dict",
".",
"update",
"(",
"self",
".",
"tables",
".",
"to_dict",
"(",
")",
")",
"return",
"db_dict"
]
| Dict representation of the database as credentials plus tables dict representation. | [
"Dict",
"representation",
"of",
"the",
"database",
"as",
"credentials",
"plus",
"tables",
"dict",
"representation",
"."
]
| df2dbb8ef947c2d4253d31f29eb58c4084daffc5 | https://github.com/yhat/db.py/blob/df2dbb8ef947c2d4253d31f29eb58c4084daffc5/db/db.py#L1052-L1056 | train |
yhat/db.py | db/utils.py | profile_path | def profile_path(profile_id, profile):
"""Create full path to given provide for the current user."""
user = os.path.expanduser("~")
return os.path.join(user, profile_id + profile) | python | def profile_path(profile_id, profile):
"""Create full path to given provide for the current user."""
user = os.path.expanduser("~")
return os.path.join(user, profile_id + profile) | [
"def",
"profile_path",
"(",
"profile_id",
",",
"profile",
")",
":",
"user",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"\"~\"",
")",
"return",
"os",
".",
"path",
".",
"join",
"(",
"user",
",",
"profile_id",
"+",
"profile",
")"
]
| Create full path to given profile for the current user. | [
"Create",
"full",
"path",
"to",
"given",
"provide",
"for",
"the",
"current",
"user",
"."
]
| df2dbb8ef947c2d4253d31f29eb58c4084daffc5 | https://github.com/yhat/db.py/blob/df2dbb8ef947c2d4253d31f29eb58c4084daffc5/db/utils.py#L5-L8 | train |
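A short illustration of how profile_path composes a dotfile path under the user's home directory; the import path and the "db_profile_" prefix are assumptions for the example, not values taken from this excerpt.

    import os
    from db.utils import profile_path  # assumed import path (db/utils.py)

    path = profile_path("db_profile_", "dev")   # hypothetical prefix + profile name
    print(path)                                 # e.g. /home/alice/db_profile_dev
    print(os.path.basename(path))               # db_profile_dev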
yhat/db.py | db/utils.py | load_from_json | def load_from_json(file_path):
"""Load the stored data from json, and return as a dict."""
if os.path.exists(file_path):
raw_data = open(file_path, 'rb').read()
return json.loads(base64.decodestring(raw_data).decode('utf-8')) | python | def load_from_json(file_path):
"""Load the stored data from json, and return as a dict."""
if os.path.exists(file_path):
raw_data = open(file_path, 'rb').read()
return json.loads(base64.decodestring(raw_data).decode('utf-8')) | [
"def",
"load_from_json",
"(",
"file_path",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"file_path",
")",
":",
"raw_data",
"=",
"open",
"(",
"file_path",
",",
"'rb'",
")",
".",
"read",
"(",
")",
"return",
"json",
".",
"loads",
"(",
"base64",
".",
"decodestring",
"(",
"raw_data",
")",
".",
"decode",
"(",
"'utf-8'",
")",
")"
]
| Load the stored data from json, and return as a dict. | [
"Load",
"the",
"stored",
"data",
"from",
"json",
"and",
"return",
"as",
"a",
"dict",
"."
]
| df2dbb8ef947c2d4253d31f29eb58c4084daffc5 | https://github.com/yhat/db.py/blob/df2dbb8ef947c2d4253d31f29eb58c4084daffc5/db/utils.py#L13-L17 | train |
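load_from_json is the read side of the base64-encoded JSON dotfiles used for credentials; a hedged round-trip sketch, assuming dump_to_json (referenced by S3.save_credentials below) lives in the same module and writes the matching format. Note that base64.decodestring in the source implies an older Python runtime.

    from db.utils import profile_path, load_from_json, dump_to_json  # assumed imports

    filename = profile_path("db_profile_", "dev")  # hypothetical prefix
    dump_to_json(filename, {"access_key": "AK-PLACEHOLDER", "secret_key": "SK-PLACEHOLDER"})
    creds = load_from_json(filename)  # returns the dict, or None if the file is missing
    print(creds["access_key"])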
yhat/db.py | db/s3.py | S3.save_credentials | def save_credentials(self, profile):
"""
Saves credentials to a dotfile so you can grab them later.
Parameters
----------
profile: str
name for your profile (i.e. "dev", "prod")
"""
filename = profile_path(S3_PROFILE_ID, profile)
creds = {
"access_key": self.access_key,
"secret_key": self.secret_key
}
dump_to_json(filename, creds) | python | def save_credentials(self, profile):
"""
Saves credentials to a dotfile so you can grab them later.
Parameters
----------
profile: str
name for your profile (i.e. "dev", "prod")
"""
filename = profile_path(S3_PROFILE_ID, profile)
creds = {
"access_key": self.access_key,
"secret_key": self.secret_key
}
dump_to_json(filename, creds) | [
"def",
"save_credentials",
"(",
"self",
",",
"profile",
")",
":",
"filename",
"=",
"profile_path",
"(",
"S3_PROFILE_ID",
",",
"profile",
")",
"creds",
"=",
"{",
"\"access_key\"",
":",
"self",
".",
"access_key",
",",
"\"secret_key\"",
":",
"self",
".",
"secret_key",
"}",
"dump_to_json",
"(",
"filename",
",",
"creds",
")"
]
| Saves credentials to a dotfile so you can grab them later.
Parameters
----------
profile: str
name for your profile (i.e. "dev", "prod") | [
"Saves",
"credentials",
"to",
"a",
"dotfile",
"so",
"you",
"can",
"open",
"them",
"grab",
"them",
"later",
"."
]
| df2dbb8ef947c2d4253d31f29eb58c4084daffc5 | https://github.com/yhat/db.py/blob/df2dbb8ef947c2d4253d31f29eb58c4084daffc5/db/s3.py#L16-L30 | train |
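A hedged example of stashing S3 credentials under a named profile; the S3 constructor is not shown in this excerpt, so the keyword arguments below are assumptions that simply mirror the attributes the method reads.

    from db.s3 import S3  # assumed import path (db/s3.py)

    s3 = S3(access_key="AK-PLACEHOLDER", secret_key="SK-PLACEHOLDER")  # signature assumed
    s3.save_credentials("dev")  # writes the "dev" profile dotfile via dump_to_json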
yhat/db.py | db/column.py | Column.to_dict | def to_dict(self):
"""
Serialize representation of the column for local caching.
"""
return {'schema': self.schema, 'table': self.table, 'name': self.name, 'type': self.type} | python | def to_dict(self):
"""
Serialize representation of the column for local caching.
"""
return {'schema': self.schema, 'table': self.table, 'name': self.name, 'type': self.type} | [
"def",
"to_dict",
"(",
"self",
")",
":",
"return",
"{",
"'schema'",
":",
"self",
".",
"schema",
",",
"'table'",
":",
"self",
".",
"table",
",",
"'name'",
":",
"self",
".",
"name",
",",
"'type'",
":",
"self",
".",
"type",
"}"
]
| Serialize representation of the column for local caching. | [
"Serialize",
"representation",
"of",
"the",
"column",
"for",
"local",
"caching",
"."
]
| df2dbb8ef947c2d4253d31f29eb58c4084daffc5 | https://github.com/yhat/db.py/blob/df2dbb8ef947c2d4253d31f29eb58c4084daffc5/db/column.py#L192-L196 | train |
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki.rate_limit | def rate_limit(self, rate_limit):
""" Turn on or off rate limiting """
self._rate_limit = bool(rate_limit)
self._rate_limit_last_call = None
self.clear_memoized() | python | def rate_limit(self, rate_limit):
""" Turn on or off rate limiting """
self._rate_limit = bool(rate_limit)
self._rate_limit_last_call = None
self.clear_memoized() | [
"def",
"rate_limit",
"(",
"self",
",",
"rate_limit",
")",
":",
"self",
".",
"_rate_limit",
"=",
"bool",
"(",
"rate_limit",
")",
"self",
".",
"_rate_limit_last_call",
"=",
"None",
"self",
".",
"clear_memoized",
"(",
")"
]
| Turn on or off rate limiting | [
"Turn",
"on",
"or",
"off",
"rate",
"limiting"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L136-L140 | train |
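The setter above also clears memoized results whenever throttling is toggled; a quick sketch, assuming MediaWiki() can be constructed with defaults and that rate_limit is exposed as a property.

    from mediawiki import MediaWiki  # assumed import path

    site = MediaWiki()        # defaults assumed to target en.wikipedia.org
    site.rate_limit = True    # throttle API calls; memoized results are cleared
    site.rate_limit = False   # turn throttling back off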
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki.language | def language(self, lang):
""" Set the language to use; attempts to change the API URL """
lang = lang.lower()
if self._lang == lang:
return
url = self._api_url
tmp = url.replace("/{0}.".format(self._lang), "/{0}.".format(lang))
self._api_url = tmp
self._lang = lang
self.clear_memoized() | python | def language(self, lang):
""" Set the language to use; attempts to change the API URL """
lang = lang.lower()
if self._lang == lang:
return
url = self._api_url
tmp = url.replace("/{0}.".format(self._lang), "/{0}.".format(lang))
self._api_url = tmp
self._lang = lang
self.clear_memoized() | [
"def",
"language",
"(",
"self",
",",
"lang",
")",
":",
"lang",
"=",
"lang",
".",
"lower",
"(",
")",
"if",
"self",
".",
"_lang",
"==",
"lang",
":",
"return",
"url",
"=",
"self",
".",
"_api_url",
"tmp",
"=",
"url",
".",
"replace",
"(",
"\"/{0}.\"",
".",
"format",
"(",
"self",
".",
"_lang",
")",
",",
"\"/{0}.\"",
".",
"format",
"(",
"lang",
")",
")",
"self",
".",
"_api_url",
"=",
"tmp",
"self",
".",
"_lang",
"=",
"lang",
"self",
".",
"clear_memoized",
"(",
")"
]
| Set the language to use; attempts to change the API URL | [
"Set",
"the",
"language",
"to",
"use",
";",
"attempts",
"to",
"change",
"the",
"API",
"URL"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L197-L208 | train |
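Switching languages rewrites the /{lang}. portion of the API URL and clears the cache; the sketch below assumes the property form and a default MediaWiki() instance.

    from mediawiki import MediaWiki  # assumed import path

    site = MediaWiki()
    site.language = "FR"  # lowercased internally; API URL becomes https://fr.wikipedia.org/w/api.php
    site.language = "fr"  # no-op: already the current language, so nothing is reset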
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki.refresh_interval | def refresh_interval(self, refresh_interval):
""" Set the new cache refresh interval """
if isinstance(refresh_interval, int) and refresh_interval > 0:
self._refresh_interval = refresh_interval
else:
self._refresh_interval = None | python | def refresh_interval(self, refresh_interval):
""" Set the new cache refresh interval """
if isinstance(refresh_interval, int) and refresh_interval > 0:
self._refresh_interval = refresh_interval
else:
self._refresh_interval = None | [
"def",
"refresh_interval",
"(",
"self",
",",
"refresh_interval",
")",
":",
"if",
"isinstance",
"(",
"refresh_interval",
",",
"int",
")",
"and",
"refresh_interval",
">",
"0",
":",
"self",
".",
"_refresh_interval",
"=",
"refresh_interval",
"else",
":",
"self",
".",
"_refresh_interval",
"=",
"None"
]
| Set the new cache refresh interval | [
"Set",
"the",
"new",
"cache",
"refresh",
"interval"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L262-L267 | train |
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki.login | def login(self, username, password, strict=True):
""" Login as specified user
Args:
username (str): The username to log in with
password (str): The password for the user
strict (bool): `True` to throw an error on failure
Returns:
bool: `True` if successfully logged in; `False` otherwise
Raises:
:py:func:`mediawiki.exceptions.MediaWikiLoginError`: if \
unable to login
Note:
Per the MediaWiki API, one should use the `bot password`; \
see https://www.mediawiki.org/wiki/API:Login for more \
information """
# get login token
params = {
"action": "query",
"meta": "tokens",
"type": "login",
"format": "json",
}
token_res = self._get_response(params)
if "query" in token_res and "tokens" in token_res["query"]:
token = token_res["query"]["tokens"]["logintoken"]
params = {
"action": "login",
"lgname": username,
"lgpassword": password,
"lgtoken": token,
"format": "json",
}
res = self._post_response(params)
if res["login"]["result"] == "Success":
self._is_logged_in = True
return True
self._is_logged_in = False
reason = res["login"]["reason"]
if strict:
msg = "MediaWiki login failure: {}".format(reason)
raise MediaWikiLoginError(msg)
return False | python | def login(self, username, password, strict=True):
""" Login as specified user
Args:
username (str): The username to log in with
password (str): The password for the user
strict (bool): `True` to throw an error on failure
Returns:
bool: `True` if successfully logged in; `False` otherwise
Raises:
:py:func:`mediawiki.exceptions.MediaWikiLoginError`: if \
unable to login
Note:
Per the MediaWiki API, one should use the `bot password`; \
see https://www.mediawiki.org/wiki/API:Login for more \
information """
# get login token
params = {
"action": "query",
"meta": "tokens",
"type": "login",
"format": "json",
}
token_res = self._get_response(params)
if "query" in token_res and "tokens" in token_res["query"]:
token = token_res["query"]["tokens"]["logintoken"]
params = {
"action": "login",
"lgname": username,
"lgpassword": password,
"lgtoken": token,
"format": "json",
}
res = self._post_response(params)
if res["login"]["result"] == "Success":
self._is_logged_in = True
return True
self._is_logged_in = False
reason = res["login"]["reason"]
if strict:
msg = "MediaWiki login failure: {}".format(reason)
raise MediaWikiLoginError(msg)
return False | [
"def",
"login",
"(",
"self",
",",
"username",
",",
"password",
",",
"strict",
"=",
"True",
")",
":",
"# get login token",
"params",
"=",
"{",
"\"action\"",
":",
"\"query\"",
",",
"\"meta\"",
":",
"\"tokens\"",
",",
"\"type\"",
":",
"\"login\"",
",",
"\"format\"",
":",
"\"json\"",
",",
"}",
"token_res",
"=",
"self",
".",
"_get_response",
"(",
"params",
")",
"if",
"\"query\"",
"in",
"token_res",
"and",
"\"tokens\"",
"in",
"token_res",
"[",
"\"query\"",
"]",
":",
"token",
"=",
"token_res",
"[",
"\"query\"",
"]",
"[",
"\"tokens\"",
"]",
"[",
"\"logintoken\"",
"]",
"params",
"=",
"{",
"\"action\"",
":",
"\"login\"",
",",
"\"lgname\"",
":",
"username",
",",
"\"lgpassword\"",
":",
"password",
",",
"\"lgtoken\"",
":",
"token",
",",
"\"format\"",
":",
"\"json\"",
",",
"}",
"res",
"=",
"self",
".",
"_post_response",
"(",
"params",
")",
"if",
"res",
"[",
"\"login\"",
"]",
"[",
"\"result\"",
"]",
"==",
"\"Success\"",
":",
"self",
".",
"_is_logged_in",
"=",
"True",
"return",
"True",
"self",
".",
"_is_logged_in",
"=",
"False",
"reason",
"=",
"res",
"[",
"\"login\"",
"]",
"[",
"\"reason\"",
"]",
"if",
"strict",
":",
"msg",
"=",
"\"MediaWiki login failure: {}\"",
".",
"format",
"(",
"reason",
")",
"raise",
"MediaWikiLoginError",
"(",
"msg",
")",
"return",
"False"
]
| Login as specified user
Args:
username (str): The username to log in with
password (str): The password for the user
strict (bool): `True` to throw an error on failure
Returns:
bool: `True` if successfully logged in; `False` otherwise
Raises:
:py:func:`mediawiki.exceptions.MediaWikiLoginError`: if \
unable to login
Note:
Per the MediaWiki API, one should use the `bot password`; \
see https://www.mediawiki.org/wiki/API:Login for more \
information | [
"Login",
"as",
"specified",
"user"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L269-L314 | train |
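Logging in with a bot password, as the docstring's note recommends; the account name and secret are placeholders.

    from mediawiki import MediaWiki  # assumed import path

    site = MediaWiki()
    # strict=False returns False instead of raising MediaWikiLoginError on failure
    ok = site.login("MyBot@my-bot-password-name", "placeholder-secret", strict=False)
    if not ok:
        print("login failed; check the bot password")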
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki.set_api_url | def set_api_url(self, api_url="https://{lang}.wikipedia.org/w/api.php", lang="en"):
""" Set the API URL and language
Args:
api_url (str): API URL to use
lang (str): Language of the API URL
Raises:
:py:func:`mediawiki.exceptions.MediaWikiAPIURLError`: if the \
url is not a valid MediaWiki site """
old_api_url = self._api_url
old_lang = self._lang
self._lang = lang.lower()
self._api_url = api_url.format(lang=self._lang)
try:
self._get_site_info()
self.__supported_languages = None # reset this
except MediaWikiException:
# reset api url and lang in the event that the exception was caught
self._api_url = old_api_url
self._lang = old_lang
raise MediaWikiAPIURLError(api_url)
self.clear_memoized() | python | def set_api_url(self, api_url="https://{lang}.wikipedia.org/w/api.php", lang="en"):
""" Set the API URL and language
Args:
api_url (str): API URL to use
lang (str): Language of the API URL
Raises:
:py:func:`mediawiki.exceptions.MediaWikiAPIURLError`: if the \
url is not a valid MediaWiki site """
old_api_url = self._api_url
old_lang = self._lang
self._lang = lang.lower()
self._api_url = api_url.format(lang=self._lang)
try:
self._get_site_info()
self.__supported_languages = None # reset this
except MediaWikiException:
# reset api url and lang in the event that the exception was caught
self._api_url = old_api_url
self._lang = old_lang
raise MediaWikiAPIURLError(api_url)
self.clear_memoized() | [
"def",
"set_api_url",
"(",
"self",
",",
"api_url",
"=",
"\"https://{lang}.wikipedia.org/w/api.php\"",
",",
"lang",
"=",
"\"en\"",
")",
":",
"old_api_url",
"=",
"self",
".",
"_api_url",
"old_lang",
"=",
"self",
".",
"_lang",
"self",
".",
"_lang",
"=",
"lang",
".",
"lower",
"(",
")",
"self",
".",
"_api_url",
"=",
"api_url",
".",
"format",
"(",
"lang",
"=",
"self",
".",
"_lang",
")",
"try",
":",
"self",
".",
"_get_site_info",
"(",
")",
"self",
".",
"__supported_languages",
"=",
"None",
"# reset this",
"except",
"MediaWikiException",
":",
"# reset api url and lang in the event that the exception was caught",
"self",
".",
"_api_url",
"=",
"old_api_url",
"self",
".",
"_lang",
"=",
"old_lang",
"raise",
"MediaWikiAPIURLError",
"(",
"api_url",
")",
"self",
".",
"clear_memoized",
"(",
")"
]
| Set the API URL and language
Args:
api_url (str): API URL to use
lang (str): Language of the API URL
Raises:
:py:func:`mediawiki.exceptions.MediaWikiAPIURLError`: if the \
url is not a valid MediaWiki site | [
"Set",
"the",
"API",
"URL",
"and",
"language"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L317-L338 | train |
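Pointing the client at a different MediaWiki install; the target URL below is only illustrative, and on failure the method restores the previous api_url and lang before raising.

    from mediawiki import MediaWiki
    from mediawiki.exceptions import MediaWikiAPIURLError

    site = MediaWiki()
    try:
        site.set_api_url("https://en.wikivoyage.org/w/api.php", lang="en")  # example endpoint
    except MediaWikiAPIURLError:
        print("not a valid MediaWiki API endpoint; previous settings were kept")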
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki._reset_session | def _reset_session(self):
""" Set session information """
headers = {"User-Agent": self._user_agent}
self._session = requests.Session()
self._session.headers.update(headers)
self._is_logged_in = False | python | def _reset_session(self):
""" Set session information """
headers = {"User-Agent": self._user_agent}
self._session = requests.Session()
self._session.headers.update(headers)
self._is_logged_in = False | [
"def",
"_reset_session",
"(",
"self",
")",
":",
"headers",
"=",
"{",
"\"User-Agent\"",
":",
"self",
".",
"_user_agent",
"}",
"self",
".",
"_session",
"=",
"requests",
".",
"Session",
"(",
")",
"self",
".",
"_session",
".",
"headers",
".",
"update",
"(",
"headers",
")",
"self",
".",
"_is_logged_in",
"=",
"False"
]
| Set session information | [
"Set",
"session",
"information"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L340-L345 | train |
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki.random | def random(self, pages=1):
""" Request a random page title or list of random titles
Args:
pages (int): Number of random pages to return
Returns:
list or str: A list of random page titles or a random page \
title if pages = 1 """
if pages is None or pages < 1:
raise ValueError("Number of pages must be greater than 0")
query_params = {"list": "random", "rnnamespace": 0, "rnlimit": pages}
request = self.wiki_request(query_params)
titles = [page["title"] for page in request["query"]["random"]]
if len(titles) == 1:
return titles[0]
return titles | python | def random(self, pages=1):
""" Request a random page title or list of random titles
Args:
pages (int): Number of random pages to return
Returns:
list or str: A list of random page titles or a random page \
title if pages = 1 """
if pages is None or pages < 1:
raise ValueError("Number of pages must be greater than 0")
query_params = {"list": "random", "rnnamespace": 0, "rnlimit": pages}
request = self.wiki_request(query_params)
titles = [page["title"] for page in request["query"]["random"]]
if len(titles) == 1:
return titles[0]
return titles | [
"def",
"random",
"(",
"self",
",",
"pages",
"=",
"1",
")",
":",
"if",
"pages",
"is",
"None",
"or",
"pages",
"<",
"1",
":",
"raise",
"ValueError",
"(",
"\"Number of pages must be greater than 0\"",
")",
"query_params",
"=",
"{",
"\"list\"",
":",
"\"random\"",
",",
"\"rnnamespace\"",
":",
"0",
",",
"\"rnlimit\"",
":",
"pages",
"}",
"request",
"=",
"self",
".",
"wiki_request",
"(",
"query_params",
")",
"titles",
"=",
"[",
"page",
"[",
"\"title\"",
"]",
"for",
"page",
"in",
"request",
"[",
"\"query\"",
"]",
"[",
"\"random\"",
"]",
"]",
"if",
"len",
"(",
"titles",
")",
"==",
"1",
":",
"return",
"titles",
"[",
"0",
"]",
"return",
"titles"
]
| Request a random page title or list of random titles
Args:
pages (int): Number of random pages to return
Returns:
list or str: A list of random page titles or a random page \
title if pages = 1 | [
"Request",
"a",
"random",
"page",
"title",
"or",
"list",
"of",
"random",
"titles"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L371-L389 | train |
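random returns a bare title for pages=1 and a list otherwise; a quick sketch against an assumed default MediaWiki() instance.

    from mediawiki import MediaWiki  # assumed import path

    site = MediaWiki()
    print(site.random())           # a single title as a string
    print(site.random(pages=3))    # a list of three titles
    # site.random(pages=0) would raise ValueError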
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki.search | def search(self, query, results=10, suggestion=False):
""" Search for similar titles
Args:
query (str): Page title
results (int): Number of pages to return
suggestion (bool): Use suggestion
Returns:
tuple or list: tuple (list results, suggestion) if \
suggestion is **True**; list of results \
otherwise """
self._check_query(query, "Query must be specified")
search_params = {
"list": "search",
"srprop": "",
"srlimit": results,
"srsearch": query,
}
if suggestion:
search_params["srinfo"] = "suggestion"
raw_results = self.wiki_request(search_params)
self._check_error_response(raw_results, query)
search_results = [d["title"] for d in raw_results["query"]["search"]]
if suggestion:
sug = None
if raw_results["query"].get("searchinfo"):
sug = raw_results["query"]["searchinfo"]["suggestion"]
return search_results, sug
return search_results | python | def search(self, query, results=10, suggestion=False):
""" Search for similar titles
Args:
query (str): Page title
results (int): Number of pages to return
suggestion (bool): Use suggestion
Returns:
tuple or list: tuple (list results, suggestion) if \
suggestion is **True**; list of results \
otherwise """
self._check_query(query, "Query must be specified")
search_params = {
"list": "search",
"srprop": "",
"srlimit": results,
"srsearch": query,
}
if suggestion:
search_params["srinfo"] = "suggestion"
raw_results = self.wiki_request(search_params)
self._check_error_response(raw_results, query)
search_results = [d["title"] for d in raw_results["query"]["search"]]
if suggestion:
sug = None
if raw_results["query"].get("searchinfo"):
sug = raw_results["query"]["searchinfo"]["suggestion"]
return search_results, sug
return search_results | [
"def",
"search",
"(",
"self",
",",
"query",
",",
"results",
"=",
"10",
",",
"suggestion",
"=",
"False",
")",
":",
"self",
".",
"_check_query",
"(",
"query",
",",
"\"Query must be specified\"",
")",
"search_params",
"=",
"{",
"\"list\"",
":",
"\"search\"",
",",
"\"srprop\"",
":",
"\"\"",
",",
"\"srlimit\"",
":",
"results",
",",
"\"srsearch\"",
":",
"query",
",",
"}",
"if",
"suggestion",
":",
"search_params",
"[",
"\"srinfo\"",
"]",
"=",
"\"suggestion\"",
"raw_results",
"=",
"self",
".",
"wiki_request",
"(",
"search_params",
")",
"self",
".",
"_check_error_response",
"(",
"raw_results",
",",
"query",
")",
"search_results",
"=",
"[",
"d",
"[",
"\"title\"",
"]",
"for",
"d",
"in",
"raw_results",
"[",
"\"query\"",
"]",
"[",
"\"search\"",
"]",
"]",
"if",
"suggestion",
":",
"sug",
"=",
"None",
"if",
"raw_results",
"[",
"\"query\"",
"]",
".",
"get",
"(",
"\"searchinfo\"",
")",
":",
"sug",
"=",
"raw_results",
"[",
"\"query\"",
"]",
"[",
"\"searchinfo\"",
"]",
"[",
"\"suggestion\"",
"]",
"return",
"search_results",
",",
"sug",
"return",
"search_results"
]
| Search for similar titles
Args:
query (str): Page title
results (int): Number of pages to return
suggestion (bool): Use suggestion
Returns:
tuple or list: tuple (list results, suggestion) if \
suggestion is **True**; list of results \
otherwise | [
"Search",
"for",
"similar",
"titles"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L392-L426 | train |
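search with suggestion=True returns a (results, suggestion) tuple, where the suggestion may be None; the query strings are arbitrary.

    from mediawiki import MediaWiki  # assumed import path

    site = MediaWiki()
    titles = site.search("washington", results=5)  # plain list of titles
    titles, suggestion = site.search("washngton", results=5, suggestion=True)
    if suggestion:
        print("did you mean:", suggestion)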
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki.suggest | def suggest(self, query):
""" Gather suggestions based on the provided title or None if no
suggestions found
Args:
query (str): Page title
Returns:
String or None: Suggested page title or **None** if no \
suggestion found """
res, suggest = self.search(query, results=1, suggestion=True)
try:
title = suggest or res[0]
except IndexError: # page doesn't exist
title = None
return title | python | def suggest(self, query):
""" Gather suggestions based on the provided title or None if no
suggestions found
Args:
query (str): Page title
Returns:
String or None: Suggested page title or **None** if no \
suggestion found """
res, suggest = self.search(query, results=1, suggestion=True)
try:
title = suggest or res[0]
except IndexError: # page doesn't exist
title = None
return title | [
"def",
"suggest",
"(",
"self",
",",
"query",
")",
":",
"res",
",",
"suggest",
"=",
"self",
".",
"search",
"(",
"query",
",",
"results",
"=",
"1",
",",
"suggestion",
"=",
"True",
")",
"try",
":",
"title",
"=",
"suggest",
"or",
"res",
"[",
"0",
"]",
"except",
"IndexError",
":",
"# page doesn't exist",
"title",
"=",
"None",
"return",
"title"
]
| Gather suggestions based on the provided title or None if no
suggestions found
Args:
query (str): Page title
Returns:
String or None: Suggested page title or **None** if no \
suggestion found | [
"Gather",
"suggestions",
"based",
"on",
"the",
"provided",
"title",
"or",
"None",
"if",
"no",
"suggestions",
"found"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L429-L443 | train |
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki.geosearch | def geosearch(
self,
latitude=None,
longitude=None,
radius=1000,
title=None,
auto_suggest=True,
results=10,
):
""" Search for pages that relate to the provided geocoords or near
the page
Args:
latitude (Decimal or None): Latitude geocoord; must be \
coercible to decimal
longitude (Decimal or None): Longitude geocoord; must be \
coercible to decimal
radius (int): Radius around page or geocoords to pull back; \
in meters
title (str): Page title to use as a geocoordinate; this has \
precedence over lat/long
auto_suggest (bool): Auto-suggest the page title
results (int): Number of pages within the radius to return
Returns:
list: A listing of page titles
Raises:
ValueError: If either the passed latitude or longitude are \
not coercible to a Decimal """
def test_lat_long(val):
""" handle testing lat and long """
if not isinstance(val, Decimal):
error = (
"Latitude and Longitude must be specified either as "
"a Decimal or in formats that can be coerced into "
"a Decimal."
)
try:
return Decimal(val)
except (DecimalException, TypeError):
raise ValueError(error)
return val
# end local function
params = {"list": "geosearch", "gsradius": radius, "gslimit": results}
if title is not None:
if auto_suggest:
title = self.suggest(title)
params["gspage"] = title
else:
lat = test_lat_long(latitude)
lon = test_lat_long(longitude)
params["gscoord"] = "{0}|{1}".format(lat, lon)
raw_results = self.wiki_request(params)
self._check_error_response(raw_results, title)
return [d["title"] for d in raw_results["query"]["geosearch"]] | python | def geosearch(
self,
latitude=None,
longitude=None,
radius=1000,
title=None,
auto_suggest=True,
results=10,
):
""" Search for pages that relate to the provided geocoords or near
the page
Args:
latitude (Decimal or None): Latitude geocoord; must be \
coercible to decimal
longitude (Decimal or None): Longitude geocoord; must be \
coercible to decimal
radius (int): Radius around page or geocoords to pull back; \
in meters
title (str): Page title to use as a geocoordinate; this has \
precedence over lat/long
auto_suggest (bool): Auto-suggest the page title
results (int): Number of pages within the radius to return
Returns:
list: A listing of page titles
Raises:
ValueError: If either the passed latitude or longitude are \
not coercible to a Decimal """
def test_lat_long(val):
""" handle testing lat and long """
if not isinstance(val, Decimal):
error = (
"Latitude and Longitude must be specified either as "
"a Decimal or in formats that can be coerced into "
"a Decimal."
)
try:
return Decimal(val)
except (DecimalException, TypeError):
raise ValueError(error)
return val
# end local function
params = {"list": "geosearch", "gsradius": radius, "gslimit": results}
if title is not None:
if auto_suggest:
title = self.suggest(title)
params["gspage"] = title
else:
lat = test_lat_long(latitude)
lon = test_lat_long(longitude)
params["gscoord"] = "{0}|{1}".format(lat, lon)
raw_results = self.wiki_request(params)
self._check_error_response(raw_results, title)
return [d["title"] for d in raw_results["query"]["geosearch"]] | [
"def",
"geosearch",
"(",
"self",
",",
"latitude",
"=",
"None",
",",
"longitude",
"=",
"None",
",",
"radius",
"=",
"1000",
",",
"title",
"=",
"None",
",",
"auto_suggest",
"=",
"True",
",",
"results",
"=",
"10",
",",
")",
":",
"def",
"test_lat_long",
"(",
"val",
")",
":",
"\"\"\" handle testing lat and long \"\"\"",
"if",
"not",
"isinstance",
"(",
"val",
",",
"Decimal",
")",
":",
"error",
"=",
"(",
"\"Latitude and Longitude must be specified either as \"",
"\"a Decimal or in formats that can be coerced into \"",
"\"a Decimal.\"",
")",
"try",
":",
"return",
"Decimal",
"(",
"val",
")",
"except",
"(",
"DecimalException",
",",
"TypeError",
")",
":",
"raise",
"ValueError",
"(",
"error",
")",
"return",
"val",
"# end local function",
"params",
"=",
"{",
"\"list\"",
":",
"\"geosearch\"",
",",
"\"gsradius\"",
":",
"radius",
",",
"\"gslimit\"",
":",
"results",
"}",
"if",
"title",
"is",
"not",
"None",
":",
"if",
"auto_suggest",
":",
"title",
"=",
"self",
".",
"suggest",
"(",
"title",
")",
"params",
"[",
"\"gspage\"",
"]",
"=",
"title",
"else",
":",
"lat",
"=",
"test_lat_long",
"(",
"latitude",
")",
"lon",
"=",
"test_lat_long",
"(",
"longitude",
")",
"params",
"[",
"\"gscoord\"",
"]",
"=",
"\"{0}|{1}\"",
".",
"format",
"(",
"lat",
",",
"lon",
")",
"raw_results",
"=",
"self",
".",
"wiki_request",
"(",
"params",
")",
"self",
".",
"_check_error_response",
"(",
"raw_results",
",",
"title",
")",
"return",
"[",
"d",
"[",
"\"title\"",
"]",
"for",
"d",
"in",
"raw_results",
"[",
"\"query\"",
"]",
"[",
"\"geosearch\"",
"]",
"]"
]
| Search for pages that relate to the provided geocoords or near
the page
Args:
latitude (Decimal or None): Latitude geocoord; must be \
coercible to decimal
longitude (Decimal or None): Longitude geocoord; must be \
coercible to decimal
radius (int): Radius around page or geocoords to pull back; \
in meters
title (str): Page title to use as a geocoordinate; this has \
precedence over lat/long
auto_suggest (bool): Auto-suggest the page title
results (int): Number of pages within the radius to return
Returns:
list: A listing of page titles
Raises:
ValueError: If either the passed latitude or longitude are \
not coercible to a Decimal | [
"Search",
"for",
"pages",
"that",
"relate",
"to",
"the",
"provided",
"geocoords",
"or",
"near",
"the",
"page"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L446-L505 | train |
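geosearch accepts either a page title or a latitude/longitude pair coercible to Decimal; the coordinates and title below are for illustration only.

    from decimal import Decimal
    from mediawiki import MediaWiki  # assumed import path

    site = MediaWiki()
    # by coordinates; radius is in meters
    nearby = site.geosearch(latitude=Decimal("38.891"), longitude=Decimal("-77.036"), radius=500)
    # or anchored on an existing page title instead of coordinates
    around_page = site.geosearch(title="White House", results=5)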
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki.opensearch | def opensearch(self, query, results=10, redirect=True):
""" Execute a MediaWiki opensearch request, similar to search box
suggestions and conforming to the OpenSearch specification
Args:
query (str): Title to search for
results (int): Number of pages to return
redirect (bool): If **False** return the redirect itself, \
otherwise resolve redirects
Returns:
List: List of results that are stored in a tuple \
(Title, Summary, URL) """
self._check_query(query, "Query must be specified")
query_params = {
"action": "opensearch",
"search": query,
"limit": (100 if results > 100 else results),
"redirects": ("resolve" if redirect else "return"),
"warningsaserror": True,
"namespace": "",
}
results = self.wiki_request(query_params)
self._check_error_response(results, query)
res = list()
for i, item in enumerate(results[1]):
res.append((item, results[2][i], results[3][i]))
return res | python | def opensearch(self, query, results=10, redirect=True):
""" Execute a MediaWiki opensearch request, similar to search box
suggestions and conforming to the OpenSearch specification
Args:
query (str): Title to search for
results (int): Number of pages to return
redirect (bool): If **False** return the redirect itself, \
otherwise resolve redirects
Returns:
List: List of results that are stored in a tuple \
(Title, Summary, URL) """
self._check_query(query, "Query must be specified")
query_params = {
"action": "opensearch",
"search": query,
"limit": (100 if results > 100 else results),
"redirects": ("resolve" if redirect else "return"),
"warningsaserror": True,
"namespace": "",
}
results = self.wiki_request(query_params)
self._check_error_response(results, query)
res = list()
for i, item in enumerate(results[1]):
res.append((item, results[2][i], results[3][i]))
return res | [
"def",
"opensearch",
"(",
"self",
",",
"query",
",",
"results",
"=",
"10",
",",
"redirect",
"=",
"True",
")",
":",
"self",
".",
"_check_query",
"(",
"query",
",",
"\"Query must be specified\"",
")",
"query_params",
"=",
"{",
"\"action\"",
":",
"\"opensearch\"",
",",
"\"search\"",
":",
"query",
",",
"\"limit\"",
":",
"(",
"100",
"if",
"results",
">",
"100",
"else",
"results",
")",
",",
"\"redirects\"",
":",
"(",
"\"resolve\"",
"if",
"redirect",
"else",
"\"return\"",
")",
",",
"\"warningsaserror\"",
":",
"True",
",",
"\"namespace\"",
":",
"\"\"",
",",
"}",
"results",
"=",
"self",
".",
"wiki_request",
"(",
"query_params",
")",
"self",
".",
"_check_error_response",
"(",
"results",
",",
"query",
")",
"res",
"=",
"list",
"(",
")",
"for",
"i",
",",
"item",
"in",
"enumerate",
"(",
"results",
"[",
"1",
"]",
")",
":",
"res",
".",
"append",
"(",
"(",
"item",
",",
"results",
"[",
"2",
"]",
"[",
"i",
"]",
",",
"results",
"[",
"3",
"]",
"[",
"i",
"]",
")",
")",
"return",
"res"
]
| Execute a MediaWiki opensearch request, similar to search box
suggestions and conforming to the OpenSearch specification
Args:
query (str): Title to search for
results (int): Number of pages to return
redirect (bool): If **False** return the redirect itself, \
otherwise resolve redirects
Returns:
List: List of results that are stored in a tuple \
(Title, Summary, URL) | [
"Execute",
"a",
"MediaWiki",
"opensearch",
"request",
"similar",
"to",
"search",
"box",
"suggestions",
"and",
"conforming",
"to",
"the",
"OpenSearch",
"specification"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L508-L540 | train |
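opensearch returns (title, summary, URL) tuples rather than bare titles; a sketch with an assumed default instance.

    from mediawiki import MediaWiki  # assumed import path

    site = MediaWiki()
    for title, summary, url in site.opensearch("new york", results=3):
        print(title, url)  # summary may be empty depending on the wiki's configuration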
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki.prefixsearch | def prefixsearch(self, prefix, results=10):
""" Perform a prefix search using the provided prefix string
Args:
prefix (str): Prefix string to use for search
results (int): Number of pages with the prefix to return
Returns:
list: List of page titles
Note:
**Per the documentation:** "The purpose of this module is \
similar to action=opensearch: to take user input and provide \
the best-matching titles. Depending on the search engine \
backend, this might include typo correction, redirect \
avoidance, or other heuristics." """
self._check_query(prefix, "Prefix must be specified")
query_params = {
"list": "prefixsearch",
"pssearch": prefix,
"pslimit": ("max" if results > 500 else results),
"psnamespace": 0,
"psoffset": 0, # parameterize to skip to later in the list?
}
raw_results = self.wiki_request(query_params)
self._check_error_response(raw_results, prefix)
return [rec["title"] for rec in raw_results["query"]["prefixsearch"]] | python | def prefixsearch(self, prefix, results=10):
""" Perform a prefix search using the provided prefix string
Args:
prefix (str): Prefix string to use for search
results (int): Number of pages with the prefix to return
Returns:
list: List of page titles
Note:
**Per the documentation:** "The purpose of this module is \
similar to action=opensearch: to take user input and provide \
the best-matching titles. Depending on the search engine \
backend, this might include typo correction, redirect \
avoidance, or other heuristics." """
self._check_query(prefix, "Prefix must be specified")
query_params = {
"list": "prefixsearch",
"pssearch": prefix,
"pslimit": ("max" if results > 500 else results),
"psnamespace": 0,
"psoffset": 0, # parameterize to skip to later in the list?
}
raw_results = self.wiki_request(query_params)
self._check_error_response(raw_results, prefix)
return [rec["title"] for rec in raw_results["query"]["prefixsearch"]] | [
"def",
"prefixsearch",
"(",
"self",
",",
"prefix",
",",
"results",
"=",
"10",
")",
":",
"self",
".",
"_check_query",
"(",
"prefix",
",",
"\"Prefix must be specified\"",
")",
"query_params",
"=",
"{",
"\"list\"",
":",
"\"prefixsearch\"",
",",
"\"pssearch\"",
":",
"prefix",
",",
"\"pslimit\"",
":",
"(",
"\"max\"",
"if",
"results",
">",
"500",
"else",
"results",
")",
",",
"\"psnamespace\"",
":",
"0",
",",
"\"psoffset\"",
":",
"0",
",",
"# parameterize to skip to later in the list?",
"}",
"raw_results",
"=",
"self",
".",
"wiki_request",
"(",
"query_params",
")",
"self",
".",
"_check_error_response",
"(",
"raw_results",
",",
"prefix",
")",
"return",
"[",
"rec",
"[",
"\"title\"",
"]",
"for",
"rec",
"in",
"raw_results",
"[",
"\"query\"",
"]",
"[",
"\"prefixsearch\"",
"]",
"]"
]
| Perform a prefix search using the provided prefix string
Args:
prefix (str): Prefix string to use for search
results (int): Number of pages with the prefix to return
Returns:
list: List of page titles
Note:
**Per the documentation:** "The purpose of this module is \
similar to action=opensearch: to take user input and provide \
the best-matching titles. Depending on the search engine \
backend, this might include typo correction, redirect \
avoidance, or other heuristics." | [
"Perform",
"a",
"prefix",
"search",
"using",
"the",
"provided",
"prefix",
"string"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L543-L572 | train |
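A brief usage sketch for `prefixsearch` (import path assumed, network access to the default endpoint required):

    from mediawiki import MediaWiki  # assumed top-level export

    wiki = MediaWiki()
    # returns plain page titles; the underlying 'pslimit' switches to "max" above 500
    print(wiki.prefixsearch("ba", results=10))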
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki.summary | def summary(self, title, sentences=0, chars=0, auto_suggest=True, redirect=True):
""" Get the summary for the title in question
Args:
title (str): Page title to summarize
sentences (int): Number of sentences to return in summary
chars (int): Number of characters to return in summary
auto_suggest (bool): Run auto-suggest on title before \
summarizing
redirect (bool): Use page redirect on title before summarizing
Returns:
str: The summarized results of the page
Note:
Precedence for parameters: sentences then chars; if both are \
0 then the entire first section is returned """
page_info = self.page(title, auto_suggest=auto_suggest, redirect=redirect)
return page_info.summarize(sentences, chars) | python | def summary(self, title, sentences=0, chars=0, auto_suggest=True, redirect=True):
""" Get the summary for the title in question
Args:
title (str): Page title to summarize
sentences (int): Number of sentences to return in summary
chars (int): Number of characters to return in summary
auto_suggest (bool): Run auto-suggest on title before \
summarizing
redirect (bool): Use page redirect on title before summarizing
Returns:
str: The summarized results of the page
Note:
Precedence for parameters: sentences then chars; if both are \
0 then the entire first section is returned """
page_info = self.page(title, auto_suggest=auto_suggest, redirect=redirect)
return page_info.summarize(sentences, chars) | [
"def",
"summary",
"(",
"self",
",",
"title",
",",
"sentences",
"=",
"0",
",",
"chars",
"=",
"0",
",",
"auto_suggest",
"=",
"True",
",",
"redirect",
"=",
"True",
")",
":",
"page_info",
"=",
"self",
".",
"page",
"(",
"title",
",",
"auto_suggest",
"=",
"auto_suggest",
",",
"redirect",
"=",
"redirect",
")",
"return",
"page_info",
".",
"summarize",
"(",
"sentences",
",",
"chars",
")"
]
| Get the summary for the title in question
Args:
title (str): Page title to summarize
sentences (int): Number of sentences to return in summary
chars (int): Number of characters to return in summary
auto_suggest (bool): Run auto-suggest on title before \
summarizing
redirect (bool): Use page redirect on title before summarizing
Returns:
str: The summarized results of the page
Note:
Precedence for parameters: sentences then chars; if both are \
0 then the entire first section is returned | [
"Get",
"the",
"summary",
"for",
"the",
"title",
"in",
"question"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L575-L591 | train |
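A usage sketch for `summary` (import path assumed): per the docstring above, `sentences` takes precedence over `chars`, and leaving both at 0 returns the whole first section.

    from mediawiki import MediaWiki  # assumed top-level export

    wiki = MediaWiki()
    # two-sentence summary; auto-suggest and redirect resolution are on by default
    print(wiki.summary("Chess", sentences=2))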
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki.categorytree | def categorytree(self, category, depth=5):
""" Generate the Category Tree for the given categories
Args:
category(str or list of strings): Category name(s)
depth(int): Depth to traverse the tree
Returns:
dict: Category tree structure
Note:
Set depth to **None** to get the whole tree
Note:
Return Data Structure: Subcategory contains the same \
recursive structure
>>> {
'category': {
'depth': Number,
'links': list,
'parent-categories': list,
'sub-categories': dict
}
}
.. versionadded:: 0.3.10 """
def __cat_tree_rec(cat, depth, tree, level, categories, links):
""" recursive function to build out the tree """
tree[cat] = dict()
tree[cat]["depth"] = level
tree[cat]["sub-categories"] = dict()
tree[cat]["links"] = list()
tree[cat]["parent-categories"] = list()
parent_cats = list()
if cat not in categories:
tries = 0
while True:
if tries > 10:
raise MediaWikiCategoryTreeError(cat)
try:
pag = self.page("{0}:{1}".format(self.category_prefix, cat))
categories[cat] = pag
parent_cats = categories[cat].categories
links[cat] = self.categorymembers(
cat, results=None, subcategories=True
)
break
except PageError:
raise PageError("{0}:{1}".format(self.category_prefix, cat))
except KeyboardInterrupt:
raise
except Exception:
tries = tries + 1
time.sleep(1)
else:
parent_cats = categories[cat].categories
tree[cat]["parent-categories"].extend(parent_cats)
tree[cat]["links"].extend(links[cat][0])
if depth and level >= depth:
for ctg in links[cat][1]:
tree[cat]["sub-categories"][ctg] = None
else:
for ctg in links[cat][1]:
__cat_tree_rec(
ctg,
depth,
tree[cat]["sub-categories"],
level + 1,
categories,
links,
)
# ###################################
# ### Actual Function Code ###
# ###################################
# make it simple to use both a list or a single category term
if not isinstance(category, list):
cats = [category]
else:
cats = category
# parameter verification
if len(cats) == 1 and (cats[0] is None or cats[0] == ""):
msg = (
"CategoryTree: Parameter 'category' must either "
"be a list of one or more categories or a string; "
"provided: '{}'".format(category)
)
raise ValueError(msg)
if depth is not None and depth < 1:
msg = (
"CategoryTree: Parameter 'depth' must be either None "
"(for the full tree) or be greater than 0"
)
raise ValueError(msg)
results = dict()
categories = dict()
links = dict()
for cat in cats:
if cat is None or cat == "":
continue
__cat_tree_rec(cat, depth, results, 0, categories, links)
return results | python | def categorytree(self, category, depth=5):
""" Generate the Category Tree for the given categories
Args:
category(str or list of strings): Category name(s)
depth(int): Depth to traverse the tree
Returns:
dict: Category tree structure
Note:
Set depth to **None** to get the whole tree
Note:
Return Data Structure: Subcategory contains the same \
recursive structure
>>> {
'category': {
'depth': Number,
'links': list,
'parent-categories': list,
'sub-categories': dict
}
}
.. versionadded:: 0.3.10 """
def __cat_tree_rec(cat, depth, tree, level, categories, links):
""" recursive function to build out the tree """
tree[cat] = dict()
tree[cat]["depth"] = level
tree[cat]["sub-categories"] = dict()
tree[cat]["links"] = list()
tree[cat]["parent-categories"] = list()
parent_cats = list()
if cat not in categories:
tries = 0
while True:
if tries > 10:
raise MediaWikiCategoryTreeError(cat)
try:
pag = self.page("{0}:{1}".format(self.category_prefix, cat))
categories[cat] = pag
parent_cats = categories[cat].categories
links[cat] = self.categorymembers(
cat, results=None, subcategories=True
)
break
except PageError:
raise PageError("{0}:{1}".format(self.category_prefix, cat))
except KeyboardInterrupt:
raise
except Exception:
tries = tries + 1
time.sleep(1)
else:
parent_cats = categories[cat].categories
tree[cat]["parent-categories"].extend(parent_cats)
tree[cat]["links"].extend(links[cat][0])
if depth and level >= depth:
for ctg in links[cat][1]:
tree[cat]["sub-categories"][ctg] = None
else:
for ctg in links[cat][1]:
__cat_tree_rec(
ctg,
depth,
tree[cat]["sub-categories"],
level + 1,
categories,
links,
)
# ###################################
# ### Actual Function Code ###
# ###################################
# make it simple to use both a list or a single category term
if not isinstance(category, list):
cats = [category]
else:
cats = category
# parameter verification
if len(cats) == 1 and (cats[0] is None or cats[0] == ""):
msg = (
"CategoryTree: Parameter 'category' must either "
"be a list of one or more categories or a string; "
"provided: '{}'".format(category)
)
raise ValueError(msg)
if depth is not None and depth < 1:
msg = (
"CategoryTree: Parameter 'depth' must be either None "
"(for the full tree) or be greater than 0"
)
raise ValueError(msg)
results = dict()
categories = dict()
links = dict()
for cat in cats:
if cat is None or cat == "":
continue
__cat_tree_rec(cat, depth, results, 0, categories, links)
return results | [
"def",
"categorytree",
"(",
"self",
",",
"category",
",",
"depth",
"=",
"5",
")",
":",
"def",
"__cat_tree_rec",
"(",
"cat",
",",
"depth",
",",
"tree",
",",
"level",
",",
"categories",
",",
"links",
")",
":",
"\"\"\" recursive function to build out the tree \"\"\"",
"tree",
"[",
"cat",
"]",
"=",
"dict",
"(",
")",
"tree",
"[",
"cat",
"]",
"[",
"\"depth\"",
"]",
"=",
"level",
"tree",
"[",
"cat",
"]",
"[",
"\"sub-categories\"",
"]",
"=",
"dict",
"(",
")",
"tree",
"[",
"cat",
"]",
"[",
"\"links\"",
"]",
"=",
"list",
"(",
")",
"tree",
"[",
"cat",
"]",
"[",
"\"parent-categories\"",
"]",
"=",
"list",
"(",
")",
"parent_cats",
"=",
"list",
"(",
")",
"if",
"cat",
"not",
"in",
"categories",
":",
"tries",
"=",
"0",
"while",
"True",
":",
"if",
"tries",
">",
"10",
":",
"raise",
"MediaWikiCategoryTreeError",
"(",
"cat",
")",
"try",
":",
"pag",
"=",
"self",
".",
"page",
"(",
"\"{0}:{1}\"",
".",
"format",
"(",
"self",
".",
"category_prefix",
",",
"cat",
")",
")",
"categories",
"[",
"cat",
"]",
"=",
"pag",
"parent_cats",
"=",
"categories",
"[",
"cat",
"]",
".",
"categories",
"links",
"[",
"cat",
"]",
"=",
"self",
".",
"categorymembers",
"(",
"cat",
",",
"results",
"=",
"None",
",",
"subcategories",
"=",
"True",
")",
"break",
"except",
"PageError",
":",
"raise",
"PageError",
"(",
"\"{0}:{1}\"",
".",
"format",
"(",
"self",
".",
"category_prefix",
",",
"cat",
")",
")",
"except",
"KeyboardInterrupt",
":",
"raise",
"except",
"Exception",
":",
"tries",
"=",
"tries",
"+",
"1",
"time",
".",
"sleep",
"(",
"1",
")",
"else",
":",
"parent_cats",
"=",
"categories",
"[",
"cat",
"]",
".",
"categories",
"tree",
"[",
"cat",
"]",
"[",
"\"parent-categories\"",
"]",
".",
"extend",
"(",
"parent_cats",
")",
"tree",
"[",
"cat",
"]",
"[",
"\"links\"",
"]",
".",
"extend",
"(",
"links",
"[",
"cat",
"]",
"[",
"0",
"]",
")",
"if",
"depth",
"and",
"level",
">=",
"depth",
":",
"for",
"ctg",
"in",
"links",
"[",
"cat",
"]",
"[",
"1",
"]",
":",
"tree",
"[",
"cat",
"]",
"[",
"\"sub-categories\"",
"]",
"[",
"ctg",
"]",
"=",
"None",
"else",
":",
"for",
"ctg",
"in",
"links",
"[",
"cat",
"]",
"[",
"1",
"]",
":",
"__cat_tree_rec",
"(",
"ctg",
",",
"depth",
",",
"tree",
"[",
"cat",
"]",
"[",
"\"sub-categories\"",
"]",
",",
"level",
"+",
"1",
",",
"categories",
",",
"links",
",",
")",
"# ###################################",
"# ### Actual Function Code ###",
"# ###################################",
"# make it simple to use both a list or a single category term",
"if",
"not",
"isinstance",
"(",
"category",
",",
"list",
")",
":",
"cats",
"=",
"[",
"category",
"]",
"else",
":",
"cats",
"=",
"category",
"# parameter verification",
"if",
"len",
"(",
"cats",
")",
"==",
"1",
"and",
"(",
"cats",
"[",
"0",
"]",
"is",
"None",
"or",
"cats",
"[",
"0",
"]",
"==",
"\"\"",
")",
":",
"msg",
"=",
"(",
"\"CategoryTree: Parameter 'category' must either \"",
"\"be a list of one or more categories or a string; \"",
"\"provided: '{}'\"",
".",
"format",
"(",
"category",
")",
")",
"raise",
"ValueError",
"(",
"msg",
")",
"if",
"depth",
"is",
"not",
"None",
"and",
"depth",
"<",
"1",
":",
"msg",
"=",
"(",
"\"CategoryTree: Parameter 'depth' must be either None \"",
"\"(for the full tree) or be greater than 0\"",
")",
"raise",
"ValueError",
"(",
"msg",
")",
"results",
"=",
"dict",
"(",
")",
"categories",
"=",
"dict",
"(",
")",
"links",
"=",
"dict",
"(",
")",
"for",
"cat",
"in",
"cats",
":",
"if",
"cat",
"is",
"None",
"or",
"cat",
"==",
"\"\"",
":",
"continue",
"__cat_tree_rec",
"(",
"cat",
",",
"depth",
",",
"results",
",",
"0",
",",
"categories",
",",
"links",
")",
"return",
"results"
]
| Generate the Category Tree for the given categories
Args:
category(str or list of strings): Category name(s)
depth(int): Depth to traverse the tree
Returns:
dict: Category tree structure
Note:
Set depth to **None** to get the whole tree
Note:
Return Data Structure: Subcategory contains the same \
recursive structure
>>> {
'category': {
'depth': Number,
'links': list,
'parent-categories': list,
'sub-categories': dict
}
}
.. versionadded:: 0.3.10 | [
"Generate",
"the",
"Category",
"Tree",
"for",
"the",
"given",
"categories"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L662-L770 | train |
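A usage sketch for `categorytree` (import path assumed; the category name is only an example): with a small depth the traversal stays cheap, and sub-categories beyond that depth are mapped to None as described in the docstring.

    from mediawiki import MediaWiki  # assumed top-level export

    wiki = MediaWiki()
    tree = wiki.categorytree("Chess", depth=1)   # depth=None would walk the whole tree
    root = tree["Chess"]
    # each node carries depth, links, parent-categories and a sub-categories dict
    print(root["depth"], len(root["links"]), list(root["sub-categories"])[:5])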
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki.page | def page(
self, title=None, pageid=None, auto_suggest=True, redirect=True, preload=False
):
""" Get MediaWiki page based on the provided title or pageid
Args:
title (str): Page title
pageid (int): MediaWiki page identifier
            auto_suggest (bool): **True:** Allow page title auto-suggest
redirect (bool): **True:** Follow page redirects
preload (bool): **True:** Load most page properties
Raises:
ValueError: when title is blank or None and no pageid is \
provided
Raises:
:py:func:`mediawiki.exceptions.PageError`: if page does \
not exist
Note:
Title takes precedence over pageid if both are provided """
if (title is None or title.strip() == "") and pageid is None:
raise ValueError("Either a title or a pageid must be specified")
elif title:
if auto_suggest:
temp_title = self.suggest(title)
if temp_title is None: # page doesn't exist
raise PageError(title=title)
else:
title = temp_title
return MediaWikiPage(self, title, redirect=redirect, preload=preload)
else: # must be pageid
return MediaWikiPage(self, pageid=pageid, preload=preload) | python | def page(
self, title=None, pageid=None, auto_suggest=True, redirect=True, preload=False
):
""" Get MediaWiki page based on the provided title or pageid
Args:
title (str): Page title
pageid (int): MediaWiki page identifier
            auto_suggest (bool): **True:** Allow page title auto-suggest
redirect (bool): **True:** Follow page redirects
preload (bool): **True:** Load most page properties
Raises:
ValueError: when title is blank or None and no pageid is \
provided
Raises:
:py:func:`mediawiki.exceptions.PageError`: if page does \
not exist
Note:
Title takes precedence over pageid if both are provided """
if (title is None or title.strip() == "") and pageid is None:
raise ValueError("Either a title or a pageid must be specified")
elif title:
if auto_suggest:
temp_title = self.suggest(title)
if temp_title is None: # page doesn't exist
raise PageError(title=title)
else:
title = temp_title
return MediaWikiPage(self, title, redirect=redirect, preload=preload)
else: # must be pageid
return MediaWikiPage(self, pageid=pageid, preload=preload) | [
"def",
"page",
"(",
"self",
",",
"title",
"=",
"None",
",",
"pageid",
"=",
"None",
",",
"auto_suggest",
"=",
"True",
",",
"redirect",
"=",
"True",
",",
"preload",
"=",
"False",
")",
":",
"if",
"(",
"title",
"is",
"None",
"or",
"title",
".",
"strip",
"(",
")",
"==",
"\"\"",
")",
"and",
"pageid",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Either a title or a pageid must be specified\"",
")",
"elif",
"title",
":",
"if",
"auto_suggest",
":",
"temp_title",
"=",
"self",
".",
"suggest",
"(",
"title",
")",
"if",
"temp_title",
"is",
"None",
":",
"# page doesn't exist",
"raise",
"PageError",
"(",
"title",
"=",
"title",
")",
"else",
":",
"title",
"=",
"temp_title",
"return",
"MediaWikiPage",
"(",
"self",
",",
"title",
",",
"redirect",
"=",
"redirect",
",",
"preload",
"=",
"preload",
")",
"else",
":",
"# must be pageid",
"return",
"MediaWikiPage",
"(",
"self",
",",
"pageid",
"=",
"pageid",
",",
"preload",
"=",
"preload",
")"
]
| Get MediaWiki page based on the provided title or pageid
Args:
title (str): Page title
pageid (int): MediaWiki page identifier
            auto_suggest (bool): **True:** Allow page title auto-suggest
redirect (bool): **True:** Follow page redirects
preload (bool): **True:** Load most page properties
Raises:
ValueError: when title is blank or None and no pageid is \
provided
Raises:
:py:func:`mediawiki.exceptions.PageError`: if page does \
not exist
Note:
Title takes precedence over pageid if both are provided | [
"Get",
"MediaWiki",
"page",
"based",
"on",
"the",
"provided",
"title",
"or",
"pageid"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L772-L802 | train |
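A usage sketch for `page` showing the two error paths raised above; the exception names appear in the code and are assumed to live in `mediawiki.exceptions`, and the page title is only an example.

    from mediawiki import MediaWiki
    from mediawiki.exceptions import PageError, DisambiguationError  # assumed module location

    wiki = MediaWiki()
    try:
        pg = wiki.page("Chess", auto_suggest=False)  # title wins over pageid if both are given
        print(pg.title, pg.url)
    except DisambiguationError as exc:
        print("ambiguous title:", exc)
    except PageError:
        print("no matching page")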
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki.wiki_request | def wiki_request(self, params):
""" Make a request to the MediaWiki API using the given search
parameters
Args:
params (dict): Request parameters
Returns:
A parsed dict of the JSON response
Note:
Useful when wanting to query the MediaWiki site for some \
value that is not part of the wrapper API """
params["format"] = "json"
if "action" not in params:
params["action"] = "query"
limit = self._rate_limit
last_call = self._rate_limit_last_call
if limit and last_call and last_call + self._min_wait > datetime.now():
            # call time too quick for rate limited api requests, wait
wait_time = (last_call + self._min_wait) - datetime.now()
time.sleep(int(wait_time.total_seconds()))
req = self._get_response(params)
if self._rate_limit:
self._rate_limit_last_call = datetime.now()
return req | python | def wiki_request(self, params):
""" Make a request to the MediaWiki API using the given search
parameters
Args:
params (dict): Request parameters
Returns:
A parsed dict of the JSON response
Note:
Useful when wanting to query the MediaWiki site for some \
value that is not part of the wrapper API """
params["format"] = "json"
if "action" not in params:
params["action"] = "query"
limit = self._rate_limit
last_call = self._rate_limit_last_call
if limit and last_call and last_call + self._min_wait > datetime.now():
            # call time too quick for rate limited api requests, wait
wait_time = (last_call + self._min_wait) - datetime.now()
time.sleep(int(wait_time.total_seconds()))
req = self._get_response(params)
if self._rate_limit:
self._rate_limit_last_call = datetime.now()
return req | [
"def",
"wiki_request",
"(",
"self",
",",
"params",
")",
":",
"params",
"[",
"\"format\"",
"]",
"=",
"\"json\"",
"if",
"\"action\"",
"not",
"in",
"params",
":",
"params",
"[",
"\"action\"",
"]",
"=",
"\"query\"",
"limit",
"=",
"self",
".",
"_rate_limit",
"last_call",
"=",
"self",
".",
"_rate_limit_last_call",
"if",
"limit",
"and",
"last_call",
"and",
"last_call",
"+",
"self",
".",
"_min_wait",
">",
"datetime",
".",
"now",
"(",
")",
":",
"# call time to quick for rate limited api requests, wait",
"wait_time",
"=",
"(",
"last_call",
"+",
"self",
".",
"_min_wait",
")",
"-",
"datetime",
".",
"now",
"(",
")",
"time",
".",
"sleep",
"(",
"int",
"(",
"wait_time",
".",
"total_seconds",
"(",
")",
")",
")",
"req",
"=",
"self",
".",
"_get_response",
"(",
"params",
")",
"if",
"self",
".",
"_rate_limit",
":",
"self",
".",
"_rate_limit_last_call",
"=",
"datetime",
".",
"now",
"(",
")",
"return",
"req"
]
| Make a request to the MediaWiki API using the given search
parameters
Args:
params (dict): Request parameters
Returns:
A parsed dict of the JSON response
Note:
Useful when wanting to query the MediaWiki site for some \
value that is not part of the wrapper API | [
"Make",
"a",
"request",
"to",
"the",
"MediaWiki",
"API",
"using",
"the",
"given",
"search",
"parameters"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L804-L832 | train |
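A usage sketch for `wiki_request`, the documented escape hatch for API values the wrapper does not expose; the siteinfo/statistics parameters are standard MediaWiki API query parameters rather than part of this wrapper, and the import path is assumed.

    from mediawiki import MediaWiki  # assumed top-level export

    wiki = MediaWiki()
    # 'format' and 'action' are filled in by wiki_request when missing
    res = wiki.wiki_request({"meta": "siteinfo", "siprop": "statistics"})
    print(res["query"]["statistics"]["articles"])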
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki._get_site_info | def _get_site_info(self):
""" Parse out the Wikimedia site information including
API Version and Extensions """
response = self.wiki_request(
{"meta": "siteinfo", "siprop": "extensions|general"}
)
# parse what we need out here!
query = response.get("query", None)
if query is None or query.get("general", None) is None:
raise MediaWikiException("Missing query in response")
gen = query.get("general", None)
api_version = gen["generator"].split(" ")[1].split("-")[0]
major_minor = api_version.split(".")
for i, item in enumerate(major_minor):
major_minor[i] = int(item)
self._api_version = tuple(major_minor)
self._api_version_str = ".".join([str(x) for x in self._api_version])
# parse the base url out
tmp = gen.get("server", "")
if tmp == "":
raise MediaWikiException("Unable to parse base url")
if tmp.startswith("http://") or tmp.startswith("https://"):
self._base_url = tmp
elif gen["base"].startswith("https:"):
self._base_url = "https:{}".format(tmp)
else:
self._base_url = "http:{}".format(tmp)
self._extensions = [ext["name"] for ext in query["extensions"]]
self._extensions = sorted(list(set(self._extensions))) | python | def _get_site_info(self):
""" Parse out the Wikimedia site information including
API Version and Extensions """
response = self.wiki_request(
{"meta": "siteinfo", "siprop": "extensions|general"}
)
# parse what we need out here!
query = response.get("query", None)
if query is None or query.get("general", None) is None:
raise MediaWikiException("Missing query in response")
gen = query.get("general", None)
api_version = gen["generator"].split(" ")[1].split("-")[0]
major_minor = api_version.split(".")
for i, item in enumerate(major_minor):
major_minor[i] = int(item)
self._api_version = tuple(major_minor)
self._api_version_str = ".".join([str(x) for x in self._api_version])
# parse the base url out
tmp = gen.get("server", "")
if tmp == "":
raise MediaWikiException("Unable to parse base url")
if tmp.startswith("http://") or tmp.startswith("https://"):
self._base_url = tmp
elif gen["base"].startswith("https:"):
self._base_url = "https:{}".format(tmp)
else:
self._base_url = "http:{}".format(tmp)
self._extensions = [ext["name"] for ext in query["extensions"]]
self._extensions = sorted(list(set(self._extensions))) | [
"def",
"_get_site_info",
"(",
"self",
")",
":",
"response",
"=",
"self",
".",
"wiki_request",
"(",
"{",
"\"meta\"",
":",
"\"siteinfo\"",
",",
"\"siprop\"",
":",
"\"extensions|general\"",
"}",
")",
"# parse what we need out here!",
"query",
"=",
"response",
".",
"get",
"(",
"\"query\"",
",",
"None",
")",
"if",
"query",
"is",
"None",
"or",
"query",
".",
"get",
"(",
"\"general\"",
",",
"None",
")",
"is",
"None",
":",
"raise",
"MediaWikiException",
"(",
"\"Missing query in response\"",
")",
"gen",
"=",
"query",
".",
"get",
"(",
"\"general\"",
",",
"None",
")",
"api_version",
"=",
"gen",
"[",
"\"generator\"",
"]",
".",
"split",
"(",
"\" \"",
")",
"[",
"1",
"]",
".",
"split",
"(",
"\"-\"",
")",
"[",
"0",
"]",
"major_minor",
"=",
"api_version",
".",
"split",
"(",
"\".\"",
")",
"for",
"i",
",",
"item",
"in",
"enumerate",
"(",
"major_minor",
")",
":",
"major_minor",
"[",
"i",
"]",
"=",
"int",
"(",
"item",
")",
"self",
".",
"_api_version",
"=",
"tuple",
"(",
"major_minor",
")",
"self",
".",
"_api_version_str",
"=",
"\".\"",
".",
"join",
"(",
"[",
"str",
"(",
"x",
")",
"for",
"x",
"in",
"self",
".",
"_api_version",
"]",
")",
"# parse the base url out",
"tmp",
"=",
"gen",
".",
"get",
"(",
"\"server\"",
",",
"\"\"",
")",
"if",
"tmp",
"==",
"\"\"",
":",
"raise",
"MediaWikiException",
"(",
"\"Unable to parse base url\"",
")",
"if",
"tmp",
".",
"startswith",
"(",
"\"http://\"",
")",
"or",
"tmp",
".",
"startswith",
"(",
"\"https://\"",
")",
":",
"self",
".",
"_base_url",
"=",
"tmp",
"elif",
"gen",
"[",
"\"base\"",
"]",
".",
"startswith",
"(",
"\"https:\"",
")",
":",
"self",
".",
"_base_url",
"=",
"\"https:{}\"",
".",
"format",
"(",
"tmp",
")",
"else",
":",
"self",
".",
"_base_url",
"=",
"\"http:{}\"",
".",
"format",
"(",
"tmp",
")",
"self",
".",
"_extensions",
"=",
"[",
"ext",
"[",
"\"name\"",
"]",
"for",
"ext",
"in",
"query",
"[",
"\"extensions\"",
"]",
"]",
"self",
".",
"_extensions",
"=",
"sorted",
"(",
"list",
"(",
"set",
"(",
"self",
".",
"_extensions",
")",
")",
")"
]
| Parse out the Wikimedia site information including
API Version and Extensions | [
"Parse",
"out",
"the",
"Wikimedia",
"site",
"information",
"including",
"API",
"Version",
"and",
"Extensions"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L835-L869 | train |
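A standalone sketch of the version and base-url parsing performed in `_get_site_info` above, using a made-up 'general' payload in place of a live siteinfo response:

    # hypothetical 'general' block; a real one comes from meta=siteinfo&siprop=general
    gen = {
        "generator": "MediaWiki 1.41.0-wmf.5",
        "server": "//en.wikipedia.org",
        "base": "https://en.wikipedia.org/wiki/Main_Page",
    }
    api_version = gen["generator"].split(" ")[1].split("-")[0]            # "1.41.0"
    version_tuple = tuple(int(part) for part in api_version.split("."))   # (1, 41, 0)
    base_url = gen["server"]
    if not base_url.startswith(("http://", "https://")):
        scheme = "https:" if gen["base"].startswith("https:") else "http:"
        base_url = scheme + base_url
    print(version_tuple, base_url)   # (1, 41, 0) https://en.wikipedia.org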
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki._check_error_response | def _check_error_response(response, query):
""" check for default error messages and throw correct exception """
if "error" in response:
http_error = ["HTTP request timed out.", "Pool queue is full"]
geo_error = [
"Page coordinates unknown.",
"One of the parameters gscoord, gspage, gsbbox is required",
"Invalid coordinate provided",
]
err = response["error"]["info"]
if err in http_error:
raise HTTPTimeoutError(query)
elif err in geo_error:
raise MediaWikiGeoCoordError(err)
else:
raise MediaWikiException(err) | python | def _check_error_response(response, query):
""" check for default error messages and throw correct exception """
if "error" in response:
http_error = ["HTTP request timed out.", "Pool queue is full"]
geo_error = [
"Page coordinates unknown.",
"One of the parameters gscoord, gspage, gsbbox is required",
"Invalid coordinate provided",
]
err = response["error"]["info"]
if err in http_error:
raise HTTPTimeoutError(query)
elif err in geo_error:
raise MediaWikiGeoCoordError(err)
else:
raise MediaWikiException(err) | [
"def",
"_check_error_response",
"(",
"response",
",",
"query",
")",
":",
"if",
"\"error\"",
"in",
"response",
":",
"http_error",
"=",
"[",
"\"HTTP request timed out.\"",
",",
"\"Pool queue is full\"",
"]",
"geo_error",
"=",
"[",
"\"Page coordinates unknown.\"",
",",
"\"One of the parameters gscoord, gspage, gsbbox is required\"",
",",
"\"Invalid coordinate provided\"",
",",
"]",
"err",
"=",
"response",
"[",
"\"error\"",
"]",
"[",
"\"info\"",
"]",
"if",
"err",
"in",
"http_error",
":",
"raise",
"HTTPTimeoutError",
"(",
"query",
")",
"elif",
"err",
"in",
"geo_error",
":",
"raise",
"MediaWikiGeoCoordError",
"(",
"err",
")",
"else",
":",
"raise",
"MediaWikiException",
"(",
"err",
")"
]
| check for default error messages and throw correct exception | [
"check",
"for",
"default",
"error",
"messages",
"and",
"throw",
"correct",
"exception"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L874-L889 | train |
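A caller-side sketch of how the exceptions raised by `_check_error_response` can be handled; the class names come from the code above and are assumed to be importable from `mediawiki.exceptions`, while the query string is only an example.

    from mediawiki import MediaWiki
    from mediawiki.exceptions import HTTPTimeoutError, MediaWikiException  # assumed module location

    wiki = MediaWiki()
    try:
        wiki.opensearch("python")
    except HTTPTimeoutError:
        print("the API reported a timeout or a full pool queue")
    except MediaWikiException as exc:
        print("other MediaWiki API error:", exc)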
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki._get_response | def _get_response(self, params):
""" wrap the call to the requests package """
return self._session.get(
self._api_url, params=params, timeout=self._timeout
).json(encoding="utf8") | python | def _get_response(self, params):
""" wrap the call to the requests package """
return self._session.get(
self._api_url, params=params, timeout=self._timeout
).json(encoding="utf8") | [
"def",
"_get_response",
"(",
"self",
",",
"params",
")",
":",
"return",
"self",
".",
"_session",
".",
"get",
"(",
"self",
".",
"_api_url",
",",
"params",
"=",
"params",
",",
"timeout",
"=",
"self",
".",
"_timeout",
")",
".",
"json",
"(",
"encoding",
"=",
"\"utf8\"",
")"
]
| wrap the call to the requests package | [
"wrap",
"the",
"call",
"to",
"the",
"requests",
"package"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L897-L901 | train |
barrust/mediawiki | mediawiki/mediawiki.py | MediaWiki._post_response | def _post_response(self, params):
""" wrap a post call to the requests package """
return self._session.post(
self._api_url, data=params, timeout=self._timeout
).json(encoding="utf8") | python | def _post_response(self, params):
""" wrap a post call to the requests package """
return self._session.post(
self._api_url, data=params, timeout=self._timeout
).json(encoding="utf8") | [
"def",
"_post_response",
"(",
"self",
",",
"params",
")",
":",
"return",
"self",
".",
"_session",
".",
"post",
"(",
"self",
".",
"_api_url",
",",
"data",
"=",
"params",
",",
"timeout",
"=",
"self",
".",
"_timeout",
")",
".",
"json",
"(",
"encoding",
"=",
"\"utf8\"",
")"
]
| wrap a post call to the requests package | [
"wrap",
"a",
"post",
"call",
"to",
"the",
"requests",
"package"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawiki.py#L903-L907 | train |
barrust/mediawiki | mediawiki/utilities.py | parse_all_arguments | def parse_all_arguments(func):
""" determine all positional and named arguments as a dict """
args = dict()
if sys.version_info < (3, 0):
func_args = inspect.getargspec(func)
if func_args.defaults is not None:
val = len(func_args.defaults)
for i, itm in enumerate(func_args.args[-val:]):
args[itm] = func_args.defaults[i]
else:
func_args = inspect.signature(func)
for itm in list(func_args.parameters)[1:]:
param = func_args.parameters[itm]
if param.default is not param.empty:
args[param.name] = param.default
return args | python | def parse_all_arguments(func):
""" determine all positional and named arguments as a dict """
args = dict()
if sys.version_info < (3, 0):
func_args = inspect.getargspec(func)
if func_args.defaults is not None:
val = len(func_args.defaults)
for i, itm in enumerate(func_args.args[-val:]):
args[itm] = func_args.defaults[i]
else:
func_args = inspect.signature(func)
for itm in list(func_args.parameters)[1:]:
param = func_args.parameters[itm]
if param.default is not param.empty:
args[param.name] = param.default
return args | [
"def",
"parse_all_arguments",
"(",
"func",
")",
":",
"args",
"=",
"dict",
"(",
")",
"if",
"sys",
".",
"version_info",
"<",
"(",
"3",
",",
"0",
")",
":",
"func_args",
"=",
"inspect",
".",
"getargspec",
"(",
"func",
")",
"if",
"func_args",
".",
"defaults",
"is",
"not",
"None",
":",
"val",
"=",
"len",
"(",
"func_args",
".",
"defaults",
")",
"for",
"i",
",",
"itm",
"in",
"enumerate",
"(",
"func_args",
".",
"args",
"[",
"-",
"val",
":",
"]",
")",
":",
"args",
"[",
"itm",
"]",
"=",
"func_args",
".",
"defaults",
"[",
"i",
"]",
"else",
":",
"func_args",
"=",
"inspect",
".",
"signature",
"(",
"func",
")",
"for",
"itm",
"in",
"list",
"(",
"func_args",
".",
"parameters",
")",
"[",
"1",
":",
"]",
":",
"param",
"=",
"func_args",
".",
"parameters",
"[",
"itm",
"]",
"if",
"param",
".",
"default",
"is",
"not",
"param",
".",
"empty",
":",
"args",
"[",
"param",
".",
"name",
"]",
"=",
"param",
".",
"default",
"return",
"args"
]
| determine all positional and named arguments as a dict | [
"determine",
"all",
"positional",
"and",
"named",
"arguments",
"as",
"a",
"dict"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/utilities.py#L11-L26 | train |
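A small sketch of what `parse_all_arguments` computes, mirroring its Python 3 branch on a sample function (the helper skips the first parameter, i.e. `self`):

    import inspect

    def sample(self, query, results=10, redirect=True):
        pass

    sig = inspect.signature(sample)
    defaults = {
        param.name: param.default
        for param in list(sig.parameters.values())[1:]   # skip the first ('self') parameter
        if param.default is not param.empty
    }
    print(defaults)   # {'results': 10, 'redirect': True}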
barrust/mediawiki | mediawiki/utilities.py | str_or_unicode | def str_or_unicode(text):
""" handle python 3 unicode and python 2.7 byte strings """
encoding = sys.stdout.encoding
if sys.version_info > (3, 0):
return text.encode(encoding).decode(encoding)
return text.encode(encoding) | python | def str_or_unicode(text):
""" handle python 3 unicode and python 2.7 byte strings """
encoding = sys.stdout.encoding
if sys.version_info > (3, 0):
return text.encode(encoding).decode(encoding)
return text.encode(encoding) | [
"def",
"str_or_unicode",
"(",
"text",
")",
":",
"encoding",
"=",
"sys",
".",
"stdout",
".",
"encoding",
"if",
"sys",
".",
"version_info",
">",
"(",
"3",
",",
"0",
")",
":",
"return",
"text",
".",
"encode",
"(",
"encoding",
")",
".",
"decode",
"(",
"encoding",
")",
"return",
"text",
".",
"encode",
"(",
"encoding",
")"
]
| handle python 3 unicode and python 2.7 byte strings | [
"handle",
"python",
"3",
"unicode",
"and",
"python",
"2",
".",
"7",
"byte",
"strings"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/utilities.py#L78-L83 | train |
barrust/mediawiki | mediawiki/utilities.py | is_relative_url | def is_relative_url(url):
""" simple method to determine if a url is relative or absolute """
if url.startswith("#"):
return None
if url.find("://") > 0 or url.startswith("//"):
# either 'http(s)://...' or '//cdn...' and therefore absolute
return False
return True | python | def is_relative_url(url):
""" simple method to determine if a url is relative or absolute """
if url.startswith("#"):
return None
if url.find("://") > 0 or url.startswith("//"):
# either 'http(s)://...' or '//cdn...' and therefore absolute
return False
return True | [
"def",
"is_relative_url",
"(",
"url",
")",
":",
"if",
"url",
".",
"startswith",
"(",
"\"#\"",
")",
":",
"return",
"None",
"if",
"url",
".",
"find",
"(",
"\"://\"",
")",
">",
"0",
"or",
"url",
".",
"startswith",
"(",
"\"//\"",
")",
":",
"# either 'http(s)://...' or '//cdn...' and therefore absolute",
"return",
"False",
"return",
"True"
]
| simple method to determine if a url is relative or absolute | [
"simple",
"method",
"to",
"determine",
"if",
"a",
"url",
"is",
"relative",
"or",
"absolute"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/utilities.py#L86-L93 | train |
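A quick illustration of the three-way contract of `is_relative_url` (None for fragments, False for absolute or protocol-relative URLs, True otherwise); the import path mirrors the file location shown in the record.

    from mediawiki.utilities import is_relative_url  # assumed importable from the utilities module

    for url in ("#References", "https://example.org/wiki/Chess", "//cdn.example.org/app.js", "/wiki/Chess"):
        print(url, "->", is_relative_url(url))
    # expected output order: None, False, False, True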
barrust/mediawiki | setup.py | read_file | def read_file(filepath):
""" read the file """
with io.open(filepath, "r") as filepointer:
res = filepointer.read()
return res | python | def read_file(filepath):
""" read the file """
with io.open(filepath, "r") as filepointer:
res = filepointer.read()
return res | [
"def",
"read_file",
"(",
"filepath",
")",
":",
"with",
"io",
".",
"open",
"(",
"filepath",
",",
"\"r\"",
")",
"as",
"filepointer",
":",
"res",
"=",
"filepointer",
".",
"read",
"(",
")",
"return",
"res"
]
| read the file | [
"read",
"the",
"file"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/setup.py#L14-L18 | train |
barrust/mediawiki | mediawiki/mediawikipage.py | MediaWikiPage._pull_content_revision_parent | def _pull_content_revision_parent(self):
""" combine the pulling of these three properties """
if self._revision_id is None:
query_params = {
"prop": "extracts|revisions",
"explaintext": "",
"rvprop": "ids",
}
query_params.update(self.__title_query_param())
request = self.mediawiki.wiki_request(query_params)
page_info = request["query"]["pages"][self.pageid]
self._content = page_info["extract"]
self._revision_id = page_info["revisions"][0]["revid"]
self._parent_id = page_info["revisions"][0]["parentid"]
return self._content, self._revision_id, self._parent_id | python | def _pull_content_revision_parent(self):
""" combine the pulling of these three properties """
if self._revision_id is None:
query_params = {
"prop": "extracts|revisions",
"explaintext": "",
"rvprop": "ids",
}
query_params.update(self.__title_query_param())
request = self.mediawiki.wiki_request(query_params)
page_info = request["query"]["pages"][self.pageid]
self._content = page_info["extract"]
self._revision_id = page_info["revisions"][0]["revid"]
self._parent_id = page_info["revisions"][0]["parentid"]
return self._content, self._revision_id, self._parent_id | [
"def",
"_pull_content_revision_parent",
"(",
"self",
")",
":",
"if",
"self",
".",
"_revision_id",
"is",
"None",
":",
"query_params",
"=",
"{",
"\"prop\"",
":",
"\"extracts|revisions\"",
",",
"\"explaintext\"",
":",
"\"\"",
",",
"\"rvprop\"",
":",
"\"ids\"",
",",
"}",
"query_params",
".",
"update",
"(",
"self",
".",
"__title_query_param",
"(",
")",
")",
"request",
"=",
"self",
".",
"mediawiki",
".",
"wiki_request",
"(",
"query_params",
")",
"page_info",
"=",
"request",
"[",
"\"query\"",
"]",
"[",
"\"pages\"",
"]",
"[",
"self",
".",
"pageid",
"]",
"self",
".",
"_content",
"=",
"page_info",
"[",
"\"extract\"",
"]",
"self",
".",
"_revision_id",
"=",
"page_info",
"[",
"\"revisions\"",
"]",
"[",
"0",
"]",
"[",
"\"revid\"",
"]",
"self",
".",
"_parent_id",
"=",
"page_info",
"[",
"\"revisions\"",
"]",
"[",
"0",
"]",
"[",
"\"parentid\"",
"]",
"return",
"self",
".",
"_content",
",",
"self",
".",
"_revision_id",
",",
"self",
".",
"_parent_id"
]
| combine the pulling of these three properties | [
"combine",
"the",
"pulling",
"of",
"these",
"three",
"properties"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawikipage.py#L127-L142 | train |
barrust/mediawiki | mediawiki/mediawikipage.py | MediaWikiPage.section | def section(self, section_title):
""" Plain text section content
Args:
section_title (str): Name of the section to pull
Returns:
str: The content of the section
Note:
Returns **None** if section title is not found; only text \
between title and next section or sub-section title is returned
Note:
Side effect is to also pull the content which can be slow
Note:
This is a parsing operation and not part of the standard API"""
section = "== {0} ==".format(section_title)
try:
content = self.content
index = content.index(section) + len(section)
# ensure we have the full section header...
while True:
if content[index + 1] == "=":
index += 1
else:
break
except ValueError:
return None
except IndexError:
pass
try:
next_index = self.content.index("==", index)
except ValueError:
next_index = len(self.content)
return self.content[index:next_index].lstrip("=").strip() | python | def section(self, section_title):
""" Plain text section content
Args:
section_title (str): Name of the section to pull
Returns:
str: The content of the section
Note:
Returns **None** if section title is not found; only text \
between title and next section or sub-section title is returned
Note:
Side effect is to also pull the content which can be slow
Note:
This is a parsing operation and not part of the standard API"""
section = "== {0} ==".format(section_title)
try:
content = self.content
index = content.index(section) + len(section)
# ensure we have the full section header...
while True:
if content[index + 1] == "=":
index += 1
else:
break
except ValueError:
return None
except IndexError:
pass
try:
next_index = self.content.index("==", index)
except ValueError:
next_index = len(self.content)
return self.content[index:next_index].lstrip("=").strip() | [
"def",
"section",
"(",
"self",
",",
"section_title",
")",
":",
"section",
"=",
"\"== {0} ==\"",
".",
"format",
"(",
"section_title",
")",
"try",
":",
"content",
"=",
"self",
".",
"content",
"index",
"=",
"content",
".",
"index",
"(",
"section",
")",
"+",
"len",
"(",
"section",
")",
"# ensure we have the full section header...",
"while",
"True",
":",
"if",
"content",
"[",
"index",
"+",
"1",
"]",
"==",
"\"=\"",
":",
"index",
"+=",
"1",
"else",
":",
"break",
"except",
"ValueError",
":",
"return",
"None",
"except",
"IndexError",
":",
"pass",
"try",
":",
"next_index",
"=",
"self",
".",
"content",
".",
"index",
"(",
"\"==\"",
",",
"index",
")",
"except",
"ValueError",
":",
"next_index",
"=",
"len",
"(",
"self",
".",
"content",
")",
"return",
"self",
".",
"content",
"[",
"index",
":",
"next_index",
"]",
".",
"lstrip",
"(",
"\"=\"",
")",
".",
"strip",
"(",
")"
]
| Plain text section content
Args:
section_title (str): Name of the section to pull
Returns:
str: The content of the section
Note:
Returns **None** if section title is not found; only text \
between title and next section or sub-section title is returned
Note:
Side effect is to also pull the content which can be slow
Note:
This is a parsing operation and not part of the standard API | [
"Plain",
"text",
"section",
"content"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawikipage.py#L412-L447 | train |
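A usage sketch for `section` (the page and heading names are only examples; import path assumed): it returns the plain text between the requested heading and the next heading, or None when the heading is missing.

    from mediawiki import MediaWiki  # assumed top-level export

    wiki = MediaWiki()
    pg = wiki.page("Chess")
    print(pg.section("History"))           # text of the section, if such a heading exists
    print(pg.section("No such heading"))   # None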
barrust/mediawiki | mediawiki/mediawikipage.py | MediaWikiPage.parse_section_links | def parse_section_links(self, section_title):
""" Parse all links within a section
Args:
section_title (str): Name of the section to pull
Returns:
list: List of (title, url) tuples
Note:
Returns **None** if section title is not found
Note:
Side effect is to also pull the html which can be slow
Note:
This is a parsing operation and not part of the standard API"""
soup = BeautifulSoup(self.html, "html.parser")
headlines = soup.find_all("span", {"class": "mw-headline"})
tmp_soup = BeautifulSoup(section_title, "html.parser")
tmp_sec_title = tmp_soup.get_text().lower()
id_tag = None
for headline in headlines:
tmp_id = headline.text
if tmp_id.lower() == tmp_sec_title:
id_tag = headline.get("id")
break
if id_tag is not None:
return self._parse_section_links(id_tag)
return None | python | def parse_section_links(self, section_title):
""" Parse all links within a section
Args:
section_title (str): Name of the section to pull
Returns:
list: List of (title, url) tuples
Note:
Returns **None** if section title is not found
Note:
Side effect is to also pull the html which can be slow
Note:
This is a parsing operation and not part of the standard API"""
soup = BeautifulSoup(self.html, "html.parser")
headlines = soup.find_all("span", {"class": "mw-headline"})
tmp_soup = BeautifulSoup(section_title, "html.parser")
tmp_sec_title = tmp_soup.get_text().lower()
id_tag = None
for headline in headlines:
tmp_id = headline.text
if tmp_id.lower() == tmp_sec_title:
id_tag = headline.get("id")
break
if id_tag is not None:
return self._parse_section_links(id_tag)
return None | [
"def",
"parse_section_links",
"(",
"self",
",",
"section_title",
")",
":",
"soup",
"=",
"BeautifulSoup",
"(",
"self",
".",
"html",
",",
"\"html.parser\"",
")",
"headlines",
"=",
"soup",
".",
"find_all",
"(",
"\"span\"",
",",
"{",
"\"class\"",
":",
"\"mw-headline\"",
"}",
")",
"tmp_soup",
"=",
"BeautifulSoup",
"(",
"section_title",
",",
"\"html.parser\"",
")",
"tmp_sec_title",
"=",
"tmp_soup",
".",
"get_text",
"(",
")",
".",
"lower",
"(",
")",
"id_tag",
"=",
"None",
"for",
"headline",
"in",
"headlines",
":",
"tmp_id",
"=",
"headline",
".",
"text",
"if",
"tmp_id",
".",
"lower",
"(",
")",
"==",
"tmp_sec_title",
":",
"id_tag",
"=",
"headline",
".",
"get",
"(",
"\"id\"",
")",
"break",
"if",
"id_tag",
"is",
"not",
"None",
":",
"return",
"self",
".",
"_parse_section_links",
"(",
"id_tag",
")",
"return",
"None"
]
| Parse all links within a section
Args:
section_title (str): Name of the section to pull
Returns:
list: List of (title, url) tuples
Note:
Returns **None** if section title is not found
Note:
Side effect is to also pull the html which can be slow
Note:
This is a parsing operation and not part of the standard API | [
"Parse",
"all",
"links",
"within",
"a",
"section"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawikipage.py#L449-L475 | train |
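A usage sketch for `parse_section_links` (example page and heading; import path assumed): it yields (text, url) tuples for links inside the named section, or None when the heading is not found.

    from mediawiki import MediaWiki  # assumed top-level export

    wiki = MediaWiki()
    pg = wiki.page("Chess")
    links = pg.parse_section_links("History")
    if links:
        for text, url in links[:5]:
            print(text, "->", url)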
barrust/mediawiki | mediawiki/mediawikipage.py | MediaWikiPage.__load | def __load(self, redirect=True, preload=False):
""" load the basic page information """
query_params = {
"prop": "info|pageprops",
"inprop": "url",
"ppprop": "disambiguation",
"redirects": "",
}
query_params.update(self.__title_query_param())
request = self.mediawiki.wiki_request(query_params)
query = request["query"]
pageid = list(query["pages"].keys())[0]
page = query["pages"][pageid]
# determine result of the request
# missing is present if the page is missing
if "missing" in page:
self._raise_page_error()
# redirects is present in query if page is a redirect
elif "redirects" in query:
self._handle_redirect(redirect, preload, query, page)
# if pageprops is returned, it must be a disambiguation error
elif "pageprops" in page:
self._raise_disambiguation_error(page, pageid)
else:
self.pageid = pageid
self.title = page["title"]
self.url = page["fullurl"] | python | def __load(self, redirect=True, preload=False):
""" load the basic page information """
query_params = {
"prop": "info|pageprops",
"inprop": "url",
"ppprop": "disambiguation",
"redirects": "",
}
query_params.update(self.__title_query_param())
request = self.mediawiki.wiki_request(query_params)
query = request["query"]
pageid = list(query["pages"].keys())[0]
page = query["pages"][pageid]
# determine result of the request
# missing is present if the page is missing
if "missing" in page:
self._raise_page_error()
# redirects is present in query if page is a redirect
elif "redirects" in query:
self._handle_redirect(redirect, preload, query, page)
# if pageprops is returned, it must be a disambiguation error
elif "pageprops" in page:
self._raise_disambiguation_error(page, pageid)
else:
self.pageid = pageid
self.title = page["title"]
self.url = page["fullurl"] | [
"def",
"__load",
"(",
"self",
",",
"redirect",
"=",
"True",
",",
"preload",
"=",
"False",
")",
":",
"query_params",
"=",
"{",
"\"prop\"",
":",
"\"info|pageprops\"",
",",
"\"inprop\"",
":",
"\"url\"",
",",
"\"ppprop\"",
":",
"\"disambiguation\"",
",",
"\"redirects\"",
":",
"\"\"",
",",
"}",
"query_params",
".",
"update",
"(",
"self",
".",
"__title_query_param",
"(",
")",
")",
"request",
"=",
"self",
".",
"mediawiki",
".",
"wiki_request",
"(",
"query_params",
")",
"query",
"=",
"request",
"[",
"\"query\"",
"]",
"pageid",
"=",
"list",
"(",
"query",
"[",
"\"pages\"",
"]",
".",
"keys",
"(",
")",
")",
"[",
"0",
"]",
"page",
"=",
"query",
"[",
"\"pages\"",
"]",
"[",
"pageid",
"]",
"# determine result of the request",
"# missing is present if the page is missing",
"if",
"\"missing\"",
"in",
"page",
":",
"self",
".",
"_raise_page_error",
"(",
")",
"# redirects is present in query if page is a redirect",
"elif",
"\"redirects\"",
"in",
"query",
":",
"self",
".",
"_handle_redirect",
"(",
"redirect",
",",
"preload",
",",
"query",
",",
"page",
")",
"# if pageprops is returned, it must be a disambiguation error",
"elif",
"\"pageprops\"",
"in",
"page",
":",
"self",
".",
"_raise_disambiguation_error",
"(",
"page",
",",
"pageid",
")",
"else",
":",
"self",
".",
"pageid",
"=",
"pageid",
"self",
".",
"title",
"=",
"page",
"[",
"\"title\"",
"]",
"self",
".",
"url",
"=",
"page",
"[",
"\"fullurl\"",
"]"
]
| load the basic page information | [
"load",
"the",
"basic",
"page",
"information"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawikipage.py#L478-L507 | train |
barrust/mediawiki | mediawiki/mediawikipage.py | MediaWikiPage._raise_page_error | def _raise_page_error(self):
""" raise the correct type of page error """
if hasattr(self, "title"):
raise PageError(title=self.title)
else:
raise PageError(pageid=self.pageid) | python | def _raise_page_error(self):
""" raise the correct type of page error """
if hasattr(self, "title"):
raise PageError(title=self.title)
else:
raise PageError(pageid=self.pageid) | [
"def",
"_raise_page_error",
"(",
"self",
")",
":",
"if",
"hasattr",
"(",
"self",
",",
"\"title\"",
")",
":",
"raise",
"PageError",
"(",
"title",
"=",
"self",
".",
"title",
")",
"else",
":",
"raise",
"PageError",
"(",
"pageid",
"=",
"self",
".",
"pageid",
")"
]
| raise the correct type of page error | [
"raise",
"the",
"correct",
"type",
"of",
"page",
"error"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawikipage.py#L509-L514 | train |
barrust/mediawiki | mediawiki/mediawikipage.py | MediaWikiPage._raise_disambiguation_error | def _raise_disambiguation_error(self, page, pageid):
""" parse and throw a disambiguation error """
query_params = {
"prop": "revisions",
"rvprop": "content",
"rvparse": "",
"rvlimit": 1,
}
query_params.update(self.__title_query_param())
request = self.mediawiki.wiki_request(query_params)
html = request["query"]["pages"][pageid]["revisions"][0]["*"]
lis = BeautifulSoup(html, "html.parser").find_all("li")
filtered_lis = [
li for li in lis if "tocsection" not in "".join(li.get("class", list()))
]
may_refer_to = [li.a.get_text() for li in filtered_lis if li.a]
disambiguation = list()
for lis_item in filtered_lis:
item = lis_item.find_all("a")
one_disambiguation = dict()
one_disambiguation["description"] = lis_item.text
if item and hasattr(item[0], "title"):
one_disambiguation["title"] = item[0]["title"]
else:
# these are non-linked records so double up the text
one_disambiguation["title"] = lis_item.text
disambiguation.append(one_disambiguation)
raise DisambiguationError(
getattr(self, "title", page["title"]),
may_refer_to,
page["fullurl"],
disambiguation,
) | python | def _raise_disambiguation_error(self, page, pageid):
""" parse and throw a disambiguation error """
query_params = {
"prop": "revisions",
"rvprop": "content",
"rvparse": "",
"rvlimit": 1,
}
query_params.update(self.__title_query_param())
request = self.mediawiki.wiki_request(query_params)
html = request["query"]["pages"][pageid]["revisions"][0]["*"]
lis = BeautifulSoup(html, "html.parser").find_all("li")
filtered_lis = [
li for li in lis if "tocsection" not in "".join(li.get("class", list()))
]
may_refer_to = [li.a.get_text() for li in filtered_lis if li.a]
disambiguation = list()
for lis_item in filtered_lis:
item = lis_item.find_all("a")
one_disambiguation = dict()
one_disambiguation["description"] = lis_item.text
if item and hasattr(item[0], "title"):
one_disambiguation["title"] = item[0]["title"]
else:
# these are non-linked records so double up the text
one_disambiguation["title"] = lis_item.text
disambiguation.append(one_disambiguation)
raise DisambiguationError(
getattr(self, "title", page["title"]),
may_refer_to,
page["fullurl"],
disambiguation,
) | [
"def",
"_raise_disambiguation_error",
"(",
"self",
",",
"page",
",",
"pageid",
")",
":",
"query_params",
"=",
"{",
"\"prop\"",
":",
"\"revisions\"",
",",
"\"rvprop\"",
":",
"\"content\"",
",",
"\"rvparse\"",
":",
"\"\"",
",",
"\"rvlimit\"",
":",
"1",
",",
"}",
"query_params",
".",
"update",
"(",
"self",
".",
"__title_query_param",
"(",
")",
")",
"request",
"=",
"self",
".",
"mediawiki",
".",
"wiki_request",
"(",
"query_params",
")",
"html",
"=",
"request",
"[",
"\"query\"",
"]",
"[",
"\"pages\"",
"]",
"[",
"pageid",
"]",
"[",
"\"revisions\"",
"]",
"[",
"0",
"]",
"[",
"\"*\"",
"]",
"lis",
"=",
"BeautifulSoup",
"(",
"html",
",",
"\"html.parser\"",
")",
".",
"find_all",
"(",
"\"li\"",
")",
"filtered_lis",
"=",
"[",
"li",
"for",
"li",
"in",
"lis",
"if",
"\"tocsection\"",
"not",
"in",
"\"\"",
".",
"join",
"(",
"li",
".",
"get",
"(",
"\"class\"",
",",
"list",
"(",
")",
")",
")",
"]",
"may_refer_to",
"=",
"[",
"li",
".",
"a",
".",
"get_text",
"(",
")",
"for",
"li",
"in",
"filtered_lis",
"if",
"li",
".",
"a",
"]",
"disambiguation",
"=",
"list",
"(",
")",
"for",
"lis_item",
"in",
"filtered_lis",
":",
"item",
"=",
"lis_item",
".",
"find_all",
"(",
"\"a\"",
")",
"one_disambiguation",
"=",
"dict",
"(",
")",
"one_disambiguation",
"[",
"\"description\"",
"]",
"=",
"lis_item",
".",
"text",
"if",
"item",
"and",
"hasattr",
"(",
"item",
"[",
"0",
"]",
",",
"\"title\"",
")",
":",
"one_disambiguation",
"[",
"\"title\"",
"]",
"=",
"item",
"[",
"0",
"]",
"[",
"\"title\"",
"]",
"else",
":",
"# these are non-linked records so double up the text",
"one_disambiguation",
"[",
"\"title\"",
"]",
"=",
"lis_item",
".",
"text",
"disambiguation",
".",
"append",
"(",
"one_disambiguation",
")",
"raise",
"DisambiguationError",
"(",
"getattr",
"(",
"self",
",",
"\"title\"",
",",
"page",
"[",
"\"title\"",
"]",
")",
",",
"may_refer_to",
",",
"page",
"[",
"\"fullurl\"",
"]",
",",
"disambiguation",
",",
")"
]
| parse and throw a disambiguation error | [
"parse",
"and",
"throw",
"a",
"disambiguation",
"error"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawikipage.py#L516-L550 | train |
barrust/mediawiki | mediawiki/mediawikipage.py | MediaWikiPage._parse_section_links | def _parse_section_links(self, id_tag):
""" given a section id, parse the links in the unordered list """
soup = BeautifulSoup(self.html, "html.parser")
info = soup.find("span", {"id": id_tag})
all_links = list()
if info is None:
return all_links
for node in soup.find(id=id_tag).parent.next_siblings:
if not isinstance(node, Tag):
continue
elif node.get("role", "") == "navigation":
continue
elif "infobox" in node.get("class", []):
continue
# this is actually the child node's class...
is_headline = node.find("span", {"class": "mw-headline"})
if is_headline is not None:
break
elif node.name == "a":
all_links.append(self.__parse_link_info(node))
else:
for link in node.findAll("a"):
all_links.append(self.__parse_link_info(link))
return all_links | python | def _parse_section_links(self, id_tag):
""" given a section id, parse the links in the unordered list """
soup = BeautifulSoup(self.html, "html.parser")
info = soup.find("span", {"id": id_tag})
all_links = list()
if info is None:
return all_links
for node in soup.find(id=id_tag).parent.next_siblings:
if not isinstance(node, Tag):
continue
elif node.get("role", "") == "navigation":
continue
elif "infobox" in node.get("class", []):
continue
# this is actually the child node's class...
is_headline = node.find("span", {"class": "mw-headline"})
if is_headline is not None:
break
elif node.name == "a":
all_links.append(self.__parse_link_info(node))
else:
for link in node.findAll("a"):
all_links.append(self.__parse_link_info(link))
return all_links | [
"def",
"_parse_section_links",
"(",
"self",
",",
"id_tag",
")",
":",
"soup",
"=",
"BeautifulSoup",
"(",
"self",
".",
"html",
",",
"\"html.parser\"",
")",
"info",
"=",
"soup",
".",
"find",
"(",
"\"span\"",
",",
"{",
"\"id\"",
":",
"id_tag",
"}",
")",
"all_links",
"=",
"list",
"(",
")",
"if",
"info",
"is",
"None",
":",
"return",
"all_links",
"for",
"node",
"in",
"soup",
".",
"find",
"(",
"id",
"=",
"id_tag",
")",
".",
"parent",
".",
"next_siblings",
":",
"if",
"not",
"isinstance",
"(",
"node",
",",
"Tag",
")",
":",
"continue",
"elif",
"node",
".",
"get",
"(",
"\"role\"",
",",
"\"\"",
")",
"==",
"\"navigation\"",
":",
"continue",
"elif",
"\"infobox\"",
"in",
"node",
".",
"get",
"(",
"\"class\"",
",",
"[",
"]",
")",
":",
"continue",
"# this is actually the child node's class...",
"is_headline",
"=",
"node",
".",
"find",
"(",
"\"span\"",
",",
"{",
"\"class\"",
":",
"\"mw-headline\"",
"}",
")",
"if",
"is_headline",
"is",
"not",
"None",
":",
"break",
"elif",
"node",
".",
"name",
"==",
"\"a\"",
":",
"all_links",
".",
"append",
"(",
"self",
".",
"__parse_link_info",
"(",
"node",
")",
")",
"else",
":",
"for",
"link",
"in",
"node",
".",
"findAll",
"(",
"\"a\"",
")",
":",
"all_links",
".",
"append",
"(",
"self",
".",
"__parse_link_info",
"(",
"link",
")",
")",
"return",
"all_links"
]
| given a section id, parse the links in the unordered list | [
"given",
"a",
"section",
"id",
"parse",
"the",
"links",
"in",
"the",
"unordered",
"list"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawikipage.py#L613-L639 | train |
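_parse_section_links above walks the siblings that follow a section's headline span and harvests anchors until the next headline appears. A small self-contained sketch of the same BeautifulSoup pattern, run against invented HTML rather than a live page:

from bs4 import BeautifulSoup, Tag

html = """
<h2><span class="mw-headline" id="External_links">External links</span></h2>
<ul><li><a href="https://example.org">Example</a></li></ul>
<h2><span class="mw-headline" id="Next">Next</span></h2>
"""

soup = BeautifulSoup(html, "html.parser")
links = []
for node in soup.find(id="External_links").parent.next_siblings:
    if not isinstance(node, Tag):
        continue  # skip bare NavigableString nodes between tags
    if node.find("span", {"class": "mw-headline"}) is not None:
        break  # stop at the next section headline
    for anchor in node.find_all("a"):
        links.append((anchor.get_text(), anchor.get("href")))

print(links)  # [('Example', 'https://example.org')]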
barrust/mediawiki | mediawiki/mediawikipage.py | MediaWikiPage._parse_sections | def _parse_sections(self):
""" parse sections and TOC """
def _list_to_dict(_dict, path, sec):
tmp = _dict
for elm in path[:-1]:
tmp = tmp[elm]
tmp[sec] = OrderedDict()
self._sections = list()
section_regexp = r"\n==* .* ==*\n" # '== {STUFF_NOT_\n} =='
found_obj = re.findall(section_regexp, self.content)
res = OrderedDict()
path = list()
last_depth = 0
for obj in found_obj:
depth = obj.count("=") / 2 # this gets us to the single side...
depth -= 2 # now, we can calculate depth
sec = obj.lstrip("\n= ").rstrip(" =\n")
if depth == 0:
last_depth = 0
path = [sec]
res[sec] = OrderedDict()
elif depth > last_depth:
last_depth = depth
path.append(sec)
_list_to_dict(res, path, sec)
elif depth < last_depth:
# path.pop()
while last_depth > depth:
path.pop()
last_depth -= 1
path.pop()
path.append(sec)
_list_to_dict(res, path, sec)
last_depth = depth
else:
path.pop()
path.append(sec)
_list_to_dict(res, path, sec)
last_depth = depth
self._sections.append(sec)
self._table_of_contents = res | python | def _parse_sections(self):
""" parse sections and TOC """
def _list_to_dict(_dict, path, sec):
tmp = _dict
for elm in path[:-1]:
tmp = tmp[elm]
tmp[sec] = OrderedDict()
self._sections = list()
section_regexp = r"\n==* .* ==*\n" # '== {STUFF_NOT_\n} =='
found_obj = re.findall(section_regexp, self.content)
res = OrderedDict()
path = list()
last_depth = 0
for obj in found_obj:
depth = obj.count("=") / 2 # this gets us to the single side...
depth -= 2 # now, we can calculate depth
sec = obj.lstrip("\n= ").rstrip(" =\n")
if depth == 0:
last_depth = 0
path = [sec]
res[sec] = OrderedDict()
elif depth > last_depth:
last_depth = depth
path.append(sec)
_list_to_dict(res, path, sec)
elif depth < last_depth:
# path.pop()
while last_depth > depth:
path.pop()
last_depth -= 1
path.pop()
path.append(sec)
_list_to_dict(res, path, sec)
last_depth = depth
else:
path.pop()
path.append(sec)
_list_to_dict(res, path, sec)
last_depth = depth
self._sections.append(sec)
self._table_of_contents = res | [
"def",
"_parse_sections",
"(",
"self",
")",
":",
"def",
"_list_to_dict",
"(",
"_dict",
",",
"path",
",",
"sec",
")",
":",
"tmp",
"=",
"_dict",
"for",
"elm",
"in",
"path",
"[",
":",
"-",
"1",
"]",
":",
"tmp",
"=",
"tmp",
"[",
"elm",
"]",
"tmp",
"[",
"sec",
"]",
"=",
"OrderedDict",
"(",
")",
"self",
".",
"_sections",
"=",
"list",
"(",
")",
"section_regexp",
"=",
"r\"\\n==* .* ==*\\n\"",
"# '== {STUFF_NOT_\\n} =='",
"found_obj",
"=",
"re",
".",
"findall",
"(",
"section_regexp",
",",
"self",
".",
"content",
")",
"res",
"=",
"OrderedDict",
"(",
")",
"path",
"=",
"list",
"(",
")",
"last_depth",
"=",
"0",
"for",
"obj",
"in",
"found_obj",
":",
"depth",
"=",
"obj",
".",
"count",
"(",
"\"=\"",
")",
"/",
"2",
"# this gets us to the single side...",
"depth",
"-=",
"2",
"# now, we can calculate depth",
"sec",
"=",
"obj",
".",
"lstrip",
"(",
"\"\\n= \"",
")",
".",
"rstrip",
"(",
"\" =\\n\"",
")",
"if",
"depth",
"==",
"0",
":",
"last_depth",
"=",
"0",
"path",
"=",
"[",
"sec",
"]",
"res",
"[",
"sec",
"]",
"=",
"OrderedDict",
"(",
")",
"elif",
"depth",
">",
"last_depth",
":",
"last_depth",
"=",
"depth",
"path",
".",
"append",
"(",
"sec",
")",
"_list_to_dict",
"(",
"res",
",",
"path",
",",
"sec",
")",
"elif",
"depth",
"<",
"last_depth",
":",
"# path.pop()",
"while",
"last_depth",
">",
"depth",
":",
"path",
".",
"pop",
"(",
")",
"last_depth",
"-=",
"1",
"path",
".",
"pop",
"(",
")",
"path",
".",
"append",
"(",
"sec",
")",
"_list_to_dict",
"(",
"res",
",",
"path",
",",
"sec",
")",
"last_depth",
"=",
"depth",
"else",
":",
"path",
".",
"pop",
"(",
")",
"path",
".",
"append",
"(",
"sec",
")",
"_list_to_dict",
"(",
"res",
",",
"path",
",",
"sec",
")",
"last_depth",
"=",
"depth",
"self",
".",
"_sections",
".",
"append",
"(",
"sec",
")",
"self",
".",
"_table_of_contents",
"=",
"res"
]
| parse sections and TOC | [
"parse",
"sections",
"and",
"TOC"
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawikipage.py#L654-L699 | train |
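_parse_sections above derives nesting depth from the number of '=' signs around each heading in the plain-text extract. A standalone illustration of that depth calculation on sample wikitext-style content (the sample text is invented for the demo and the loop is simplified, not the library's own bookkeeping):

import re

content = "\n== History ==\ntext\n=== Early years ===\ntext\n== Geography ==\ntext\n"

depths = {}
for heading in re.findall(r"\n==* .* ==*\n", content):
    depth = heading.count("=") // 2 - 2      # '== x ==' -> 0, '=== x ===' -> 1
    title = heading.lstrip("\n= ").rstrip(" =\n")
    depths[title] = depth

print(depths)  # {'History': 0, 'Early years': 1, 'Geography': 0}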
barrust/mediawiki | mediawiki/mediawikipage.py | MediaWikiPage.__pull_combined_properties | def __pull_combined_properties(self):
""" something here... """
query_params = {
"titles": self.title,
"prop": "extracts|redirects|links|coordinates|categories|extlinks",
"continue": dict(),
# summary
"explaintext": "",
"exintro": "", # full first section for the summary!
# redirects
"rdprop": "title",
"rdlimit": "max",
# links
"plnamespace": 0,
"pllimit": "max",
# coordinates
"colimit": "max",
# categories
"cllimit": "max",
"clshow": "!hidden",
# references
"ellimit": "max",
}
last_cont = dict()
results = dict()
idx = 0
while True:
params = query_params.copy()
params.update(last_cont)
request = self.mediawiki.wiki_request(params)
idx += 1
# print(idx)
# quick exit
if "query" not in request:
# print(request)
break
keys = [
"extracts",
"redirects",
"links",
"coordinates",
"categories",
"extlinks",
]
new_cont = request.get("continue")
request = request["query"]["pages"][self.pageid]
if not results:
results = request
else:
for key in keys:
if key in request and request.get(key) is not None:
val = request.get(key)
tmp = results.get(key)
if isinstance(tmp, (list, tuple)):
results[key] = results.get(key, list) + val
if new_cont is None or new_cont == last_cont:
break
last_cont = new_cont
# redirects
tmp = [link["title"] for link in results.get("redirects", list())]
self._redirects = sorted(tmp)
# summary
self._summary = results["extract"]
# links
tmp = [link["title"] for link in results.get("links", list())]
self._links = sorted(tmp)
# categories
def _get_cat(val):
""" parse the category correctly """
tmp = val["title"]
if tmp.startswith(self.mediawiki.category_prefix):
return tmp[len(self.mediawiki.category_prefix) + 1 :]
return tmp
tmp = [_get_cat(link) for link in results.get("categories", list())]
self._categories = sorted(tmp)
# coordinates
if "coordinates" in results:
self._coordinates = (
Decimal(results["coordinates"][0]["lat"]),
Decimal(results["coordinates"][0]["lon"]),
)
# references
tmp = [link["*"] for link in results.get("extlinks", list())]
self._references = sorted(tmp) | python | def __pull_combined_properties(self):
""" something here... """
query_params = {
"titles": self.title,
"prop": "extracts|redirects|links|coordinates|categories|extlinks",
"continue": dict(),
# summary
"explaintext": "",
"exintro": "", # full first section for the summary!
# redirects
"rdprop": "title",
"rdlimit": "max",
# links
"plnamespace": 0,
"pllimit": "max",
# coordinates
"colimit": "max",
# categories
"cllimit": "max",
"clshow": "!hidden",
# references
"ellimit": "max",
}
last_cont = dict()
results = dict()
idx = 0
while True:
params = query_params.copy()
params.update(last_cont)
request = self.mediawiki.wiki_request(params)
idx += 1
# print(idx)
# quick exit
if "query" not in request:
# print(request)
break
keys = [
"extracts",
"redirects",
"links",
"coordinates",
"categories",
"extlinks",
]
new_cont = request.get("continue")
request = request["query"]["pages"][self.pageid]
if not results:
results = request
else:
for key in keys:
if key in request and request.get(key) is not None:
val = request.get(key)
tmp = results.get(key)
if isinstance(tmp, (list, tuple)):
results[key] = results.get(key, list) + val
if new_cont is None or new_cont == last_cont:
break
last_cont = new_cont
# redirects
tmp = [link["title"] for link in results.get("redirects", list())]
self._redirects = sorted(tmp)
# summary
self._summary = results["extract"]
# links
tmp = [link["title"] for link in results.get("links", list())]
self._links = sorted(tmp)
# categories
def _get_cat(val):
""" parse the category correctly """
tmp = val["title"]
if tmp.startswith(self.mediawiki.category_prefix):
return tmp[len(self.mediawiki.category_prefix) + 1 :]
return tmp
tmp = [_get_cat(link) for link in results.get("categories", list())]
self._categories = sorted(tmp)
# coordinates
if "coordinates" in results:
self._coordinates = (
Decimal(results["coordinates"][0]["lat"]),
Decimal(results["coordinates"][0]["lon"]),
)
# references
tmp = [link["*"] for link in results.get("extlinks", list())]
self._references = sorted(tmp) | [
"def",
"__pull_combined_properties",
"(",
"self",
")",
":",
"query_params",
"=",
"{",
"\"titles\"",
":",
"self",
".",
"title",
",",
"\"prop\"",
":",
"\"extracts|redirects|links|coordinates|categories|extlinks\"",
",",
"\"continue\"",
":",
"dict",
"(",
")",
",",
"# summary",
"\"explaintext\"",
":",
"\"\"",
",",
"\"exintro\"",
":",
"\"\"",
",",
"# full first section for the summary!",
"# redirects",
"\"rdprop\"",
":",
"\"title\"",
",",
"\"rdlimit\"",
":",
"\"max\"",
",",
"# links",
"\"plnamespace\"",
":",
"0",
",",
"\"pllimit\"",
":",
"\"max\"",
",",
"# coordinates",
"\"colimit\"",
":",
"\"max\"",
",",
"# categories",
"\"cllimit\"",
":",
"\"max\"",
",",
"\"clshow\"",
":",
"\"!hidden\"",
",",
"# references",
"\"ellimit\"",
":",
"\"max\"",
",",
"}",
"last_cont",
"=",
"dict",
"(",
")",
"results",
"=",
"dict",
"(",
")",
"idx",
"=",
"0",
"while",
"True",
":",
"params",
"=",
"query_params",
".",
"copy",
"(",
")",
"params",
".",
"update",
"(",
"last_cont",
")",
"request",
"=",
"self",
".",
"mediawiki",
".",
"wiki_request",
"(",
"params",
")",
"idx",
"+=",
"1",
"# print(idx)",
"# quick exit",
"if",
"\"query\"",
"not",
"in",
"request",
":",
"# print(request)",
"break",
"keys",
"=",
"[",
"\"extracts\"",
",",
"\"redirects\"",
",",
"\"links\"",
",",
"\"coordinates\"",
",",
"\"categories\"",
",",
"\"extlinks\"",
",",
"]",
"new_cont",
"=",
"request",
".",
"get",
"(",
"\"continue\"",
")",
"request",
"=",
"request",
"[",
"\"query\"",
"]",
"[",
"\"pages\"",
"]",
"[",
"self",
".",
"pageid",
"]",
"if",
"not",
"results",
":",
"results",
"=",
"request",
"else",
":",
"for",
"key",
"in",
"keys",
":",
"if",
"key",
"in",
"request",
"and",
"request",
".",
"get",
"(",
"key",
")",
"is",
"not",
"None",
":",
"val",
"=",
"request",
".",
"get",
"(",
"key",
")",
"tmp",
"=",
"results",
".",
"get",
"(",
"key",
")",
"if",
"isinstance",
"(",
"tmp",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"results",
"[",
"key",
"]",
"=",
"results",
".",
"get",
"(",
"key",
",",
"list",
")",
"+",
"val",
"if",
"new_cont",
"is",
"None",
"or",
"new_cont",
"==",
"last_cont",
":",
"break",
"last_cont",
"=",
"new_cont",
"# redirects",
"tmp",
"=",
"[",
"link",
"[",
"\"title\"",
"]",
"for",
"link",
"in",
"results",
".",
"get",
"(",
"\"redirects\"",
",",
"list",
"(",
")",
")",
"]",
"self",
".",
"_redirects",
"=",
"sorted",
"(",
"tmp",
")",
"# summary",
"self",
".",
"_summary",
"=",
"results",
"[",
"\"extract\"",
"]",
"# links",
"tmp",
"=",
"[",
"link",
"[",
"\"title\"",
"]",
"for",
"link",
"in",
"results",
".",
"get",
"(",
"\"links\"",
",",
"list",
"(",
")",
")",
"]",
"self",
".",
"_links",
"=",
"sorted",
"(",
"tmp",
")",
"# categories",
"def",
"_get_cat",
"(",
"val",
")",
":",
"\"\"\" parse the category correctly \"\"\"",
"tmp",
"=",
"val",
"[",
"\"title\"",
"]",
"if",
"tmp",
".",
"startswith",
"(",
"self",
".",
"mediawiki",
".",
"category_prefix",
")",
":",
"return",
"tmp",
"[",
"len",
"(",
"self",
".",
"mediawiki",
".",
"category_prefix",
")",
"+",
"1",
":",
"]",
"return",
"tmp",
"tmp",
"=",
"[",
"_get_cat",
"(",
"link",
")",
"for",
"link",
"in",
"results",
".",
"get",
"(",
"\"categories\"",
",",
"list",
"(",
")",
")",
"]",
"self",
".",
"_categories",
"=",
"sorted",
"(",
"tmp",
")",
"# coordinates",
"if",
"\"coordinates\"",
"in",
"results",
":",
"self",
".",
"_coordinates",
"=",
"(",
"Decimal",
"(",
"results",
"[",
"\"coordinates\"",
"]",
"[",
"0",
"]",
"[",
"\"lat\"",
"]",
")",
",",
"Decimal",
"(",
"results",
"[",
"\"coordinates\"",
"]",
"[",
"0",
"]",
"[",
"\"lon\"",
"]",
")",
",",
")",
"# references",
"tmp",
"=",
"[",
"link",
"[",
"\"*\"",
"]",
"for",
"link",
"in",
"results",
".",
"get",
"(",
"\"extlinks\"",
",",
"list",
"(",
")",
")",
"]",
"self",
".",
"_references",
"=",
"sorted",
"(",
"tmp",
")"
]
| something here... | [
"something",
"here",
"..."
]
| 292e0be6c752409062dceed325d74839caf16a9b | https://github.com/barrust/mediawiki/blob/292e0be6c752409062dceed325d74839caf16a9b/mediawiki/mediawikipage.py#L707-L803 | train |
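The combined-properties query above relies on the MediaWiki API's continuation mechanism, merging successive result pages until the server stops returning a continue token. A hedged sketch of that pagination loop with plain requests; the endpoint URL, title and property set are illustrative assumptions, and the snippet needs network access to actually run.

import requests

API = "https://en.wikipedia.org/w/api.php"   # assumed endpoint for the demo
params = {
    "action": "query",
    "format": "json",
    "titles": "Python (programming language)",
    "prop": "links",
    "pllimit": "max",
    "continue": "",
}

links = []
last_cont = {}
while True:
    response = requests.get(API, params={**params, **last_cont}).json()
    pages = response.get("query", {}).get("pages", {})
    for page in pages.values():
        links.extend(link["title"] for link in page.get("links", []))
    new_cont = response.get("continue")
    if not new_cont or new_cont == last_cont:
        break   # no more result pages to merge
    last_cont = new_cont

print(len(links), "links collected")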
domwillcode/yale-smart-alarm-client | yalesmartalarmclient/client.py | YaleSmartAlarmClient.is_armed | def is_armed(self):
"""Return True or False if the system is armed in any way"""
alarm_code = self.get_armed_status()
if alarm_code == YALE_STATE_ARM_FULL:
return True
if alarm_code == YALE_STATE_ARM_PARTIAL:
return True
return False | python | def is_armed(self):
"""Return True or False if the system is armed in any way"""
alarm_code = self.get_armed_status()
if alarm_code == YALE_STATE_ARM_FULL:
return True
if alarm_code == YALE_STATE_ARM_PARTIAL:
return True
return False | [
"def",
"is_armed",
"(",
"self",
")",
":",
"alarm_code",
"=",
"self",
".",
"get_armed_status",
"(",
")",
"if",
"alarm_code",
"==",
"YALE_STATE_ARM_FULL",
":",
"return",
"True",
"if",
"alarm_code",
"==",
"YALE_STATE_ARM_PARTIAL",
":",
"return",
"True",
"return",
"False"
]
| Return True or False if the system is armed in any way | [
"Return",
"True",
"or",
"False",
"if",
"the",
"system",
"is",
"armed",
"in",
"any",
"way"
]
| a33b6db31440b8611c63081e231597bf0629e098 | https://github.com/domwillcode/yale-smart-alarm-client/blob/a33b6db31440b8611c63081e231597bf0629e098/yalesmartalarmclient/client.py#L110-L120 | train |
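is_armed above simply collapses the two armed states into one boolean. A heavily hedged usage sketch follows; the credential-based constructor is an assumption about the client's setup and is not shown in this record.

from yalesmartalarmclient.client import YaleSmartAlarmClient

# Hypothetical credentials; the (username, password) constructor is an assumption.
client = YaleSmartAlarmClient("user@example.com", "secret-password")

if client.is_armed():
    print("Alarm is armed (fully or partially).")
else:
    print("Alarm is disarmed.")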
springload/draftjs_exporter | example.py | linkify | def linkify(props):
"""
Wrap plain URLs with link tags.
"""
match = props['match']
protocol = match.group(1)
url = match.group(2)
href = protocol + url
if props['block']['type'] == BLOCK_TYPES.CODE:
return href
link_props = {
'href': href,
}
if href.startswith('www'):
link_props['href'] = 'http://' + href
return DOM.create_element('a', link_props, href) | python | def linkify(props):
"""
Wrap plain URLs with link tags.
"""
match = props['match']
protocol = match.group(1)
url = match.group(2)
href = protocol + url
if props['block']['type'] == BLOCK_TYPES.CODE:
return href
link_props = {
'href': href,
}
if href.startswith('www'):
link_props['href'] = 'http://' + href
return DOM.create_element('a', link_props, href) | [
"def",
"linkify",
"(",
"props",
")",
":",
"match",
"=",
"props",
"[",
"'match'",
"]",
"protocol",
"=",
"match",
".",
"group",
"(",
"1",
")",
"url",
"=",
"match",
".",
"group",
"(",
"2",
")",
"href",
"=",
"protocol",
"+",
"url",
"if",
"props",
"[",
"'block'",
"]",
"[",
"'type'",
"]",
"==",
"BLOCK_TYPES",
".",
"CODE",
":",
"return",
"href",
"link_props",
"=",
"{",
"'href'",
":",
"href",
",",
"}",
"if",
"href",
".",
"startswith",
"(",
"'www'",
")",
":",
"link_props",
"[",
"'href'",
"]",
"=",
"'http://'",
"+",
"href",
"return",
"DOM",
".",
"create_element",
"(",
"'a'",
",",
"link_props",
",",
"href",
")"
]
| Wrap plain URLs with link tags. | [
"Wrap",
"plain",
"URLs",
"with",
"link",
"tags",
"."
]
| 1e391a46f162740f90511cde1ef615858e8de5cb | https://github.com/springload/draftjs_exporter/blob/1e391a46f162740f90511cde1ef615858e8de5cb/example.py#L85-L104 | train |
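linkify above expects a regex match whose first group is the protocol (or a bare "www.") and whose second group is the rest of the URL, prefixing bare "www" addresses with "http://". A standalone illustration of that grouping with a hypothetical pattern; the real pattern used by the example module is not shown in this record.

import re

# Hypothetical two-group URL pattern: group(1) = protocol or 'www.', group(2) = remainder.
URL_RE = re.compile(r"(https?://|www\.)([^\s]+)")

for text in ["see https://example.org/docs", "see www.example.org/docs"]:
    match = URL_RE.search(text)
    protocol, rest = match.group(1), match.group(2)
    href = protocol + rest
    if href.startswith("www"):
        href = "http://" + href   # same normalisation the decorator applies
    print(href)

# https://example.org/docs
# http://www.example.org/docs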
springload/draftjs_exporter | draftjs_exporter/options.py | Options.for_kind | def for_kind(kind_map, type_, fallback_key):
"""
Create an Options object from any mapping.
"""
if type_ not in kind_map:
if fallback_key not in kind_map:
raise ConfigException('"%s" is not in the config and has no fallback' % type_)
config = kind_map[fallback_key]
else:
config = kind_map[type_]
if isinstance(config, dict):
if 'element' not in config:
raise ConfigException('"%s" does not define an element' % type_)
opts = Options(type_, **config)
else:
opts = Options(type_, config)
return opts | python | def for_kind(kind_map, type_, fallback_key):
"""
Create an Options object from any mapping.
"""
if type_ not in kind_map:
if fallback_key not in kind_map:
raise ConfigException('"%s" is not in the config and has no fallback' % type_)
config = kind_map[fallback_key]
else:
config = kind_map[type_]
if isinstance(config, dict):
if 'element' not in config:
raise ConfigException('"%s" does not define an element' % type_)
opts = Options(type_, **config)
else:
opts = Options(type_, config)
return opts | [
"def",
"for_kind",
"(",
"kind_map",
",",
"type_",
",",
"fallback_key",
")",
":",
"if",
"type_",
"not",
"in",
"kind_map",
":",
"if",
"fallback_key",
"not",
"in",
"kind_map",
":",
"raise",
"ConfigException",
"(",
"'\"%s\" is not in the config and has no fallback'",
"%",
"type_",
")",
"config",
"=",
"kind_map",
"[",
"fallback_key",
"]",
"else",
":",
"config",
"=",
"kind_map",
"[",
"type_",
"]",
"if",
"isinstance",
"(",
"config",
",",
"dict",
")",
":",
"if",
"'element'",
"not",
"in",
"config",
":",
"raise",
"ConfigException",
"(",
"'\"%s\" does not define an element'",
"%",
"type_",
")",
"opts",
"=",
"Options",
"(",
"type_",
",",
"*",
"*",
"config",
")",
"else",
":",
"opts",
"=",
"Options",
"(",
"type_",
",",
"config",
")",
"return",
"opts"
]
| Create an Options object from any mapping. | [
"Create",
"an",
"Options",
"object",
"from",
"any",
"mapping",
"."
]
| 1e391a46f162740f90511cde1ef615858e8de5cb | https://github.com/springload/draftjs_exporter/blob/1e391a46f162740f90511cde1ef615858e8de5cb/draftjs_exporter/options.py#L31-L51 | train |
springload/draftjs_exporter | draftjs_exporter/html.py | HTML.render | def render(self, content_state=None):
"""
Starts the export process on a given piece of content state.
"""
if content_state is None:
content_state = {}
blocks = content_state.get('blocks', [])
wrapper_state = WrapperState(self.block_map, blocks)
document = DOM.create_element()
entity_map = content_state.get('entityMap', {})
min_depth = 0
for block in blocks:
depth = block['depth']
elt = self.render_block(block, entity_map, wrapper_state)
if depth > min_depth:
min_depth = depth
# At level 0, append the element to the document.
if depth == 0:
DOM.append_child(document, elt)
# If there is no block at depth 0, we need to add the wrapper that contains the whole tree to the document.
if min_depth > 0 and wrapper_state.stack.length() != 0:
DOM.append_child(document, wrapper_state.stack.tail().elt)
return DOM.render(document) | python | def render(self, content_state=None):
"""
Starts the export process on a given piece of content state.
"""
if content_state is None:
content_state = {}
blocks = content_state.get('blocks', [])
wrapper_state = WrapperState(self.block_map, blocks)
document = DOM.create_element()
entity_map = content_state.get('entityMap', {})
min_depth = 0
for block in blocks:
depth = block['depth']
elt = self.render_block(block, entity_map, wrapper_state)
if depth > min_depth:
min_depth = depth
# At level 0, append the element to the document.
if depth == 0:
DOM.append_child(document, elt)
# If there is no block at depth 0, we need to add the wrapper that contains the whole tree to the document.
if min_depth > 0 and wrapper_state.stack.length() != 0:
DOM.append_child(document, wrapper_state.stack.tail().elt)
return DOM.render(document) | [
"def",
"render",
"(",
"self",
",",
"content_state",
"=",
"None",
")",
":",
"if",
"content_state",
"is",
"None",
":",
"content_state",
"=",
"{",
"}",
"blocks",
"=",
"content_state",
".",
"get",
"(",
"'blocks'",
",",
"[",
"]",
")",
"wrapper_state",
"=",
"WrapperState",
"(",
"self",
".",
"block_map",
",",
"blocks",
")",
"document",
"=",
"DOM",
".",
"create_element",
"(",
")",
"entity_map",
"=",
"content_state",
".",
"get",
"(",
"'entityMap'",
",",
"{",
"}",
")",
"min_depth",
"=",
"0",
"for",
"block",
"in",
"blocks",
":",
"depth",
"=",
"block",
"[",
"'depth'",
"]",
"elt",
"=",
"self",
".",
"render_block",
"(",
"block",
",",
"entity_map",
",",
"wrapper_state",
")",
"if",
"depth",
">",
"min_depth",
":",
"min_depth",
"=",
"depth",
"# At level 0, append the element to the document.",
"if",
"depth",
"==",
"0",
":",
"DOM",
".",
"append_child",
"(",
"document",
",",
"elt",
")",
"# If there is no block at depth 0, we need to add the wrapper that contains the whole tree to the document.",
"if",
"min_depth",
">",
"0",
"and",
"wrapper_state",
".",
"stack",
".",
"length",
"(",
")",
"!=",
"0",
":",
"DOM",
".",
"append_child",
"(",
"document",
",",
"wrapper_state",
".",
"stack",
".",
"tail",
"(",
")",
".",
"elt",
")",
"return",
"DOM",
".",
"render",
"(",
"document",
")"
]
| Starts the export process on a given piece of content state. | [
"Starts",
"the",
"export",
"process",
"on",
"a",
"given",
"piece",
"of",
"content",
"state",
"."
]
| 1e391a46f162740f90511cde1ef615858e8de5cb | https://github.com/springload/draftjs_exporter/blob/1e391a46f162740f90511cde1ef615858e8de5cb/draftjs_exporter/html.py#L31-L59 | train |
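render above is the exporter's public entry point: it walks the Draft.js blocks, tracks wrapper depth, and serialises the resulting tree. A hedged end-to-end sketch with a minimal content state; constructing HTML() without a config and the exact output markup are assumptions based on the library's documented defaults.

from draftjs_exporter.html import HTML

exporter = HTML()   # assumed to fall back to the built-in default configuration

content_state = {
    "entityMap": {},
    "blocks": [
        {
            "key": "abc12",
            "text": "Hello, world",
            "type": "unstyled",
            "depth": 0,
            "inlineStyleRanges": [],
            "entityRanges": [],
        }
    ],
}

print(exporter.render(content_state))   # expected to be along the lines of <p>Hello, world</p>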
springload/draftjs_exporter | draftjs_exporter/html.py | HTML.build_command_groups | def build_command_groups(self, block):
"""
Creates block modification commands, grouped by start index,
with the text to apply them on.
"""
text = block['text']
commands = sorted(self.build_commands(block))
grouped = groupby(commands, Command.key)
listed = list(groupby(commands, Command.key))
sliced = []
i = 0
for start_index, commands in grouped:
if i < len(listed) - 1:
stop_index = listed[i + 1][0]
sliced.append((text[start_index:stop_index], list(commands)))
else:
sliced.append((text[start_index:start_index], list(commands)))
i += 1
return sliced | python | def build_command_groups(self, block):
"""
Creates block modification commands, grouped by start index,
with the text to apply them on.
"""
text = block['text']
commands = sorted(self.build_commands(block))
grouped = groupby(commands, Command.key)
listed = list(groupby(commands, Command.key))
sliced = []
i = 0
for start_index, commands in grouped:
if i < len(listed) - 1:
stop_index = listed[i + 1][0]
sliced.append((text[start_index:stop_index], list(commands)))
else:
sliced.append((text[start_index:start_index], list(commands)))
i += 1
return sliced | [
"def",
"build_command_groups",
"(",
"self",
",",
"block",
")",
":",
"text",
"=",
"block",
"[",
"'text'",
"]",
"commands",
"=",
"sorted",
"(",
"self",
".",
"build_commands",
"(",
"block",
")",
")",
"grouped",
"=",
"groupby",
"(",
"commands",
",",
"Command",
".",
"key",
")",
"listed",
"=",
"list",
"(",
"groupby",
"(",
"commands",
",",
"Command",
".",
"key",
")",
")",
"sliced",
"=",
"[",
"]",
"i",
"=",
"0",
"for",
"start_index",
",",
"commands",
"in",
"grouped",
":",
"if",
"i",
"<",
"len",
"(",
"listed",
")",
"-",
"1",
":",
"stop_index",
"=",
"listed",
"[",
"i",
"+",
"1",
"]",
"[",
"0",
"]",
"sliced",
".",
"append",
"(",
"(",
"text",
"[",
"start_index",
":",
"stop_index",
"]",
",",
"list",
"(",
"commands",
")",
")",
")",
"else",
":",
"sliced",
".",
"append",
"(",
"(",
"text",
"[",
"start_index",
":",
"start_index",
"]",
",",
"list",
"(",
"commands",
")",
")",
")",
"i",
"+=",
"1",
"return",
"sliced"
]
| Creates block modification commands, grouped by start index,
with the text to apply them on. | [
"Creates",
"block",
"modification",
"commands",
"grouped",
"by",
"start",
"index",
"with",
"the",
"text",
"to",
"apply",
"them",
"on",
"."
]
| 1e391a46f162740f90511cde1ef615858e8de5cb | https://github.com/springload/draftjs_exporter/blob/1e391a46f162740f90511cde1ef615858e8de5cb/draftjs_exporter/html.py#L95-L116 | train |
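build_command_groups above sorts the start/stop commands and slices the block text between consecutive start indices via itertools.groupby. A toy, self-contained version of that grouping idea using plain (index, operation) tuples, purely illustrative rather than the exporter's own Command objects:

from itertools import groupby

text = "Hello bold world"
# (start_index, operation) pairs, as produced for text, style and entity ranges.
commands = sorted([(0, "start_text"), (6, "start_bold"), (10, "stop_bold"), (16, "stop_text")])

grouped = [(key, [op for _, op in ops]) for key, ops in groupby(commands, key=lambda c: c[0])]
for i, (start, ops) in enumerate(grouped):
    stop = grouped[i + 1][0] if i < len(grouped) - 1 else start
    print(repr(text[start:stop]), ops)

# 'Hello ' ['start_text']
# 'bold' ['start_bold']
# ' world' ['stop_bold']
# '' ['stop_text']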
springload/draftjs_exporter | draftjs_exporter/html.py | HTML.build_commands | def build_commands(self, block):
"""
Build all of the manipulation commands for a given block.
- One pair to set the text.
- Multiple pairs for styles.
- Multiple pairs for entities.
"""
text_commands = Command.start_stop('text', 0, len(block['text']))
style_commands = self.build_style_commands(block)
entity_commands = self.build_entity_commands(block)
return text_commands + style_commands + entity_commands | python | def build_commands(self, block):
"""
Build all of the manipulation commands for a given block.
- One pair to set the text.
- Multiple pairs for styles.
- Multiple pairs for entities.
"""
text_commands = Command.start_stop('text', 0, len(block['text']))
style_commands = self.build_style_commands(block)
entity_commands = self.build_entity_commands(block)
return text_commands + style_commands + entity_commands | [
"def",
"build_commands",
"(",
"self",
",",
"block",
")",
":",
"text_commands",
"=",
"Command",
".",
"start_stop",
"(",
"'text'",
",",
"0",
",",
"len",
"(",
"block",
"[",
"'text'",
"]",
")",
")",
"style_commands",
"=",
"self",
".",
"build_style_commands",
"(",
"block",
")",
"entity_commands",
"=",
"self",
".",
"build_entity_commands",
"(",
"block",
")",
"return",
"text_commands",
"+",
"style_commands",
"+",
"entity_commands"
]
| Build all of the manipulation commands for a given block.
- One pair to set the text.
- Multiple pairs for styles.
- Multiple pairs for entities. | [
"Build",
"all",
"of",
"the",
"manipulation",
"commands",
"for",
"a",
"given",
"block",
".",
"-",
"One",
"pair",
"to",
"set",
"the",
"text",
".",
"-",
"Multiple",
"pairs",
"for",
"styles",
".",
"-",
"Multiple",
"pairs",
"for",
"entities",
"."
]
| 1e391a46f162740f90511cde1ef615858e8de5cb | https://github.com/springload/draftjs_exporter/blob/1e391a46f162740f90511cde1ef615858e8de5cb/draftjs_exporter/html.py#L118-L129 | train |
sloria/doitlive | doitlive/cli.py | run | def run(
commands,
shell=None,
prompt_template="default",
speed=1,
quiet=False,
test_mode=False,
commentecho=False,
):
"""Main function for "magic-running" a list of commands."""
if not quiet:
secho("We'll do it live!", fg="red", bold=True)
secho(
"STARTING SESSION: Press Ctrl-C at any time to exit.",
fg="yellow",
bold=True,
)
click.pause()
click.clear()
state = SessionState(
shell=shell,
prompt_template=prompt_template,
speed=speed,
test_mode=test_mode,
commentecho=commentecho,
)
i = 0
while i < len(commands):
command = commands[i].strip()
i += 1
if not command:
continue
is_comment = command.startswith("#")
if not is_comment:
command_as_list = shlex.split(ensure_utf8(command))
else:
command_as_list = None
shell_match = SHELL_RE.match(command)
if is_comment:
# Parse comment magic
match = OPTION_RE.match(command)
if match:
option, arg = match.group("option"), match.group("arg")
func = OPTION_MAP[option]
func(state, arg)
elif state.commentecho():
comment = command.lstrip("#")
secho(comment, fg="yellow", bold=True)
continue
# Handle 'export' and 'alias' commands by storing them in SessionState
elif command_as_list and command_as_list[0] in ["alias", "export"]:
magictype(
command, prompt_template=state["prompt_template"], speed=state["speed"]
)
# Store the raw commands instead of using add_envvar and add_alias
# to avoid having to parse the command ourselves
state.add_command(command)
# Handle ```python and ```ipython by running "player" consoles
elif shell_match:
shell_name = shell_match.groups()[0].strip()
py_commands = []
more = True
while more: # slurp up all the python code
try:
py_command = commands[i].rstrip()
except IndexError:
raise SessionError(
"Unmatched {0} code block in "
"session file.".format(shell_name)
)
i += 1
if py_command.startswith("```"):
i += 1
more = False
else:
py_commands.append(py_command)
# Run the player console
magictype(
shell_name,
prompt_template=state["prompt_template"],
speed=state["speed"],
)
if shell_name == "ipython":
try:
from doitlive.ipython_consoles import start_ipython_player
except ImportError:
raise RuntimeError(
"```ipython blocks require IPython to be installed"
)
# dedent all the commands to account for IPython's autoindentation
ipy_commands = [textwrap.dedent(cmd) for cmd in py_commands]
start_ipython_player(ipy_commands, speed=state["speed"])
else:
start_python_player(py_commands, speed=state["speed"])
else:
# goto_stealthmode determines when to switch to stealthmode
goto_stealthmode = magicrun(command, **state)
# stealthmode allows user to type live commands outside of automated script
i -= stealthmode(state, goto_stealthmode)
echo_prompt(state["prompt_template"])
wait_for(RETURNS)
if not quiet:
secho("FINISHED SESSION", fg="yellow", bold=True) | python | def run(
commands,
shell=None,
prompt_template="default",
speed=1,
quiet=False,
test_mode=False,
commentecho=False,
):
"""Main function for "magic-running" a list of commands."""
if not quiet:
secho("We'll do it live!", fg="red", bold=True)
secho(
"STARTING SESSION: Press Ctrl-C at any time to exit.",
fg="yellow",
bold=True,
)
click.pause()
click.clear()
state = SessionState(
shell=shell,
prompt_template=prompt_template,
speed=speed,
test_mode=test_mode,
commentecho=commentecho,
)
i = 0
while i < len(commands):
command = commands[i].strip()
i += 1
if not command:
continue
is_comment = command.startswith("#")
if not is_comment:
command_as_list = shlex.split(ensure_utf8(command))
else:
command_as_list = None
shell_match = SHELL_RE.match(command)
if is_comment:
# Parse comment magic
match = OPTION_RE.match(command)
if match:
option, arg = match.group("option"), match.group("arg")
func = OPTION_MAP[option]
func(state, arg)
elif state.commentecho():
comment = command.lstrip("#")
secho(comment, fg="yellow", bold=True)
continue
# Handle 'export' and 'alias' commands by storing them in SessionState
elif command_as_list and command_as_list[0] in ["alias", "export"]:
magictype(
command, prompt_template=state["prompt_template"], speed=state["speed"]
)
# Store the raw commands instead of using add_envvar and add_alias
# to avoid having to parse the command ourselves
state.add_command(command)
# Handle ```python and ```ipython by running "player" consoles
elif shell_match:
shell_name = shell_match.groups()[0].strip()
py_commands = []
more = True
while more: # slurp up all the python code
try:
py_command = commands[i].rstrip()
except IndexError:
raise SessionError(
"Unmatched {0} code block in "
"session file.".format(shell_name)
)
i += 1
if py_command.startswith("```"):
i += 1
more = False
else:
py_commands.append(py_command)
# Run the player console
magictype(
shell_name,
prompt_template=state["prompt_template"],
speed=state["speed"],
)
if shell_name == "ipython":
try:
from doitlive.ipython_consoles import start_ipython_player
except ImportError:
raise RuntimeError(
"```ipython blocks require IPython to be installed"
)
# dedent all the commands to account for IPython's autoindentation
ipy_commands = [textwrap.dedent(cmd) for cmd in py_commands]
start_ipython_player(ipy_commands, speed=state["speed"])
else:
start_python_player(py_commands, speed=state["speed"])
else:
# goto_stealthmode determines when to switch to stealthmode
goto_stealthmode = magicrun(command, **state)
# stealthmode allows user to type live commands outside of automated script
i -= stealthmode(state, goto_stealthmode)
echo_prompt(state["prompt_template"])
wait_for(RETURNS)
if not quiet:
secho("FINISHED SESSION", fg="yellow", bold=True) | [
"def",
"run",
"(",
"commands",
",",
"shell",
"=",
"None",
",",
"prompt_template",
"=",
"\"default\"",
",",
"speed",
"=",
"1",
",",
"quiet",
"=",
"False",
",",
"test_mode",
"=",
"False",
",",
"commentecho",
"=",
"False",
",",
")",
":",
"if",
"not",
"quiet",
":",
"secho",
"(",
"\"We'll do it live!\"",
",",
"fg",
"=",
"\"red\"",
",",
"bold",
"=",
"True",
")",
"secho",
"(",
"\"STARTING SESSION: Press Ctrl-C at any time to exit.\"",
",",
"fg",
"=",
"\"yellow\"",
",",
"bold",
"=",
"True",
",",
")",
"click",
".",
"pause",
"(",
")",
"click",
".",
"clear",
"(",
")",
"state",
"=",
"SessionState",
"(",
"shell",
"=",
"shell",
",",
"prompt_template",
"=",
"prompt_template",
",",
"speed",
"=",
"speed",
",",
"test_mode",
"=",
"test_mode",
",",
"commentecho",
"=",
"commentecho",
",",
")",
"i",
"=",
"0",
"while",
"i",
"<",
"len",
"(",
"commands",
")",
":",
"command",
"=",
"commands",
"[",
"i",
"]",
".",
"strip",
"(",
")",
"i",
"+=",
"1",
"if",
"not",
"command",
":",
"continue",
"is_comment",
"=",
"command",
".",
"startswith",
"(",
"\"#\"",
")",
"if",
"not",
"is_comment",
":",
"command_as_list",
"=",
"shlex",
".",
"split",
"(",
"ensure_utf8",
"(",
"command",
")",
")",
"else",
":",
"command_as_list",
"=",
"None",
"shell_match",
"=",
"SHELL_RE",
".",
"match",
"(",
"command",
")",
"if",
"is_comment",
":",
"# Parse comment magic",
"match",
"=",
"OPTION_RE",
".",
"match",
"(",
"command",
")",
"if",
"match",
":",
"option",
",",
"arg",
"=",
"match",
".",
"group",
"(",
"\"option\"",
")",
",",
"match",
".",
"group",
"(",
"\"arg\"",
")",
"func",
"=",
"OPTION_MAP",
"[",
"option",
"]",
"func",
"(",
"state",
",",
"arg",
")",
"elif",
"state",
".",
"commentecho",
"(",
")",
":",
"comment",
"=",
"command",
".",
"lstrip",
"(",
"\"#\"",
")",
"secho",
"(",
"comment",
",",
"fg",
"=",
"\"yellow\"",
",",
"bold",
"=",
"True",
")",
"continue",
"# Handle 'export' and 'alias' commands by storing them in SessionState",
"elif",
"command_as_list",
"and",
"command_as_list",
"[",
"0",
"]",
"in",
"[",
"\"alias\"",
",",
"\"export\"",
"]",
":",
"magictype",
"(",
"command",
",",
"prompt_template",
"=",
"state",
"[",
"\"prompt_template\"",
"]",
",",
"speed",
"=",
"state",
"[",
"\"speed\"",
"]",
")",
"# Store the raw commands instead of using add_envvar and add_alias",
"# to avoid having to parse the command ourselves",
"state",
".",
"add_command",
"(",
"command",
")",
"# Handle ```python and ```ipython by running \"player\" consoles",
"elif",
"shell_match",
":",
"shell_name",
"=",
"shell_match",
".",
"groups",
"(",
")",
"[",
"0",
"]",
".",
"strip",
"(",
")",
"py_commands",
"=",
"[",
"]",
"more",
"=",
"True",
"while",
"more",
":",
"# slurp up all the python code",
"try",
":",
"py_command",
"=",
"commands",
"[",
"i",
"]",
".",
"rstrip",
"(",
")",
"except",
"IndexError",
":",
"raise",
"SessionError",
"(",
"\"Unmatched {0} code block in \"",
"\"session file.\"",
".",
"format",
"(",
"shell_name",
")",
")",
"i",
"+=",
"1",
"if",
"py_command",
".",
"startswith",
"(",
"\"```\"",
")",
":",
"i",
"+=",
"1",
"more",
"=",
"False",
"else",
":",
"py_commands",
".",
"append",
"(",
"py_command",
")",
"# Run the player console",
"magictype",
"(",
"shell_name",
",",
"prompt_template",
"=",
"state",
"[",
"\"prompt_template\"",
"]",
",",
"speed",
"=",
"state",
"[",
"\"speed\"",
"]",
",",
")",
"if",
"shell_name",
"==",
"\"ipython\"",
":",
"try",
":",
"from",
"doitlive",
".",
"ipython_consoles",
"import",
"start_ipython_player",
"except",
"ImportError",
":",
"raise",
"RuntimeError",
"(",
"\"```ipython blocks require IPython to be installed\"",
")",
"# dedent all the commands to account for IPython's autoindentation",
"ipy_commands",
"=",
"[",
"textwrap",
".",
"dedent",
"(",
"cmd",
")",
"for",
"cmd",
"in",
"py_commands",
"]",
"start_ipython_player",
"(",
"ipy_commands",
",",
"speed",
"=",
"state",
"[",
"\"speed\"",
"]",
")",
"else",
":",
"start_python_player",
"(",
"py_commands",
",",
"speed",
"=",
"state",
"[",
"\"speed\"",
"]",
")",
"else",
":",
"# goto_stealthmode determines when to switch to stealthmode",
"goto_stealthmode",
"=",
"magicrun",
"(",
"command",
",",
"*",
"*",
"state",
")",
"# stealthmode allows user to type live commands outside of automated script",
"i",
"-=",
"stealthmode",
"(",
"state",
",",
"goto_stealthmode",
")",
"echo_prompt",
"(",
"state",
"[",
"\"prompt_template\"",
"]",
")",
"wait_for",
"(",
"RETURNS",
")",
"if",
"not",
"quiet",
":",
"secho",
"(",
"\"FINISHED SESSION\"",
",",
"fg",
"=",
"\"yellow\"",
",",
"bold",
"=",
"True",
")"
]
| Main function for "magic-running" a list of commands. | [
"Main",
"function",
"for",
"magic",
"-",
"running",
"a",
"list",
"of",
"commands",
"."
]
| baf43f8ad3f2e4593fe21f6af42aedd34ef1efee | https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/cli.py#L146-L251 | train |
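run above drives the whole session: plain lines are magictyped and executed, "#doitlive" comments adjust session state, export/alias lines are stored on the SessionState, and fenced python/ipython blocks hand off to the player consoles. A hedged sketch of calling it directly with an in-memory command list (normally the lines come from a session file read by the play command); the shell path is an assumption for the demo.

from doitlive.cli import run

commands = [
    "#doitlive speed: 2",          # comment magic handled via OPTION_MAP
    "export GREETING=hello",       # stored on the SessionState
    "echo $GREETING world",        # magictyped, then run in the shell
]

# Blocks until the presenter "types" through each command at the keyboard.
run(commands, shell="/bin/bash", speed=2, quiet=True)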
sloria/doitlive | doitlive/cli.py | play | def play(quiet, session_file, shell, speed, prompt, commentecho):
"""Play a session file."""
run(
session_file.readlines(),
shell=shell,
speed=speed,
quiet=quiet,
test_mode=TESTING,
prompt_template=prompt,
commentecho=commentecho,
) | python | def play(quiet, session_file, shell, speed, prompt, commentecho):
"""Play a session file."""
run(
session_file.readlines(),
shell=shell,
speed=speed,
quiet=quiet,
test_mode=TESTING,
prompt_template=prompt,
commentecho=commentecho,
) | [
"def",
"play",
"(",
"quiet",
",",
"session_file",
",",
"shell",
",",
"speed",
",",
"prompt",
",",
"commentecho",
")",
":",
"run",
"(",
"session_file",
".",
"readlines",
"(",
")",
",",
"shell",
"=",
"shell",
",",
"speed",
"=",
"speed",
",",
"quiet",
"=",
"quiet",
",",
"test_mode",
"=",
"TESTING",
",",
"prompt_template",
"=",
"prompt",
",",
"commentecho",
"=",
"commentecho",
",",
")"
]
| Play a session file. | [
"Play",
"a",
"session",
"file",
"."
]
| baf43f8ad3f2e4593fe21f6af42aedd34ef1efee | https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/cli.py#L419-L429 | train |
sloria/doitlive | doitlive/cli.py | demo | def demo(quiet, shell, speed, prompt, commentecho):
"""Run a demo doitlive session."""
run(
DEMO,
shell=shell,
speed=speed,
test_mode=TESTING,
prompt_template=prompt,
quiet=quiet,
commentecho=commentecho,
) | python | def demo(quiet, shell, speed, prompt, commentecho):
"""Run a demo doitlive session."""
run(
DEMO,
shell=shell,
speed=speed,
test_mode=TESTING,
prompt_template=prompt,
quiet=quiet,
commentecho=commentecho,
) | [
"def",
"demo",
"(",
"quiet",
",",
"shell",
",",
"speed",
",",
"prompt",
",",
"commentecho",
")",
":",
"run",
"(",
"DEMO",
",",
"shell",
"=",
"shell",
",",
"speed",
"=",
"speed",
",",
"test_mode",
"=",
"TESTING",
",",
"prompt_template",
"=",
"prompt",
",",
"quiet",
"=",
"quiet",
",",
"commentecho",
"=",
"commentecho",
",",
")"
]
| Run a demo doitlive session. | [
"Run",
"a",
"demo",
"doitlive",
"session",
"."
]
| baf43f8ad3f2e4593fe21f6af42aedd34ef1efee | https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/cli.py#L442-L452 | train |
sloria/doitlive | doitlive/styling.py | echo | def echo(
message=None, file=None, nl=True, err=False, color=None, carriage_return=False
):
"""
Patched click echo function.
"""
message = message or ""
if carriage_return and nl:
click_echo(message + "\r\n", file, False, err, color)
elif carriage_return and not nl:
click_echo(message + "\r", file, False, err, color)
else:
click_echo(message, file, nl, err, color) | python | def echo(
message=None, file=None, nl=True, err=False, color=None, carriage_return=False
):
"""
Patched click echo function.
"""
message = message or ""
if carriage_return and nl:
click_echo(message + "\r\n", file, False, err, color)
elif carriage_return and not nl:
click_echo(message + "\r", file, False, err, color)
else:
click_echo(message, file, nl, err, color) | [
"def",
"echo",
"(",
"message",
"=",
"None",
",",
"file",
"=",
"None",
",",
"nl",
"=",
"True",
",",
"err",
"=",
"False",
",",
"color",
"=",
"None",
",",
"carriage_return",
"=",
"False",
")",
":",
"message",
"=",
"message",
"or",
"\"\"",
"if",
"carriage_return",
"and",
"nl",
":",
"click_echo",
"(",
"message",
"+",
"\"\\r\\n\"",
",",
"file",
",",
"False",
",",
"err",
",",
"color",
")",
"elif",
"carriage_return",
"and",
"not",
"nl",
":",
"click_echo",
"(",
"message",
"+",
"\"\\r\"",
",",
"file",
",",
"False",
",",
"err",
",",
"color",
")",
"else",
":",
"click_echo",
"(",
"message",
",",
"file",
",",
"nl",
",",
"err",
",",
"color",
")"
]
| Patched click echo function. | [
"Patched",
"click",
"echo",
"function",
"."
]
| baf43f8ad3f2e4593fe21f6af42aedd34ef1efee | https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/styling.py#L201-L213 | train |
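The patched echo above only adds carriage-return handling on top of click.echo. A short usage sketch, for example to overwrite a single status line in place:

import time
from doitlive.styling import echo

for pct in (25, 50, 75, 100):
    # carriage_return with nl=False rewrites the same terminal line on each pass.
    echo("progress: {}%".format(pct), nl=False, carriage_return=True)
    time.sleep(0.2)
echo()   # finish with a plain newline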
sloria/doitlive | doitlive/keyboard.py | magictype | def magictype(text, prompt_template="default", speed=1):
"""Echo each character in ``text`` as keyboard characters are pressed.
Characters are echo'd ``speed`` characters at a time.
"""
echo_prompt(prompt_template)
cursor_position = 0
return_to_regular_type = False
with raw_mode():
while True:
char = text[cursor_position : cursor_position + speed]
in_char = getchar()
if in_char in {ESC, CTRLC}:
echo(carriage_return=True)
raise click.Abort()
elif in_char == TAB:
return_to_regular_type = True
break
elif in_char == BACKSPACE:
if cursor_position > 0:
echo("\b \b", nl=False)
cursor_position -= 1
elif in_char in RETURNS:
# Only return at end of command
if cursor_position >= len(text):
echo("\r", nl=True)
break
elif in_char == CTRLZ and hasattr(signal, "SIGTSTP"):
# Background process
os.kill(0, signal.SIGTSTP)
# When doitlive is back in foreground, clear the terminal
# and resume where we left off
click.clear()
echo_prompt(prompt_template)
echo(text[:cursor_position], nl=False)
else:
if cursor_position < len(text):
echo(char, nl=False)
increment = min([speed, len(text) - cursor_position])
cursor_position += increment
return return_to_regular_type | python | def magictype(text, prompt_template="default", speed=1):
"""Echo each character in ``text`` as keyboard characters are pressed.
Characters are echo'd ``speed`` characters at a time.
"""
echo_prompt(prompt_template)
cursor_position = 0
return_to_regular_type = False
with raw_mode():
while True:
char = text[cursor_position : cursor_position + speed]
in_char = getchar()
if in_char in {ESC, CTRLC}:
echo(carriage_return=True)
raise click.Abort()
elif in_char == TAB:
return_to_regular_type = True
break
elif in_char == BACKSPACE:
if cursor_position > 0:
echo("\b \b", nl=False)
cursor_position -= 1
elif in_char in RETURNS:
# Only return at end of command
if cursor_position >= len(text):
echo("\r", nl=True)
break
elif in_char == CTRLZ and hasattr(signal, "SIGTSTP"):
# Background process
os.kill(0, signal.SIGTSTP)
# When doitlive is back in foreground, clear the terminal
# and resume where we left off
click.clear()
echo_prompt(prompt_template)
echo(text[:cursor_position], nl=False)
else:
if cursor_position < len(text):
echo(char, nl=False)
increment = min([speed, len(text) - cursor_position])
cursor_position += increment
return return_to_regular_type | [
"def",
"magictype",
"(",
"text",
",",
"prompt_template",
"=",
"\"default\"",
",",
"speed",
"=",
"1",
")",
":",
"echo_prompt",
"(",
"prompt_template",
")",
"cursor_position",
"=",
"0",
"return_to_regular_type",
"=",
"False",
"with",
"raw_mode",
"(",
")",
":",
"while",
"True",
":",
"char",
"=",
"text",
"[",
"cursor_position",
":",
"cursor_position",
"+",
"speed",
"]",
"in_char",
"=",
"getchar",
"(",
")",
"if",
"in_char",
"in",
"{",
"ESC",
",",
"CTRLC",
"}",
":",
"echo",
"(",
"carriage_return",
"=",
"True",
")",
"raise",
"click",
".",
"Abort",
"(",
")",
"elif",
"in_char",
"==",
"TAB",
":",
"return_to_regular_type",
"=",
"True",
"break",
"elif",
"in_char",
"==",
"BACKSPACE",
":",
"if",
"cursor_position",
">",
"0",
":",
"echo",
"(",
"\"\\b \\b\"",
",",
"nl",
"=",
"False",
")",
"cursor_position",
"-=",
"1",
"elif",
"in_char",
"in",
"RETURNS",
":",
"# Only return at end of command",
"if",
"cursor_position",
">=",
"len",
"(",
"text",
")",
":",
"echo",
"(",
"\"\\r\"",
",",
"nl",
"=",
"True",
")",
"break",
"elif",
"in_char",
"==",
"CTRLZ",
"and",
"hasattr",
"(",
"signal",
",",
"\"SIGTSTP\"",
")",
":",
"# Background process",
"os",
".",
"kill",
"(",
"0",
",",
"signal",
".",
"SIGTSTP",
")",
"# When doitlive is back in foreground, clear the terminal",
"# and resume where we left off",
"click",
".",
"clear",
"(",
")",
"echo_prompt",
"(",
"prompt_template",
")",
"echo",
"(",
"text",
"[",
":",
"cursor_position",
"]",
",",
"nl",
"=",
"False",
")",
"else",
":",
"if",
"cursor_position",
"<",
"len",
"(",
"text",
")",
":",
"echo",
"(",
"char",
",",
"nl",
"=",
"False",
")",
"increment",
"=",
"min",
"(",
"[",
"speed",
",",
"len",
"(",
"text",
")",
"-",
"cursor_position",
"]",
")",
"cursor_position",
"+=",
"increment",
"return",
"return_to_regular_type"
]
| Echo each character in ``text`` as keyboard characters are pressed.
Characters are echo'd ``speed`` characters at a time. | [
"Echo",
"each",
"character",
"in",
"text",
"as",
"keyboard",
"characters",
"are",
"pressed",
".",
"Characters",
"are",
"echo",
"d",
"speed",
"characters",
"at",
"a",
"time",
"."
]
| baf43f8ad3f2e4593fe21f6af42aedd34ef1efee | https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/keyboard.py#L36-L75 | train |
sloria/doitlive | doitlive/keyboard.py | regulartype | def regulartype(prompt_template="default"):
"""Echo each character typed. Unlike magictype, this echos the characters the
user is pressing.
Returns: command_string | The command to be passed to the shell to run. This is
| typed by the user.
"""
echo_prompt(prompt_template)
command_string = ""
cursor_position = 0
with raw_mode():
while True:
in_char = getchar()
if in_char in {ESC, CTRLC}:
echo(carriage_return=True)
raise click.Abort()
elif in_char == TAB:
echo("\r", nl=True)
return in_char
elif in_char == BACKSPACE:
if cursor_position > 0:
echo("\b \b", nl=False)
command_string = command_string[:-1]
cursor_position -= 1
elif in_char in RETURNS:
echo("\r", nl=True)
return command_string
elif in_char == CTRLZ and hasattr(signal, "SIGTSTP"):
# Background process
os.kill(0, signal.SIGTSTP)
# When doitlive is back in foreground, clear the terminal
# and resume where we left off
click.clear()
echo_prompt(prompt_template)
else:
echo(in_char, nl=False)
command_string += in_char
cursor_position += 1 | python | def regulartype(prompt_template="default"):
"""Echo each character typed. Unlike magictype, this echos the characters the
user is pressing.
Returns: command_string | The command to be passed to the shell to run. This is
| typed by the user.
"""
echo_prompt(prompt_template)
command_string = ""
cursor_position = 0
with raw_mode():
while True:
in_char = getchar()
if in_char in {ESC, CTRLC}:
echo(carriage_return=True)
raise click.Abort()
elif in_char == TAB:
echo("\r", nl=True)
return in_char
elif in_char == BACKSPACE:
if cursor_position > 0:
echo("\b \b", nl=False)
command_string = command_string[:-1]
cursor_position -= 1
elif in_char in RETURNS:
echo("\r", nl=True)
return command_string
elif in_char == CTRLZ and hasattr(signal, "SIGTSTP"):
# Background process
os.kill(0, signal.SIGTSTP)
# When doitlive is back in foreground, clear the terminal
# and resume where we left off
click.clear()
echo_prompt(prompt_template)
else:
echo(in_char, nl=False)
command_string += in_char
cursor_position += 1 | [
"def",
"regulartype",
"(",
"prompt_template",
"=",
"\"default\"",
")",
":",
"echo_prompt",
"(",
"prompt_template",
")",
"command_string",
"=",
"\"\"",
"cursor_position",
"=",
"0",
"with",
"raw_mode",
"(",
")",
":",
"while",
"True",
":",
"in_char",
"=",
"getchar",
"(",
")",
"if",
"in_char",
"in",
"{",
"ESC",
",",
"CTRLC",
"}",
":",
"echo",
"(",
"carriage_return",
"=",
"True",
")",
"raise",
"click",
".",
"Abort",
"(",
")",
"elif",
"in_char",
"==",
"TAB",
":",
"echo",
"(",
"\"\\r\"",
",",
"nl",
"=",
"True",
")",
"return",
"in_char",
"elif",
"in_char",
"==",
"BACKSPACE",
":",
"if",
"cursor_position",
">",
"0",
":",
"echo",
"(",
"\"\\b \\b\"",
",",
"nl",
"=",
"False",
")",
"command_string",
"=",
"command_string",
"[",
":",
"-",
"1",
"]",
"cursor_position",
"-=",
"1",
"elif",
"in_char",
"in",
"RETURNS",
":",
"echo",
"(",
"\"\\r\"",
",",
"nl",
"=",
"True",
")",
"return",
"command_string",
"elif",
"in_char",
"==",
"CTRLZ",
"and",
"hasattr",
"(",
"signal",
",",
"\"SIGTSTP\"",
")",
":",
"# Background process",
"os",
".",
"kill",
"(",
"0",
",",
"signal",
".",
"SIGTSTP",
")",
"# When doitlive is back in foreground, clear the terminal",
"# and resume where we left off",
"click",
".",
"clear",
"(",
")",
"echo_prompt",
"(",
"prompt_template",
")",
"else",
":",
"echo",
"(",
"in_char",
",",
"nl",
"=",
"False",
")",
"command_string",
"+=",
"in_char",
"cursor_position",
"+=",
"1"
]
| Echo each character typed. Unlike magictype, this echos the characters the
user is pressing.
Returns: command_string | The command to be passed to the shell to run. This is
| typed by the user. | [
"Echo",
"each",
"character",
"typed",
".",
"Unlike",
"magictype",
"this",
"echos",
"the",
"characters",
"the",
"user",
"is",
"pressing",
"."
]
| baf43f8ad3f2e4593fe21f6af42aedd34ef1efee | https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/keyboard.py#L134-L171 | train |
sloria/doitlive | doitlive/keyboard.py | regularrun | def regularrun(
shell,
prompt_template="default",
aliases=None,
envvars=None,
extra_commands=None,
speed=1,
test_mode=False,
commentecho=False,
):
"""Allow user to run their own live commands until CTRL-Z is pressed again.
"""
loop_again = True
command_string = regulartype(prompt_template)
if command_string == TAB:
loop_again = False
return loop_again
run_command(
command_string,
shell,
aliases=aliases,
envvars=envvars,
extra_commands=extra_commands,
test_mode=test_mode,
)
return loop_again | python | def regularrun(
shell,
prompt_template="default",
aliases=None,
envvars=None,
extra_commands=None,
speed=1,
test_mode=False,
commentecho=False,
):
"""Allow user to run their own live commands until CTRL-Z is pressed again.
"""
loop_again = True
command_string = regulartype(prompt_template)
if command_string == TAB:
loop_again = False
return loop_again
run_command(
command_string,
shell,
aliases=aliases,
envvars=envvars,
extra_commands=extra_commands,
test_mode=test_mode,
)
return loop_again | [
"def",
"regularrun",
"(",
"shell",
",",
"prompt_template",
"=",
"\"default\"",
",",
"aliases",
"=",
"None",
",",
"envvars",
"=",
"None",
",",
"extra_commands",
"=",
"None",
",",
"speed",
"=",
"1",
",",
"test_mode",
"=",
"False",
",",
"commentecho",
"=",
"False",
",",
")",
":",
"loop_again",
"=",
"True",
"command_string",
"=",
"regulartype",
"(",
"prompt_template",
")",
"if",
"command_string",
"==",
"TAB",
":",
"loop_again",
"=",
"False",
"return",
"loop_again",
"run_command",
"(",
"command_string",
",",
"shell",
",",
"aliases",
"=",
"aliases",
",",
"envvars",
"=",
"envvars",
",",
"extra_commands",
"=",
"extra_commands",
",",
"test_mode",
"=",
"test_mode",
",",
")",
"return",
"loop_again"
]
| Allow user to run their own live commands until CTRL-Z is pressed again. | [
"Allow",
"user",
"to",
"run",
"their",
"own",
"live",
"commands",
"until",
"CTRL",
"-",
"Z",
"is",
"pressed",
"again",
"."
]
| baf43f8ad3f2e4593fe21f6af42aedd34ef1efee | https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/keyboard.py#L174-L199 | train |
sloria/doitlive | doitlive/keyboard.py | magicrun | def magicrun(
text,
shell,
prompt_template="default",
aliases=None,
envvars=None,
extra_commands=None,
speed=1,
test_mode=False,
commentecho=False,
):
"""Echo out each character in ``text`` as keyboard characters are pressed,
wait for a RETURN keypress, then run the ``text`` in a shell context.
"""
goto_regulartype = magictype(text, prompt_template, speed)
if goto_regulartype:
return goto_regulartype
run_command(
text,
shell,
aliases=aliases,
envvars=envvars,
extra_commands=extra_commands,
test_mode=test_mode,
)
return goto_regulartype | python | def magicrun(
text,
shell,
prompt_template="default",
aliases=None,
envvars=None,
extra_commands=None,
speed=1,
test_mode=False,
commentecho=False,
):
"""Echo out each character in ``text`` as keyboard characters are pressed,
wait for a RETURN keypress, then run the ``text`` in a shell context.
"""
goto_regulartype = magictype(text, prompt_template, speed)
if goto_regulartype:
return goto_regulartype
run_command(
text,
shell,
aliases=aliases,
envvars=envvars,
extra_commands=extra_commands,
test_mode=test_mode,
)
return goto_regulartype | [
"def",
"magicrun",
"(",
"text",
",",
"shell",
",",
"prompt_template",
"=",
"\"default\"",
",",
"aliases",
"=",
"None",
",",
"envvars",
"=",
"None",
",",
"extra_commands",
"=",
"None",
",",
"speed",
"=",
"1",
",",
"test_mode",
"=",
"False",
",",
"commentecho",
"=",
"False",
",",
")",
":",
"goto_regulartype",
"=",
"magictype",
"(",
"text",
",",
"prompt_template",
",",
"speed",
")",
"if",
"goto_regulartype",
":",
"return",
"goto_regulartype",
"run_command",
"(",
"text",
",",
"shell",
",",
"aliases",
"=",
"aliases",
",",
"envvars",
"=",
"envvars",
",",
"extra_commands",
"=",
"extra_commands",
",",
"test_mode",
"=",
"test_mode",
",",
")",
"return",
"goto_regulartype"
]
| Echo out each character in ``text`` as keyboard characters are pressed,
wait for a RETURN keypress, then run the ``text`` in a shell context. | [
"Echo",
"out",
"each",
"character",
"in",
"text",
"as",
"keyboard",
"characters",
"are",
"pressed",
"wait",
"for",
"a",
"RETURN",
"keypress",
"then",
"run",
"the",
"text",
"in",
"a",
"shell",
"context",
"."
]
| baf43f8ad3f2e4593fe21f6af42aedd34ef1efee | https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/keyboard.py#L202-L227 | train |
sloria/doitlive | doitlive/python_consoles.py | PythonPlayerConsole.run_commands | def run_commands(self):
"""Automatically type and execute all commands."""
more = 0
prompt = sys.ps1
for command in self.commands:
try:
prompt = sys.ps2 if more else sys.ps1
try:
magictype(command, prompt_template=prompt, speed=self.speed)
except EOFError:
self.write("\n")
break
else:
if command.strip() == "exit()":
return
more = self.push(command)
except KeyboardInterrupt:
self.write("\nKeyboardInterrupt\n")
self.resetbuffer()
more = 0
sys.exit(1)
echo_prompt(prompt)
wait_for(RETURNS) | python | def run_commands(self):
"""Automatically type and execute all commands."""
more = 0
prompt = sys.ps1
for command in self.commands:
try:
prompt = sys.ps2 if more else sys.ps1
try:
magictype(command, prompt_template=prompt, speed=self.speed)
except EOFError:
self.write("\n")
break
else:
if command.strip() == "exit()":
return
more = self.push(command)
except KeyboardInterrupt:
self.write("\nKeyboardInterrupt\n")
self.resetbuffer()
more = 0
sys.exit(1)
echo_prompt(prompt)
wait_for(RETURNS) | [
"def",
"run_commands",
"(",
"self",
")",
":",
"more",
"=",
"0",
"prompt",
"=",
"sys",
".",
"ps1",
"for",
"command",
"in",
"self",
".",
"commands",
":",
"try",
":",
"prompt",
"=",
"sys",
".",
"ps2",
"if",
"more",
"else",
"sys",
".",
"ps1",
"try",
":",
"magictype",
"(",
"command",
",",
"prompt_template",
"=",
"prompt",
",",
"speed",
"=",
"self",
".",
"speed",
")",
"except",
"EOFError",
":",
"self",
".",
"write",
"(",
"\"\\n\"",
")",
"break",
"else",
":",
"if",
"command",
".",
"strip",
"(",
")",
"==",
"\"exit()\"",
":",
"return",
"more",
"=",
"self",
".",
"push",
"(",
"command",
")",
"except",
"KeyboardInterrupt",
":",
"self",
".",
"write",
"(",
"\"\\nKeyboardInterrupt\\n\"",
")",
"self",
".",
"resetbuffer",
"(",
")",
"more",
"=",
"0",
"sys",
".",
"exit",
"(",
"1",
")",
"echo_prompt",
"(",
"prompt",
")",
"wait_for",
"(",
"RETURNS",
")"
]
| Automatically type and execute all commands. | [
"Automatically",
"type",
"and",
"execute",
"all",
"commands",
"."
]
| baf43f8ad3f2e4593fe21f6af42aedd34ef1efee | https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/python_consoles.py#L20-L42 | train |
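run_commands above feeds each stored line through magictype and pushes it into the interactive console, which is how the python blocks from a session end up executing. A hedged sketch of the helper the cli module calls for this; the import path is an assumption based on this file's location, and the (commands, speed) signature mirrors how run() invokes it above.

from doitlive.python_consoles import start_python_player  # assumed import path

fake_session_block = [
    "x = 21",
    "x * 2",
    "exit()",
]

# Assumed to open the player console; it waits for keypresses to "type" each line.
start_python_player(fake_session_block, speed=3)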
sloria/doitlive | doitlive/python_consoles.py | PythonPlayerConsole.interact | def interact(self, banner=None):
"""Run an interactive session."""
try:
sys.ps1
except AttributeError:
sys.ps1 = ">>>"
try:
sys.ps2
except AttributeError:
sys.ps2 = "... "
cprt = (
'Type "help", "copyright", "credits" or "license" for ' "more information."
)
if banner is None:
self.write("Python %s on %s\n%s\n" % (sys.version, sys.platform, cprt))
else:
self.write("%s\n" % str(banner))
self.run_commands() | python | def interact(self, banner=None):
"""Run an interactive session."""
try:
sys.ps1
except AttributeError:
sys.ps1 = ">>>"
try:
sys.ps2
except AttributeError:
sys.ps2 = "... "
cprt = (
'Type "help", "copyright", "credits" or "license" for ' "more information."
)
if banner is None:
self.write("Python %s on %s\n%s\n" % (sys.version, sys.platform, cprt))
else:
self.write("%s\n" % str(banner))
self.run_commands() | [
"def",
"interact",
"(",
"self",
",",
"banner",
"=",
"None",
")",
":",
"try",
":",
"sys",
".",
"ps1",
"except",
"AttributeError",
":",
"sys",
".",
"ps1",
"=",
"\">>>\"",
"try",
":",
"sys",
".",
"ps2",
"except",
"AttributeError",
":",
"sys",
".",
"ps2",
"=",
"\"... \"",
"cprt",
"=",
"(",
"'Type \"help\", \"copyright\", \"credits\" or \"license\" for '",
"\"more information.\"",
")",
"if",
"banner",
"is",
"None",
":",
"self",
".",
"write",
"(",
"\"Python %s on %s\\n%s\\n\"",
"%",
"(",
"sys",
".",
"version",
",",
"sys",
".",
"platform",
",",
"cprt",
")",
")",
"else",
":",
"self",
".",
"write",
"(",
"\"%s\\n\"",
"%",
"str",
"(",
"banner",
")",
")",
"self",
".",
"run_commands",
"(",
")"
]
| Run an interactive session. | [
"Run",
"an",
"interactive",
"session",
"."
]
| baf43f8ad3f2e4593fe21f6af42aedd34ef1efee | https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/python_consoles.py#L44-L61 | train |
sloria/doitlive | doitlive/ipython_consoles.py | start_ipython_player | def start_ipython_player(commands, speed=1):
"""Starts a new magic IPython shell."""
PlayerTerminalIPythonApp.commands = commands
PlayerTerminalIPythonApp.speed = speed
PlayerTerminalIPythonApp.launch_instance() | python | def start_ipython_player(commands, speed=1):
"""Starts a new magic IPython shell."""
PlayerTerminalIPythonApp.commands = commands
PlayerTerminalIPythonApp.speed = speed
PlayerTerminalIPythonApp.launch_instance() | [
"def",
"start_ipython_player",
"(",
"commands",
",",
"speed",
"=",
"1",
")",
":",
"PlayerTerminalIPythonApp",
".",
"commands",
"=",
"commands",
"PlayerTerminalIPythonApp",
".",
"speed",
"=",
"speed",
"PlayerTerminalIPythonApp",
".",
"launch_instance",
"(",
")"
]
| Starts a new magic IPython shell. | [
"Starts",
"a",
"new",
"magic",
"IPython",
"shell",
"."
]
| baf43f8ad3f2e4593fe21f6af42aedd34ef1efee | https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/ipython_consoles.py#L168-L172 | train |
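A hedged invocation sketch: the surrounding library treats commands as a list of source-line strings, so a call like the one below should replay two statements in a magic IPython shell, typing roughly two characters per keypress. Requires IPython; the statements themselves are my own.
from doitlive.ipython_consoles import start_ipython_player

start_ipython_player(["x = 21 * 2", "print(x)"], speed=2)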
sloria/doitlive | doitlive/ipython_consoles.py | PlayerTerminalInteractiveShell.on_feed_key | def on_feed_key(self, key_press):
"""Handles the magictyping when a key is pressed"""
if key_press.key in {Keys.Escape, Keys.ControlC}:
echo(carriage_return=True)
raise Abort()
if key_press.key == Keys.Backspace:
if self.current_command_pos > 0:
self.current_command_pos -= 1
return key_press
ret = None
if key_press.key != Keys.CPRResponse:
if self.current_command_pos < len(self.current_command):
current_key = self.current_command_key
ret = KeyPress(current_key)
increment = min(
[self.speed, len(self.current_command) - self.current_command_pos]
)
self.current_command_pos += increment
else:
# Command is finished, wait for Enter
if key_press.key != Keys.Enter:
return None
self.current_command_index += 1
self.current_command_pos = 0
ret = key_press
return ret | python | def on_feed_key(self, key_press):
"""Handles the magictyping when a key is pressed"""
if key_press.key in {Keys.Escape, Keys.ControlC}:
echo(carriage_return=True)
raise Abort()
if key_press.key == Keys.Backspace:
if self.current_command_pos > 0:
self.current_command_pos -= 1
return key_press
ret = None
if key_press.key != Keys.CPRResponse:
if self.current_command_pos < len(self.current_command):
current_key = self.current_command_key
ret = KeyPress(current_key)
increment = min(
[self.speed, len(self.current_command) - self.current_command_pos]
)
self.current_command_pos += increment
else:
# Command is finished, wait for Enter
if key_press.key != Keys.Enter:
return None
self.current_command_index += 1
self.current_command_pos = 0
ret = key_press
return ret | [
"def",
"on_feed_key",
"(",
"self",
",",
"key_press",
")",
":",
"if",
"key_press",
".",
"key",
"in",
"{",
"Keys",
".",
"Escape",
",",
"Keys",
".",
"ControlC",
"}",
":",
"echo",
"(",
"carriage_return",
"=",
"True",
")",
"raise",
"Abort",
"(",
")",
"if",
"key_press",
".",
"key",
"==",
"Keys",
".",
"Backspace",
":",
"if",
"self",
".",
"current_command_pos",
">",
"0",
":",
"self",
".",
"current_command_pos",
"-=",
"1",
"return",
"key_press",
"ret",
"=",
"None",
"if",
"key_press",
".",
"key",
"!=",
"Keys",
".",
"CPRResponse",
":",
"if",
"self",
".",
"current_command_pos",
"<",
"len",
"(",
"self",
".",
"current_command",
")",
":",
"current_key",
"=",
"self",
".",
"current_command_key",
"ret",
"=",
"KeyPress",
"(",
"current_key",
")",
"increment",
"=",
"min",
"(",
"[",
"self",
".",
"speed",
",",
"len",
"(",
"self",
".",
"current_command",
")",
"-",
"self",
".",
"current_command_pos",
"]",
")",
"self",
".",
"current_command_pos",
"+=",
"increment",
"else",
":",
"# Command is finished, wait for Enter",
"if",
"key_press",
".",
"key",
"!=",
"Keys",
".",
"Enter",
":",
"return",
"None",
"self",
".",
"current_command_index",
"+=",
"1",
"self",
".",
"current_command_pos",
"=",
"0",
"ret",
"=",
"key_press",
"return",
"ret"
]
| Handles the magictyping when a key is pressed | [
"Handles",
"the",
"magictyping",
"when",
"a",
"key",
"is",
"pressed"
]
| baf43f8ad3f2e4593fe21f6af42aedd34ef1efee | https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/ipython_consoles.py#L61-L86 | train |
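The speed handling in on_feed_key can be illustrated without prompt_toolkit: each keypress advances the position through the scripted command by min(speed, remaining) characters. The helper below is a stand-alone rendition of just that bookkeeping, not the real KeyPress plumbing.
def keystrokes_needed(command, speed):
    # Count how many keypresses it takes to "type" the command when every
    # press advances up to `speed` characters, as in on_feed_key above.
    pos = 0
    presses = 0
    while pos < len(command):
        pos += min(speed, len(command) - pos)
        presses += 1
    return presses

assert keystrokes_needed("print('hi')", speed=1) == 11
assert keystrokes_needed("print('hi')", speed=3) == 4   # ceil(11 / 3)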
sloria/doitlive | doitlive/ipython_consoles.py | PlayerTerminalIPythonApp.init_shell | def init_shell(self):
"""initialize the InteractiveShell instance"""
self.shell = PlayerTerminalInteractiveShell.instance(
commands=self.commands,
speed=self.speed,
parent=self,
display_banner=False,
profile_dir=self.profile_dir,
ipython_dir=self.ipython_dir,
user_ns=self.user_ns,
)
self.shell.configurables.append(self) | python | def init_shell(self):
"""initialize the InteractiveShell instance"""
self.shell = PlayerTerminalInteractiveShell.instance(
commands=self.commands,
speed=self.speed,
parent=self,
display_banner=False,
profile_dir=self.profile_dir,
ipython_dir=self.ipython_dir,
user_ns=self.user_ns,
)
self.shell.configurables.append(self) | [
"def",
"init_shell",
"(",
"self",
")",
":",
"self",
".",
"shell",
"=",
"PlayerTerminalInteractiveShell",
".",
"instance",
"(",
"commands",
"=",
"self",
".",
"commands",
",",
"speed",
"=",
"self",
".",
"speed",
",",
"parent",
"=",
"self",
",",
"display_banner",
"=",
"False",
",",
"profile_dir",
"=",
"self",
".",
"profile_dir",
",",
"ipython_dir",
"=",
"self",
".",
"ipython_dir",
",",
"user_ns",
"=",
"self",
".",
"user_ns",
",",
")",
"self",
".",
"shell",
".",
"configurables",
".",
"append",
"(",
"self",
")"
]
| initialize the InteractiveShell instance | [
"initialize",
"the",
"InteractiveShell",
"instance"
]
| baf43f8ad3f2e4593fe21f6af42aedd34ef1efee | https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/ipython_consoles.py#L154-L165 | train |
sloria/doitlive | doitlive/termutils.py | raw_mode | def raw_mode():
"""
Enables terminal raw mode during the context.
Note: Currently noop for Windows systems.
Usage: ::
with raw_mode():
do_some_stuff()
"""
if WIN:
# No implementation for windows yet.
yield # needed for the empty context manager to work
else:
# imports are placed here because this will fail under Windows
import tty
import termios
if not isatty(sys.stdin):
f = open("/dev/tty")
fd = f.fileno()
else:
fd = sys.stdin.fileno()
f = None
try:
old_settings = termios.tcgetattr(fd)
tty.setraw(fd)
except termios.error:
pass
try:
yield
finally:
# this block sets the terminal to sane mode again,
# also in case an exception occured in the context manager
try:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
# sys.stdout.flush() # not needed I think.
if f is not None:
f.close()
except termios.error:
pass | python | def raw_mode():
"""
Enables terminal raw mode during the context.
Note: Currently noop for Windows systems.
Usage: ::
with raw_mode():
do_some_stuff()
"""
if WIN:
# No implementation for windows yet.
yield # needed for the empty context manager to work
else:
# imports are placed here because this will fail under Windows
import tty
import termios
if not isatty(sys.stdin):
f = open("/dev/tty")
fd = f.fileno()
else:
fd = sys.stdin.fileno()
f = None
try:
old_settings = termios.tcgetattr(fd)
tty.setraw(fd)
except termios.error:
pass
try:
yield
finally:
# this block sets the terminal to sane mode again,
# also in case an exception occured in the context manager
try:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
# sys.stdout.flush() # not needed I think.
if f is not None:
f.close()
except termios.error:
pass | [
"def",
"raw_mode",
"(",
")",
":",
"if",
"WIN",
":",
"# No implementation for windows yet.",
"yield",
"# needed for the empty context manager to work",
"else",
":",
"# imports are placed here because this will fail under Windows",
"import",
"tty",
"import",
"termios",
"if",
"not",
"isatty",
"(",
"sys",
".",
"stdin",
")",
":",
"f",
"=",
"open",
"(",
"\"/dev/tty\"",
")",
"fd",
"=",
"f",
".",
"fileno",
"(",
")",
"else",
":",
"fd",
"=",
"sys",
".",
"stdin",
".",
"fileno",
"(",
")",
"f",
"=",
"None",
"try",
":",
"old_settings",
"=",
"termios",
".",
"tcgetattr",
"(",
"fd",
")",
"tty",
".",
"setraw",
"(",
"fd",
")",
"except",
"termios",
".",
"error",
":",
"pass",
"try",
":",
"yield",
"finally",
":",
"# this block sets the terminal to sane mode again,",
"# also in case an exception occured in the context manager",
"try",
":",
"termios",
".",
"tcsetattr",
"(",
"fd",
",",
"termios",
".",
"TCSADRAIN",
",",
"old_settings",
")",
"# sys.stdout.flush() # not needed I think.",
"if",
"f",
"is",
"not",
"None",
":",
"f",
".",
"close",
"(",
")",
"except",
"termios",
".",
"error",
":",
"pass"
]
| Enables terminal raw mode during the context.
Note: Currently noop for Windows systems.
Usage: ::
with raw_mode():
do_some_stuff() | [
"Enables",
"terminal",
"raw",
"mode",
"during",
"the",
"context",
"."
]
| baf43f8ad3f2e4593fe21f6af42aedd34ef1efee | https://github.com/sloria/doitlive/blob/baf43f8ad3f2e4593fe21f6af42aedd34ef1efee/doitlive/termutils.py#L13-L54 | train |
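A usage sketch following the docstring: read a single keypress while the terminal is raw. This assumes raw_mode is exposed as a context manager (the yield and the docstring suggest a @contextmanager decorator not shown in this record); on Windows the branch above is a no-op, so input stays line-buffered.
import sys
from doitlive.termutils import raw_mode

with raw_mode():
    ch = sys.stdin.read(1)   # returns after a single keypress, no Enter needed (POSIX)
print("you pressed:", repr(ch))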
skorokithakis/shortuuid | shortuuid/main.py | int_to_string | def int_to_string(number, alphabet, padding=None):
"""
Convert a number to a string, using the given alphabet.
The output has the most significant digit first.
"""
output = ""
alpha_len = len(alphabet)
while number:
number, digit = divmod(number, alpha_len)
output += alphabet[digit]
if padding:
remainder = max(padding - len(output), 0)
output = output + alphabet[0] * remainder
return output[::-1] | python | def int_to_string(number, alphabet, padding=None):
"""
Convert a number to a string, using the given alphabet.
The output has the most significant digit first.
"""
output = ""
alpha_len = len(alphabet)
while number:
number, digit = divmod(number, alpha_len)
output += alphabet[digit]
if padding:
remainder = max(padding - len(output), 0)
output = output + alphabet[0] * remainder
return output[::-1] | [
"def",
"int_to_string",
"(",
"number",
",",
"alphabet",
",",
"padding",
"=",
"None",
")",
":",
"output",
"=",
"\"\"",
"alpha_len",
"=",
"len",
"(",
"alphabet",
")",
"while",
"number",
":",
"number",
",",
"digit",
"=",
"divmod",
"(",
"number",
",",
"alpha_len",
")",
"output",
"+=",
"alphabet",
"[",
"digit",
"]",
"if",
"padding",
":",
"remainder",
"=",
"max",
"(",
"padding",
"-",
"len",
"(",
"output",
")",
",",
"0",
")",
"output",
"=",
"output",
"+",
"alphabet",
"[",
"0",
"]",
"*",
"remainder",
"return",
"output",
"[",
":",
":",
"-",
"1",
"]"
]
| Convert a number to a string, using the given alphabet.
The output has the most significant digit first. | [
"Convert",
"a",
"number",
"to",
"a",
"string",
"using",
"the",
"given",
"alphabet",
".",
"The",
"output",
"has",
"the",
"most",
"significant",
"digit",
"first",
"."
]
| 4da632a986c3a43f75c7df64f27a90bbf7ff8039 | https://github.com/skorokithakis/shortuuid/blob/4da632a986c3a43f75c7df64f27a90bbf7ff8039/shortuuid/main.py#L9-L22 | train |
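A quick worked example of the encoder above, using a hexadecimal alphabet of my choosing; padding left-fills with the alphabet's first symbol.
from shortuuid.main import int_to_string

hexabet = "0123456789abcdef"
assert int_to_string(255, hexabet) == "ff"                # 255 = 15*16 + 15
assert int_to_string(255, hexabet, padding=4) == "00ff"   # left-filled with "0"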
skorokithakis/shortuuid | shortuuid/main.py | string_to_int | def string_to_int(string, alphabet):
"""
Convert a string to a number, using the given alphabet.
The input is assumed to have the most significant digit first.
"""
number = 0
alpha_len = len(alphabet)
for char in string:
number = number * alpha_len + alphabet.index(char)
return number | python | def string_to_int(string, alphabet):
"""
Convert a string to a number, using the given alphabet.
The input is assumed to have the most significant digit first.
"""
number = 0
alpha_len = len(alphabet)
for char in string:
number = number * alpha_len + alphabet.index(char)
return number | [
"def",
"string_to_int",
"(",
"string",
",",
"alphabet",
")",
":",
"number",
"=",
"0",
"alpha_len",
"=",
"len",
"(",
"alphabet",
")",
"for",
"char",
"in",
"string",
":",
"number",
"=",
"number",
"*",
"alpha_len",
"+",
"alphabet",
".",
"index",
"(",
"char",
")",
"return",
"number"
]
| Convert a string to a number, using the given alphabet.
The input is assumed to have the most significant digit first. | [
"Convert",
"a",
"string",
"to",
"a",
"number",
"using",
"the",
"given",
"alphabet",
".",
"The",
"input",
"is",
"assumed",
"to",
"have",
"the",
"most",
"significant",
"digit",
"first",
"."
]
| 4da632a986c3a43f75c7df64f27a90bbf7ff8039 | https://github.com/skorokithakis/shortuuid/blob/4da632a986c3a43f75c7df64f27a90bbf7ff8039/shortuuid/main.py#L25-L34 | train |
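The decoder is the inverse of int_to_string above; a short round trip with the same illustrative hexadecimal alphabet:
from shortuuid.main import int_to_string, string_to_int

hexabet = "0123456789abcdef"
assert string_to_int("00ff", hexabet) == 255
assert string_to_int(int_to_string(4095, hexabet), hexabet) == 4095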
skorokithakis/shortuuid | shortuuid/main.py | ShortUUID.decode | def decode(self, string, legacy=False):
"""
Decode a string according to the current alphabet into a UUID
Raises ValueError when encountering illegal characters
or a too-long string.
If string too short, fills leftmost (MSB) bits with 0.
Pass `legacy=True` if your UUID was encoded with a ShortUUID version
prior to 0.6.0.
"""
if legacy:
string = string[::-1]
return _uu.UUID(int=string_to_int(string, self._alphabet)) | python | def decode(self, string, legacy=False):
"""
Decode a string according to the current alphabet into a UUID
Raises ValueError when encountering illegal characters
or a too-long string.
If string too short, fills leftmost (MSB) bits with 0.
Pass `legacy=True` if your UUID was encoded with a ShortUUID version
prior to 0.6.0.
"""
if legacy:
string = string[::-1]
return _uu.UUID(int=string_to_int(string, self._alphabet)) | [
"def",
"decode",
"(",
"self",
",",
"string",
",",
"legacy",
"=",
"False",
")",
":",
"if",
"legacy",
":",
"string",
"=",
"string",
"[",
":",
":",
"-",
"1",
"]",
"return",
"_uu",
".",
"UUID",
"(",
"int",
"=",
"string_to_int",
"(",
"string",
",",
"self",
".",
"_alphabet",
")",
")"
]
| Decode a string according to the current alphabet into a UUID
Raises ValueError when encountering illegal characters
or a too-long string.
If string too short, fills leftmost (MSB) bits with 0.
Pass `legacy=True` if your UUID was encoded with a ShortUUID version
prior to 0.6.0. | [
"Decode",
"a",
"string",
"according",
"to",
"the",
"current",
"alphabet",
"into",
"a",
"UUID",
"Raises",
"ValueError",
"when",
"encountering",
"illegal",
"characters",
"or",
"a",
"too",
"-",
"long",
"string",
"."
]
| 4da632a986c3a43f75c7df64f27a90bbf7ff8039 | https://github.com/skorokithakis/shortuuid/blob/4da632a986c3a43f75c7df64f27a90bbf7ff8039/shortuuid/main.py#L62-L75 | train |
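A hedged round-trip sketch for decode(): it assumes the class's companion encode() method (not shown in this record) produced the string being decoded.
import uuid
from shortuuid.main import ShortUUID

su = ShortUUID()
original = uuid.uuid4()
short = su.encode(original)      # assumed companion method, not shown above
assert su.decode(short) == original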
skorokithakis/shortuuid | shortuuid/main.py | ShortUUID.set_alphabet | def set_alphabet(self, alphabet):
"""Set the alphabet to be used for new UUIDs."""
# Turn the alphabet into a set and sort it to prevent duplicates
# and ensure reproducibility.
new_alphabet = list(sorted(set(alphabet)))
if len(new_alphabet) > 1:
self._alphabet = new_alphabet
self._alpha_len = len(self._alphabet)
else:
raise ValueError("Alphabet with more than " "one unique symbols required.") | python | def set_alphabet(self, alphabet):
"""Set the alphabet to be used for new UUIDs."""
# Turn the alphabet into a set and sort it to prevent duplicates
# and ensure reproducibility.
new_alphabet = list(sorted(set(alphabet)))
if len(new_alphabet) > 1:
self._alphabet = new_alphabet
self._alpha_len = len(self._alphabet)
else:
raise ValueError("Alphabet with more than " "one unique symbols required.") | [
"def",
"set_alphabet",
"(",
"self",
",",
"alphabet",
")",
":",
"# Turn the alphabet into a set and sort it to prevent duplicates",
"# and ensure reproducibility.",
"new_alphabet",
"=",
"list",
"(",
"sorted",
"(",
"set",
"(",
"alphabet",
")",
")",
")",
"if",
"len",
"(",
"new_alphabet",
")",
">",
"1",
":",
"self",
".",
"_alphabet",
"=",
"new_alphabet",
"self",
".",
"_alpha_len",
"=",
"len",
"(",
"self",
".",
"_alphabet",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Alphabet with more than \"",
"\"one unique symbols required.\"",
")"
]
| Set the alphabet to be used for new UUIDs. | [
"Set",
"the",
"alphabet",
"to",
"be",
"used",
"for",
"new",
"UUIDs",
"."
]
| 4da632a986c3a43f75c7df64f27a90bbf7ff8039 | https://github.com/skorokithakis/shortuuid/blob/4da632a986c3a43f75c7df64f27a90bbf7ff8039/shortuuid/main.py#L111-L121 | train |
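A small usage sketch of set_alphabet(): the alphabet is stored deduplicated and sorted, and a single-symbol alphabet is rejected, exactly as the body above shows.
from shortuuid.main import ShortUUID

su = ShortUUID()
su.set_alphabet("zzaabb")        # kept as the sorted unique symbols a, b, z
try:
    su.set_alphabet("aaaa")      # only one unique symbol -> rejected
except ValueError:
    pass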
skorokithakis/shortuuid | shortuuid/main.py | ShortUUID.encoded_length | def encoded_length(self, num_bytes=16):
"""
Returns the string length of the shortened UUID.
"""
factor = math.log(256) / math.log(self._alpha_len)
return int(math.ceil(factor * num_bytes)) | python | def encoded_length(self, num_bytes=16):
"""
Returns the string length of the shortened UUID.
"""
factor = math.log(256) / math.log(self._alpha_len)
return int(math.ceil(factor * num_bytes)) | [
"def",
"encoded_length",
"(",
"self",
",",
"num_bytes",
"=",
"16",
")",
":",
"factor",
"=",
"math",
".",
"log",
"(",
"256",
")",
"/",
"math",
".",
"log",
"(",
"self",
".",
"_alpha_len",
")",
"return",
"int",
"(",
"math",
".",
"ceil",
"(",
"factor",
"*",
"num_bytes",
")",
")"
]
| Returns the string length of the shortened UUID. | [
"Returns",
"the",
"string",
"length",
"of",
"the",
"shortened",
"UUID",
"."
]
| 4da632a986c3a43f75c7df64f27a90bbf7ff8039 | https://github.com/skorokithakis/shortuuid/blob/4da632a986c3a43f75c7df64f27a90bbf7ff8039/shortuuid/main.py#L123-L128 | train |
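Worked numbers for encoded_length(), assuming the constructor accepts an alphabet argument and that the default alphabet has 57 symbols (shortuuid's documented default): a 16-byte UUID then needs ceil(16 * log(256)/log(57)) = 22 symbols, while a 16-symbol alphabet needs exactly 32 (two per byte).
from shortuuid.main import ShortUUID

assert ShortUUID().encoded_length() == 22
assert ShortUUID(alphabet="0123456789abcdef").encoded_length() == 32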
quarkslab/arybo | arybo/lib/exprs_asm.py | asm_module | def asm_module(exprs, dst_reg, sym_to_reg, triple_or_target=None):
'''
Generate an LLVM module for a list of expressions
Arguments:
* See :meth:`arybo.lib.exprs_asm.asm_binary` for a description of the list of arguments
Output:
* An LLVM module with one function named "__arybo", containing the
translated expression.
See :meth:`arybo.lib.exprs_asm.asm_binary` for an usage example.
'''
if not llvmlite_available:
raise RuntimeError("llvmlite module unavailable! can't assemble...")
target = llvm_get_target(triple_or_target)
M = ll.Module()
fntype = ll.FunctionType(ll.VoidType(), [])
func = ll.Function(M, fntype, name='__arybo')
func.attributes.add("naked")
func.attributes.add("nounwind")
BB = func.append_basic_block()
IRB = ll.IRBuilder()
IRB.position_at_end(BB)
sym_to_value = {sym: IRB.load_reg(IntType(reg[1]), reg[0], reg[0]) for sym,reg in six.iteritems(sym_to_reg)}
ret = to_llvm_ir(exprs, sym_to_value, IRB)
IRB.store_reg(ret, IntType(dst_reg[1]), dst_reg[0])
# See https://llvm.org/bugs/show_bug.cgi?id=15806
IRB.unreachable()
return M | python | def asm_module(exprs, dst_reg, sym_to_reg, triple_or_target=None):
'''
Generate an LLVM module for a list of expressions
Arguments:
* See :meth:`arybo.lib.exprs_asm.asm_binary` for a description of the list of arguments
Output:
* An LLVM module with one function named "__arybo", containing the
translated expression.
See :meth:`arybo.lib.exprs_asm.asm_binary` for an usage example.
'''
if not llvmlite_available:
raise RuntimeError("llvmlite module unavailable! can't assemble...")
target = llvm_get_target(triple_or_target)
M = ll.Module()
fntype = ll.FunctionType(ll.VoidType(), [])
func = ll.Function(M, fntype, name='__arybo')
func.attributes.add("naked")
func.attributes.add("nounwind")
BB = func.append_basic_block()
IRB = ll.IRBuilder()
IRB.position_at_end(BB)
sym_to_value = {sym: IRB.load_reg(IntType(reg[1]), reg[0], reg[0]) for sym,reg in six.iteritems(sym_to_reg)}
ret = to_llvm_ir(exprs, sym_to_value, IRB)
IRB.store_reg(ret, IntType(dst_reg[1]), dst_reg[0])
# See https://llvm.org/bugs/show_bug.cgi?id=15806
IRB.unreachable()
return M | [
"def",
"asm_module",
"(",
"exprs",
",",
"dst_reg",
",",
"sym_to_reg",
",",
"triple_or_target",
"=",
"None",
")",
":",
"if",
"not",
"llvmlite_available",
":",
"raise",
"RuntimeError",
"(",
"\"llvmlite module unavailable! can't assemble...\"",
")",
"target",
"=",
"llvm_get_target",
"(",
"triple_or_target",
")",
"M",
"=",
"ll",
".",
"Module",
"(",
")",
"fntype",
"=",
"ll",
".",
"FunctionType",
"(",
"ll",
".",
"VoidType",
"(",
")",
",",
"[",
"]",
")",
"func",
"=",
"ll",
".",
"Function",
"(",
"M",
",",
"fntype",
",",
"name",
"=",
"'__arybo'",
")",
"func",
".",
"attributes",
".",
"add",
"(",
"\"naked\"",
")",
"func",
".",
"attributes",
".",
"add",
"(",
"\"nounwind\"",
")",
"BB",
"=",
"func",
".",
"append_basic_block",
"(",
")",
"IRB",
"=",
"ll",
".",
"IRBuilder",
"(",
")",
"IRB",
".",
"position_at_end",
"(",
"BB",
")",
"sym_to_value",
"=",
"{",
"sym",
":",
"IRB",
".",
"load_reg",
"(",
"IntType",
"(",
"reg",
"[",
"1",
"]",
")",
",",
"reg",
"[",
"0",
"]",
",",
"reg",
"[",
"0",
"]",
")",
"for",
"sym",
",",
"reg",
"in",
"six",
".",
"iteritems",
"(",
"sym_to_reg",
")",
"}",
"ret",
"=",
"to_llvm_ir",
"(",
"exprs",
",",
"sym_to_value",
",",
"IRB",
")",
"IRB",
".",
"store_reg",
"(",
"ret",
",",
"IntType",
"(",
"dst_reg",
"[",
"1",
"]",
")",
",",
"dst_reg",
"[",
"0",
"]",
")",
"# See https://llvm.org/bugs/show_bug.cgi?id=15806",
"IRB",
".",
"unreachable",
"(",
")",
"return",
"M"
]
| Generate an LLVM module for a list of expressions
Arguments:
* See :meth:`arybo.lib.exprs_asm.asm_binary` for a description of the list of arguments
Output:
* An LLVM module with one function named "__arybo", containing the
translated expression.
See :meth:`arybo.lib.exprs_asm.asm_binary` for an usage example. | [
"Generate",
"an",
"LLVM",
"module",
"for",
"a",
"list",
"of",
"expressions"
]
| 04fad817090b3b9f2328a5e984457aba6024e971 | https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/exprs_asm.py#L225-L261 | train |
quarkslab/arybo | arybo/lib/exprs_asm.py | asm_binary | def asm_binary(exprs, dst_reg, sym_to_reg, triple_or_target=None):
'''
Compile and assemble an expression for a given architecture.
Arguments:
* *exprs*: list of expressions to convert. This can represent a graph of
expressions.
* *dst_reg*: final register on which to store the result of the last
expression. This is represented by a tuple ("reg_name", reg_size_bits).
Example: ("rax", 64)
* *sym_to_reg*: a dictionnary that maps Arybo variable name to registers
(described as tuple, see *dst_reg*). Example: {"x": ("rdi",64), "y": ("rsi", 64)}
* *triple_or_target*: LLVM architecture triple to use. Use by default the
host architecture. Example: "x86_64-unknown-unknown"
Output:
* binary stream of the assembled expression for the given target
Here is an example that will compile and assemble "x+y" for x86_64::
from arybo.lib import MBA
from arybo.lib import mba_exprs
from arybo.lib.exprs_asm import asm_binary
mba = MBA(64)
x = mba.var("x")
y = mba.var("y")
e = mba_exprs.ExprBV(x) + mba_exprs.ExprBV(y)
code = asm_binary([e], ("rax", 64), {"x": ("rdi", 64), "y": ("rsi", 64)}, "x86_64-unknown-unknown")
print(code.hex())
which outputs ``488d0437`` (which is equivalent to ``lea rax,[rdi+rsi*1]``).
'''
if not llvmlite_available:
raise RuntimeError("llvmlite module unavailable! can't assemble...")
target = llvm_get_target(triple_or_target)
M = asm_module(exprs, dst_reg, sym_to_reg, target)
# Use LLVM to compile the '__arybo' function. As the function is naked and
# is the only, we just got to dump the .text section to get the binary
# assembly.
# No need for keystone or whatever hype stuff. llvmlite does the job.
M = llvm.parse_assembly(str(M))
M.verify()
target_machine = target.create_target_machine()
obj_bin = target_machine.emit_object(M)
obj = llvm.ObjectFileRef.from_data(obj_bin)
for s in obj.sections():
if s.is_text():
return s.data()
raise RuntimeError("unable to get the assembled binary!") | python | def asm_binary(exprs, dst_reg, sym_to_reg, triple_or_target=None):
'''
Compile and assemble an expression for a given architecture.
Arguments:
* *exprs*: list of expressions to convert. This can represent a graph of
expressions.
* *dst_reg*: final register on which to store the result of the last
expression. This is represented by a tuple ("reg_name", reg_size_bits).
Example: ("rax", 64)
* *sym_to_reg*: a dictionnary that maps Arybo variable name to registers
(described as tuple, see *dst_reg*). Example: {"x": ("rdi",64), "y": ("rsi", 64)}
* *triple_or_target*: LLVM architecture triple to use. Use by default the
host architecture. Example: "x86_64-unknown-unknown"
Output:
* binary stream of the assembled expression for the given target
Here is an example that will compile and assemble "x+y" for x86_64::
from arybo.lib import MBA
from arybo.lib import mba_exprs
from arybo.lib.exprs_asm import asm_binary
mba = MBA(64)
x = mba.var("x")
y = mba.var("y")
e = mba_exprs.ExprBV(x) + mba_exprs.ExprBV(y)
code = asm_binary([e], ("rax", 64), {"x": ("rdi", 64), "y": ("rsi", 64)}, "x86_64-unknown-unknown")
print(code.hex())
which outputs ``488d0437`` (which is equivalent to ``lea rax,[rdi+rsi*1]``).
'''
if not llvmlite_available:
raise RuntimeError("llvmlite module unavailable! can't assemble...")
target = llvm_get_target(triple_or_target)
M = asm_module(exprs, dst_reg, sym_to_reg, target)
# Use LLVM to compile the '__arybo' function. As the function is naked and
# is the only, we just got to dump the .text section to get the binary
# assembly.
# No need for keystone or whatever hype stuff. llvmlite does the job.
M = llvm.parse_assembly(str(M))
M.verify()
target_machine = target.create_target_machine()
obj_bin = target_machine.emit_object(M)
obj = llvm.ObjectFileRef.from_data(obj_bin)
for s in obj.sections():
if s.is_text():
return s.data()
raise RuntimeError("unable to get the assembled binary!") | [
"def",
"asm_binary",
"(",
"exprs",
",",
"dst_reg",
",",
"sym_to_reg",
",",
"triple_or_target",
"=",
"None",
")",
":",
"if",
"not",
"llvmlite_available",
":",
"raise",
"RuntimeError",
"(",
"\"llvmlite module unavailable! can't assemble...\"",
")",
"target",
"=",
"llvm_get_target",
"(",
"triple_or_target",
")",
"M",
"=",
"asm_module",
"(",
"exprs",
",",
"dst_reg",
",",
"sym_to_reg",
",",
"target",
")",
"# Use LLVM to compile the '__arybo' function. As the function is naked and",
"# is the only, we just got to dump the .text section to get the binary",
"# assembly.",
"# No need for keystone or whatever hype stuff. llvmlite does the job.",
"M",
"=",
"llvm",
".",
"parse_assembly",
"(",
"str",
"(",
"M",
")",
")",
"M",
".",
"verify",
"(",
")",
"target_machine",
"=",
"target",
".",
"create_target_machine",
"(",
")",
"obj_bin",
"=",
"target_machine",
".",
"emit_object",
"(",
"M",
")",
"obj",
"=",
"llvm",
".",
"ObjectFileRef",
".",
"from_data",
"(",
"obj_bin",
")",
"for",
"s",
"in",
"obj",
".",
"sections",
"(",
")",
":",
"if",
"s",
".",
"is_text",
"(",
")",
":",
"return",
"s",
".",
"data",
"(",
")",
"raise",
"RuntimeError",
"(",
"\"unable to get the assembled binary!\"",
")"
]
| Compile and assemble an expression for a given architecture.
Arguments:
* *exprs*: list of expressions to convert. This can represent a graph of
expressions.
* *dst_reg*: final register on which to store the result of the last
expression. This is represented by a tuple ("reg_name", reg_size_bits).
Example: ("rax", 64)
* *sym_to_reg*: a dictionnary that maps Arybo variable name to registers
(described as tuple, see *dst_reg*). Example: {"x": ("rdi",64), "y": ("rsi", 64)}
* *triple_or_target*: LLVM architecture triple to use. Use by default the
host architecture. Example: "x86_64-unknown-unknown"
Output:
* binary stream of the assembled expression for the given target
Here is an example that will compile and assemble "x+y" for x86_64::
from arybo.lib import MBA
from arybo.lib import mba_exprs
from arybo.lib.exprs_asm import asm_binary
mba = MBA(64)
x = mba.var("x")
y = mba.var("y")
e = mba_exprs.ExprBV(x) + mba_exprs.ExprBV(y)
code = asm_binary([e], ("rax", 64), {"x": ("rdi", 64), "y": ("rsi", 64)}, "x86_64-unknown-unknown")
print(code.hex())
which outputs ``488d0437`` (which is equivalent to ``lea rax,[rdi+rsi*1]``). | [
"Compile",
"and",
"assemble",
"an",
"expression",
"for",
"a",
"given",
"architecture",
"."
]
| 04fad817090b3b9f2328a5e984457aba6024e971 | https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/exprs_asm.py#L263-L314 | train |
quarkslab/arybo | arybo/lib/mba_if.py | expr_contains | def expr_contains(e, o):
''' Returns true if o is in e '''
if o == e:
return True
if e.has_args():
for a in e.args():
if expr_contains(a, o):
return True
return False | python | def expr_contains(e, o):
''' Returns true if o is in e '''
if o == e:
return True
if e.has_args():
for a in e.args():
if expr_contains(a, o):
return True
return False | [
"def",
"expr_contains",
"(",
"e",
",",
"o",
")",
":",
"if",
"o",
"==",
"e",
":",
"return",
"True",
"if",
"e",
".",
"has_args",
"(",
")",
":",
"for",
"a",
"in",
"e",
".",
"args",
"(",
")",
":",
"if",
"expr_contains",
"(",
"a",
",",
"o",
")",
":",
"return",
"True",
"return",
"False"
]
| Returns true if o is in e | [
"Returns",
"true",
"if",
"o",
"is",
"in",
"e"
]
| 04fad817090b3b9f2328a5e984457aba6024e971 | https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/mba_if.py#L36-L44 | train |
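A stand-in illustration of the walk above: any object exposing has_args()/args() in the same way will do. The Node class here is mine, not arybo's expression API; only the import of expr_contains follows the path shown in this record.
from arybo.lib.mba_if import expr_contains

class Node:
    def __init__(self, name, *children):
        self.name, self.children = name, list(children)
    def has_args(self):
        return bool(self.children)
    def args(self):
        return self.children
    def __eq__(self, other):
        return (isinstance(other, Node) and self.name == other.name
                and self.children == other.children)

x, y = Node("x"), Node("y")
e = Node("+", Node("*", x, y), x)      # e = x*y + x
assert expr_contains(e, y)             # y occurs inside the product
assert not expr_contains(e, Node("z"))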
quarkslab/arybo | arybo/lib/mba_if.py | MBAVariable.zext | def zext(self, n):
''' Zero-extend the variable to n bits.
n bits must be stricly larger than the actual number of bits, or a
ValueError is thrown
'''
if n <= self.nbits:
raise ValueError("n must be > %d bits" % self.nbits)
mba_ret = self.__new_mba(n)
ret = mba_ret.from_cst(0)
for i in range(self.nbits):
ret.vec[i] = self.vec[i]
return mba_ret.from_vec(ret) | python | def zext(self, n):
''' Zero-extend the variable to n bits.
n bits must be stricly larger than the actual number of bits, or a
ValueError is thrown
'''
if n <= self.nbits:
raise ValueError("n must be > %d bits" % self.nbits)
mba_ret = self.__new_mba(n)
ret = mba_ret.from_cst(0)
for i in range(self.nbits):
ret.vec[i] = self.vec[i]
return mba_ret.from_vec(ret) | [
"def",
"zext",
"(",
"self",
",",
"n",
")",
":",
"if",
"n",
"<=",
"self",
".",
"nbits",
":",
"raise",
"ValueError",
"(",
"\"n must be > %d bits\"",
"%",
"self",
".",
"nbits",
")",
"mba_ret",
"=",
"self",
".",
"__new_mba",
"(",
"n",
")",
"ret",
"=",
"mba_ret",
".",
"from_cst",
"(",
"0",
")",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"nbits",
")",
":",
"ret",
".",
"vec",
"[",
"i",
"]",
"=",
"self",
".",
"vec",
"[",
"i",
"]",
"return",
"mba_ret",
".",
"from_vec",
"(",
"ret",
")"
]
| Zero-extend the variable to n bits.
n bits must be stricly larger than the actual number of bits, or a
ValueError is thrown | [
"Zero",
"-",
"extend",
"the",
"variable",
"to",
"n",
"bits",
".",
"n",
"bits",
"must",
"be",
"stricly",
"larger",
"than",
"the",
"actual",
"number",
"of",
"bits",
"or",
"a",
"ValueError",
"is",
"thrown"
]
| 04fad817090b3b9f2328a5e984457aba6024e971 | https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/mba_if.py#L255-L269 | train |
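Usage sketch for zext(): widen a 4-bit variable to 8 bits (the upper bits become the constant 0), and note that a non-larger target width raises ValueError per the guard above. The import mirrors the MBA examples already quoted elsewhere in these records.
from arybo.lib import MBA

mba = MBA(4)
x = mba.var('x')
x8 = x.zext(8)       # 8-bit result; bits 4..7 are the constant 0
try:
    x.zext(4)        # not strictly larger than 4 bits
except ValueError:
    pass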
quarkslab/arybo | arybo/lib/mba_if.py | MBAVariable.sext | def sext(self, n):
''' Sign-extend the variable to n bits.
n bits must be stricly larger than the actual number of bits, or a
ValueError is thrown
'''
if n <= self.nbits:
raise ValueError("n must be > %d bits" % self.nbits)
mba_ret = self.__new_mba(n)
ret = mba_ret.from_cst(0)
for i in range(self.nbits):
ret.vec[i] = self.vec[i]
last_bit = self.vec[self.nbits-1]
for i in range(self.nbits,n):
ret.vec[i] = last_bit
return mba_ret.from_vec(ret) | python | def sext(self, n):
''' Sign-extend the variable to n bits.
n bits must be stricly larger than the actual number of bits, or a
ValueError is thrown
'''
if n <= self.nbits:
raise ValueError("n must be > %d bits" % self.nbits)
mba_ret = self.__new_mba(n)
ret = mba_ret.from_cst(0)
for i in range(self.nbits):
ret.vec[i] = self.vec[i]
last_bit = self.vec[self.nbits-1]
for i in range(self.nbits,n):
ret.vec[i] = last_bit
return mba_ret.from_vec(ret) | [
"def",
"sext",
"(",
"self",
",",
"n",
")",
":",
"if",
"n",
"<=",
"self",
".",
"nbits",
":",
"raise",
"ValueError",
"(",
"\"n must be > %d bits\"",
"%",
"self",
".",
"nbits",
")",
"mba_ret",
"=",
"self",
".",
"__new_mba",
"(",
"n",
")",
"ret",
"=",
"mba_ret",
".",
"from_cst",
"(",
"0",
")",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"nbits",
")",
":",
"ret",
".",
"vec",
"[",
"i",
"]",
"=",
"self",
".",
"vec",
"[",
"i",
"]",
"last_bit",
"=",
"self",
".",
"vec",
"[",
"self",
".",
"nbits",
"-",
"1",
"]",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"nbits",
",",
"n",
")",
":",
"ret",
".",
"vec",
"[",
"i",
"]",
"=",
"last_bit",
"return",
"mba_ret",
".",
"from_vec",
"(",
"ret",
")"
]
| Sign-extend the variable to n bits.
n bits must be stricly larger than the actual number of bits, or a
ValueError is thrown | [
"Sign",
"-",
"extend",
"the",
"variable",
"to",
"n",
"bits",
".",
"n",
"bits",
"must",
"be",
"stricly",
"larger",
"than",
"the",
"actual",
"number",
"of",
"bits",
"or",
"a",
"ValueError",
"is",
"thrown"
]
| 04fad817090b3b9f2328a5e984457aba6024e971 | https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/mba_if.py#L271-L288 | train |
quarkslab/arybo | arybo/lib/mba_if.py | MBAVariable.evaluate | def evaluate(self, values):
''' Evaluates the expression to an integer
values is a dictionnary that associates n-bit variables to integer
values. Every symbolic variables used in the expression must be
represented.
For instance, let x and y 4-bit variables, and e = x+y:
>>> mba = MBA(4)
>>> x = mba.var('x')
>>> y = mba.var('y')
>>> e = x+y
To evaluate e with x=4 and y=5, we can do:
>>> e.eval({x: 4, y: 5})
9
If a variable is missing from values, an exception will occur. (x
or y in the example above)
'''
ret = self.mba.evaluate(self.vec, values)
if isinstance(ret, six.integer_types):
return ret
return self.from_vec(self.mba, ret) | python | def evaluate(self, values):
''' Evaluates the expression to an integer
values is a dictionnary that associates n-bit variables to integer
values. Every symbolic variables used in the expression must be
represented.
For instance, let x and y 4-bit variables, and e = x+y:
>>> mba = MBA(4)
>>> x = mba.var('x')
>>> y = mba.var('y')
>>> e = x+y
To evaluate e with x=4 and y=5, we can do:
>>> e.eval({x: 4, y: 5})
9
If a variable is missing from values, an exception will occur. (x
or y in the example above)
'''
ret = self.mba.evaluate(self.vec, values)
if isinstance(ret, six.integer_types):
return ret
return self.from_vec(self.mba, ret) | [
"def",
"evaluate",
"(",
"self",
",",
"values",
")",
":",
"ret",
"=",
"self",
".",
"mba",
".",
"evaluate",
"(",
"self",
".",
"vec",
",",
"values",
")",
"if",
"isinstance",
"(",
"ret",
",",
"six",
".",
"integer_types",
")",
":",
"return",
"ret",
"return",
"self",
".",
"from_vec",
"(",
"self",
".",
"mba",
",",
"ret",
")"
]
| Evaluates the expression to an integer
values is a dictionnary that associates n-bit variables to integer
values. Every symbolic variables used in the expression must be
represented.
For instance, let x and y 4-bit variables, and e = x+y:
>>> mba = MBA(4)
>>> x = mba.var('x')
>>> y = mba.var('y')
>>> e = x+y
To evaluate e with x=4 and y=5, we can do:
>>> e.eval({x: 4, y: 5})
9
If a variable is missing from values, an exception will occur. (x
or y in the example above) | [
"Evaluates",
"the",
"expression",
"to",
"an",
"integer"
]
| 04fad817090b3b9f2328a5e984457aba6024e971 | https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/mba_if.py#L290-L315 | train |
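A second small evaluation, sketched on top of the docstring's pattern: with 4-bit variables, x ^ y at x=0b1100, y=0b1010 gives 0b0110 == 6. I use the evaluate() spelling defined above; the docstring's eval() may simply be an alias.
from arybo.lib import MBA

mba = MBA(4)
x = mba.var('x')
y = mba.var('y')
e = x ^ y
assert e.evaluate({x: 0b1100, y: 0b1010}) == 6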
quarkslab/arybo | arybo/lib/mba_if.py | MBAVariable.vectorial_decomp | def vectorial_decomp(self, symbols):
''' Compute the vectorial decomposition of the expression according to the given symbols.
symbols is a list that represents the input of the resulting
application. They are considerated as a flatten vector of bits.
Args:
symbols: TODO
Returns:
An :class:`pytanque.App` object
Example:
>>> mba = MBA(4)
>>> x = mba.var('x')
>>> y = mba.var('y')
>>> e = x^y^6
>>> e.vectorial_decomp([x,y])
App NL = Vec([
0,
0,
0,
0
])
AffApp matrix = Mat([
[1, 0, 0, 0, 1, 0, 0, 0]
[0, 1, 0, 0, 0, 1, 0, 0]
[0, 0, 1, 0, 0, 0, 1, 0]
[0, 0, 0, 1, 0, 0, 0, 1]
])
AffApp cst = Vec([
0,
1,
1,
0
])
'''
try:
symbols = [s.vec for s in symbols]
N = sum(map(lambda s: len(s), symbols))
symbols_ = Vector(N)
i = 0
for v in symbols:
for s in v:
symbols_[i] = s
i += 1
symbols = symbols_
except TypeError: pass
return self.mba.vectorial_decomp(symbols, self.vec) | python | def vectorial_decomp(self, symbols):
''' Compute the vectorial decomposition of the expression according to the given symbols.
symbols is a list that represents the input of the resulting
application. They are considerated as a flatten vector of bits.
Args:
symbols: TODO
Returns:
An :class:`pytanque.App` object
Example:
>>> mba = MBA(4)
>>> x = mba.var('x')
>>> y = mba.var('y')
>>> e = x^y^6
>>> e.vectorial_decomp([x,y])
App NL = Vec([
0,
0,
0,
0
])
AffApp matrix = Mat([
[1, 0, 0, 0, 1, 0, 0, 0]
[0, 1, 0, 0, 0, 1, 0, 0]
[0, 0, 1, 0, 0, 0, 1, 0]
[0, 0, 0, 1, 0, 0, 0, 1]
])
AffApp cst = Vec([
0,
1,
1,
0
])
'''
try:
symbols = [s.vec for s in symbols]
N = sum(map(lambda s: len(s), symbols))
symbols_ = Vector(N)
i = 0
for v in symbols:
for s in v:
symbols_[i] = s
i += 1
symbols = symbols_
except TypeError: pass
return self.mba.vectorial_decomp(symbols, self.vec) | [
"def",
"vectorial_decomp",
"(",
"self",
",",
"symbols",
")",
":",
"try",
":",
"symbols",
"=",
"[",
"s",
".",
"vec",
"for",
"s",
"in",
"symbols",
"]",
"N",
"=",
"sum",
"(",
"map",
"(",
"lambda",
"s",
":",
"len",
"(",
"s",
")",
",",
"symbols",
")",
")",
"symbols_",
"=",
"Vector",
"(",
"N",
")",
"i",
"=",
"0",
"for",
"v",
"in",
"symbols",
":",
"for",
"s",
"in",
"v",
":",
"symbols_",
"[",
"i",
"]",
"=",
"s",
"i",
"+=",
"1",
"symbols",
"=",
"symbols_",
"except",
"TypeError",
":",
"pass",
"return",
"self",
".",
"mba",
".",
"vectorial_decomp",
"(",
"symbols",
",",
"self",
".",
"vec",
")"
]
| Compute the vectorial decomposition of the expression according to the given symbols.
symbols is a list that represents the input of the resulting
application. They are considerated as a flatten vector of bits.
Args:
symbols: TODO
Returns:
An :class:`pytanque.App` object
Example:
>>> mba = MBA(4)
>>> x = mba.var('x')
>>> y = mba.var('y')
>>> e = x^y^6
>>> e.vectorial_decomp([x,y])
App NL = Vec([
0,
0,
0,
0
])
AffApp matrix = Mat([
[1, 0, 0, 0, 1, 0, 0, 0]
[0, 1, 0, 0, 0, 1, 0, 0]
[0, 0, 1, 0, 0, 0, 1, 0]
[0, 0, 0, 1, 0, 0, 0, 1]
])
AffApp cst = Vec([
0,
1,
1,
0
]) | [
"Compute",
"the",
"vectorial",
"decomposition",
"of",
"the",
"expression",
"according",
"to",
"the",
"given",
"symbols",
"."
]
| 04fad817090b3b9f2328a5e984457aba6024e971 | https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/mba_if.py#L338-L386 | train |
quarkslab/arybo | arybo/lib/mba_if.py | MBA.var | def var(self, name):
''' Get an n-bit named symbolic variable
Returns:
An :class:`MBAVariable` object representing a symbolic variable
Example:
>>> mba.var('x')
Vec([
x0,
x1,
x2,
x3
])
'''
ret = self.from_vec(self.var_symbols(name))
ret.name = name
return ret | python | def var(self, name):
''' Get an n-bit named symbolic variable
Returns:
An :class:`MBAVariable` object representing a symbolic variable
Example:
>>> mba.var('x')
Vec([
x0,
x1,
x2,
x3
])
'''
ret = self.from_vec(self.var_symbols(name))
ret.name = name
return ret | [
"def",
"var",
"(",
"self",
",",
"name",
")",
":",
"ret",
"=",
"self",
".",
"from_vec",
"(",
"self",
".",
"var_symbols",
"(",
"name",
")",
")",
"ret",
".",
"name",
"=",
"name",
"return",
"ret"
]
| Get an n-bit named symbolic variable
Returns:
An :class:`MBAVariable` object representing a symbolic variable
Example:
>>> mba.var('x')
Vec([
x0,
x1,
x2,
x3
]) | [
"Get",
"an",
"n",
"-",
"bit",
"named",
"symbolic",
"variable"
]
| 04fad817090b3b9f2328a5e984457aba6024e971 | https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/mba_if.py#L408-L426 | train |
quarkslab/arybo | arybo/lib/mba_if.py | MBA.permut2expr | def permut2expr(self, P):
''' Convert a substitution table into an arybo application
Args:
P: list of integers. The list must not contain more than 2**nbits elements.
Returns:
A tuple containing an :class:`MBAVariable` object with the result
and the symbolic input variable used in this object. A typical use
case is to feed these into vectorial_decomp.
Example:
>>> mba = MBA(4)
>>> P = [i^7 for i in range(16)]
>>> E,X = mba.permut2expr(P)
>>> E.vectorial_decomp([X])
App NL = Vec([
0,
0,
0,
0
])
AffApp matrix = Mat([
[1, 0, 0, 0]
[0, 1, 0, 0]
[0, 0, 1, 0]
[0, 0, 0, 1]
])
AffApp cst = Vec([
1,
1,
1,
0
])
'''
if len(P) > (1<<self.nbits):
raise ValueError("P must not contain more than %d elements" % (1<<self.nbits))
X = self.var('X')
ret = super(MBA, self).permut2expr(P, X.vec)
return self.from_vec(ret), X | python | def permut2expr(self, P):
''' Convert a substitution table into an arybo application
Args:
P: list of integers. The list must not contain more than 2**nbits elements.
Returns:
A tuple containing an :class:`MBAVariable` object with the result
and the symbolic input variable used in this object. A typical use
case is to feed these into vectorial_decomp.
Example:
>>> mba = MBA(4)
>>> P = [i^7 for i in range(16)]
>>> E,X = mba.permut2expr(P)
>>> E.vectorial_decomp([X])
App NL = Vec([
0,
0,
0,
0
])
AffApp matrix = Mat([
[1, 0, 0, 0]
[0, 1, 0, 0]
[0, 0, 1, 0]
[0, 0, 0, 1]
])
AffApp cst = Vec([
1,
1,
1,
0
])
'''
if len(P) > (1<<self.nbits):
raise ValueError("P must not contain more than %d elements" % (1<<self.nbits))
X = self.var('X')
ret = super(MBA, self).permut2expr(P, X.vec)
return self.from_vec(ret), X | [
"def",
"permut2expr",
"(",
"self",
",",
"P",
")",
":",
"if",
"len",
"(",
"P",
")",
">",
"(",
"1",
"<<",
"self",
".",
"nbits",
")",
":",
"raise",
"ValueError",
"(",
"\"P must not contain more than %d elements\"",
"%",
"(",
"1",
"<<",
"self",
".",
"nbits",
")",
")",
"X",
"=",
"self",
".",
"var",
"(",
"'X'",
")",
"ret",
"=",
"super",
"(",
"MBA",
",",
"self",
")",
".",
"permut2expr",
"(",
"P",
",",
"X",
".",
"vec",
")",
"return",
"self",
".",
"from_vec",
"(",
"ret",
")",
",",
"X"
]
| Convert a substitution table into an arybo application
Args:
P: list of integers. The list must not contain more than 2**nbits elements.
Returns:
A tuple containing an :class:`MBAVariable` object with the result
and the symbolic input variable used in this object. A typical use
case is to feed these into vectorial_decomp.
Example:
>>> mba = MBA(4)
>>> P = [i^7 for i in range(16)]
>>> E,X = mba.permut2expr(P)
>>> E.vectorial_decomp([X])
App NL = Vec([
0,
0,
0,
0
])
AffApp matrix = Mat([
[1, 0, 0, 0]
[0, 1, 0, 0]
[0, 0, 1, 0]
[0, 0, 0, 1]
])
AffApp cst = Vec([
1,
1,
1,
0
]) | [
"Convert",
"a",
"substitution",
"table",
"into",
"an",
"arybo",
"application"
]
| 04fad817090b3b9f2328a5e984457aba6024e971 | https://github.com/quarkslab/arybo/blob/04fad817090b3b9f2328a5e984457aba6024e971/arybo/lib/mba_if.py#L444-L483 | train |
requests/requests-ntlm | requests_ntlm/requests_ntlm.py | HttpNtlmAuth.response_hook | def response_hook(self, r, **kwargs):
"""The actual hook handler."""
if r.status_code == 401:
# Handle server auth.
www_authenticate = r.headers.get('www-authenticate', '').lower()
auth_type = _auth_type_from_header(www_authenticate)
if auth_type is not None:
return self.retry_using_http_NTLM_auth(
'www-authenticate',
'Authorization',
r,
auth_type,
kwargs
)
elif r.status_code == 407:
# If we didn't have server auth, do proxy auth.
proxy_authenticate = r.headers.get(
'proxy-authenticate', ''
).lower()
auth_type = _auth_type_from_header(proxy_authenticate)
if auth_type is not None:
return self.retry_using_http_NTLM_auth(
'proxy-authenticate',
'Proxy-authorization',
r,
auth_type,
kwargs
)
return r | python | def response_hook(self, r, **kwargs):
"""The actual hook handler."""
if r.status_code == 401:
# Handle server auth.
www_authenticate = r.headers.get('www-authenticate', '').lower()
auth_type = _auth_type_from_header(www_authenticate)
if auth_type is not None:
return self.retry_using_http_NTLM_auth(
'www-authenticate',
'Authorization',
r,
auth_type,
kwargs
)
elif r.status_code == 407:
# If we didn't have server auth, do proxy auth.
proxy_authenticate = r.headers.get(
'proxy-authenticate', ''
).lower()
auth_type = _auth_type_from_header(proxy_authenticate)
if auth_type is not None:
return self.retry_using_http_NTLM_auth(
'proxy-authenticate',
'Proxy-authorization',
r,
auth_type,
kwargs
)
return r | [
"def",
"response_hook",
"(",
"self",
",",
"r",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"r",
".",
"status_code",
"==",
"401",
":",
"# Handle server auth.",
"www_authenticate",
"=",
"r",
".",
"headers",
".",
"get",
"(",
"'www-authenticate'",
",",
"''",
")",
".",
"lower",
"(",
")",
"auth_type",
"=",
"_auth_type_from_header",
"(",
"www_authenticate",
")",
"if",
"auth_type",
"is",
"not",
"None",
":",
"return",
"self",
".",
"retry_using_http_NTLM_auth",
"(",
"'www-authenticate'",
",",
"'Authorization'",
",",
"r",
",",
"auth_type",
",",
"kwargs",
")",
"elif",
"r",
".",
"status_code",
"==",
"407",
":",
"# If we didn't have server auth, do proxy auth.",
"proxy_authenticate",
"=",
"r",
".",
"headers",
".",
"get",
"(",
"'proxy-authenticate'",
",",
"''",
")",
".",
"lower",
"(",
")",
"auth_type",
"=",
"_auth_type_from_header",
"(",
"proxy_authenticate",
")",
"if",
"auth_type",
"is",
"not",
"None",
":",
"return",
"self",
".",
"retry_using_http_NTLM_auth",
"(",
"'proxy-authenticate'",
",",
"'Proxy-authorization'",
",",
"r",
",",
"auth_type",
",",
"kwargs",
")",
"return",
"r"
]
| The actual hook handler. | [
"The",
"actual",
"hook",
"handler",
"."
]
| f71fee60aa64c17941114d4eae40aed670a77afd | https://github.com/requests/requests-ntlm/blob/f71fee60aa64c17941114d4eae40aed670a77afd/requests_ntlm/requests_ntlm.py#L138-L168 | train |
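Typical client-side usage of requests_ntlm; the hook above is registered by HttpNtlmAuth and fires automatically on 401/407 responses. Host and credentials below are placeholders.
import requests
from requests_ntlm import HttpNtlmAuth

session = requests.Session()
session.auth = HttpNtlmAuth('EXAMPLEDOMAIN\\user', 'password')   # placeholder credentials
response = session.get('https://intranet.example.com/protected') # placeholder URL
print(response.status_code)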
esafak/mca | src/mca.py | dummy | def dummy(DF, cols=None):
"""Dummy code select columns of a DataFrame."""
dummies = (get_dummies(DF[col]) for col in
(DF.columns if cols is None else cols))
return concat(dummies, axis=1, keys=DF.columns) | python | def dummy(DF, cols=None):
"""Dummy code select columns of a DataFrame."""
dummies = (get_dummies(DF[col]) for col in
(DF.columns if cols is None else cols))
return concat(dummies, axis=1, keys=DF.columns) | [
"def",
"dummy",
"(",
"DF",
",",
"cols",
"=",
"None",
")",
":",
"dummies",
"=",
"(",
"get_dummies",
"(",
"DF",
"[",
"col",
"]",
")",
"for",
"col",
"in",
"(",
"DF",
".",
"columns",
"if",
"cols",
"is",
"None",
"else",
"cols",
")",
")",
"return",
"concat",
"(",
"dummies",
",",
"axis",
"=",
"1",
",",
"keys",
"=",
"DF",
".",
"columns",
")"
]
| Dummy code select columns of a DataFrame. | [
"Dummy",
"code",
"select",
"columns",
"of",
"a",
"DataFrame",
"."
]
| f2b79ecbf37629902ccdbad2e1a556977c53d370 | https://github.com/esafak/mca/blob/f2b79ecbf37629902ccdbad2e1a556977c53d370/src/mca.py#L30-L34 | train |
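A usage sketch for dummy(), assuming src/mca.py installs as the importable module mca; the result keeps a two-level column index keyed by the original column names.
import pandas as pd
import mca   # assumed installed module name for src/mca.py

df = pd.DataFrame({'color': ['red', 'blue', 'red'],
                   'size':  ['S', 'L', 'S']})
coded = mca.dummy(df)
# Columns come back as (original column, category) pairs, e.g.
# [('color', 'blue'), ('color', 'red'), ('size', 'L'), ('size', 'S')]
print(coded.columns.tolist())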