repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
sequence
docstring
stringlengths
3
17.3k
docstring_tokens
sequence
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
Chilipp/model-organization
model_organization/__init__.py
ModelOrganizer.projectname
def projectname(self): """The name of the project that is currently processed""" if self._projectname is None: exps = self.config.experiments if self._experiment is not None and self._experiment in exps: return exps[self._experiment]['project'] try: self._projectname = list(self.config.projects.keys())[-1] except IndexError: # no project has yet been created ever raise ValueError( "No experiment has yet been created! Please run setup " "before.") return self._projectname
python
def projectname(self): """The name of the project that is currently processed""" if self._projectname is None: exps = self.config.experiments if self._experiment is not None and self._experiment in exps: return exps[self._experiment]['project'] try: self._projectname = list(self.config.projects.keys())[-1] except IndexError: # no project has yet been created ever raise ValueError( "No experiment has yet been created! Please run setup " "before.") return self._projectname
[ "def", "projectname", "(", "self", ")", ":", "if", "self", ".", "_projectname", "is", "None", ":", "exps", "=", "self", ".", "config", ".", "experiments", "if", "self", ".", "_experiment", "is", "not", "None", "and", "self", ".", "_experiment", "in", "exps", ":", "return", "exps", "[", "self", ".", "_experiment", "]", "[", "'project'", "]", "try", ":", "self", ".", "_projectname", "=", "list", "(", "self", ".", "config", ".", "projects", ".", "keys", "(", ")", ")", "[", "-", "1", "]", "except", "IndexError", ":", "# no project has yet been created ever", "raise", "ValueError", "(", "\"No experiment has yet been created! Please run setup \"", "\"before.\"", ")", "return", "self", ".", "_projectname" ]
The name of the project that is currently processed
[ "The", "name", "of", "the", "project", "that", "is", "currently", "processed" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L215-L227
train
Chilipp/model-organization
model_organization/__init__.py
ModelOrganizer.experiment
def experiment(self): """The identifier of the experiment that is currently processed""" if self._experiment is None: self._experiment = list(self.config.experiments.keys())[-1] return self._experiment
python
def experiment(self): """The identifier of the experiment that is currently processed""" if self._experiment is None: self._experiment = list(self.config.experiments.keys())[-1] return self._experiment
[ "def", "experiment", "(", "self", ")", ":", "if", "self", ".", "_experiment", "is", "None", ":", "self", ".", "_experiment", "=", "list", "(", "self", ".", "config", ".", "experiments", ".", "keys", "(", ")", ")", "[", "-", "1", "]", "return", "self", ".", "_experiment" ]
The identifier of the experiment that is currently processed
[ "The", "identifier", "of", "the", "experiment", "that", "is", "currently", "processed" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L235-L239
train
Chilipp/model-organization
model_organization/__init__.py
ModelOrganizer.app_main
def app_main(self, experiment=None, last=False, new=False, verbose=False, verbosity_level=None, no_modification=False, match=False): """ The main function for parsing global arguments Parameters ---------- experiment: str The id of the experiment to use last: bool If True, the last experiment is used new: bool If True, a new experiment is created verbose: bool Increase the verbosity level to DEBUG. See also `verbosity_level` for a more specific determination of the verbosity verbosity_level: str or int The verbosity level to use. Either one of ``'DEBUG', 'INFO', 'WARNING', 'ERROR'`` or the corresponding integer (see pythons logging module) no_modification: bool If True/set, no modifications in the configuration files will be done match: bool If True/set, interprete `experiment` as a regular expression (regex) und use the matching experiment""" if match: patt = re.compile(experiment) matches = list(filter(patt.search, self.config.experiments)) if len(matches) > 1: raise ValueError("Found multiple matches for %s: %s" % ( experiment, matches)) elif len(matches) == 0: raise ValueError("No experiment matches %s" % experiment) experiment = matches[0] if last and self.config.experiments: self.experiment = None elif new and self.config.experiments: try: self.experiment = utils.get_next_name(self.experiment) except ValueError: raise ValueError( "Could not estimate an experiment id! Please use the " "experiment argument to provide an id.") else: self._experiment = experiment if verbose: verbose = logging.DEBUG elif verbosity_level: if verbosity_level in ['DEBUG', 'INFO', 'WARNING', 'ERROR']: verbose = getattr(logging, verbosity_level) else: verbose = int(verbosity_level) if verbose: logging.getLogger( utils.get_toplevel_module(inspect.getmodule(self))).setLevel( verbose) self.logger.setLevel(verbose) self.no_modification = no_modification
python
def app_main(self, experiment=None, last=False, new=False, verbose=False, verbosity_level=None, no_modification=False, match=False): """ The main function for parsing global arguments Parameters ---------- experiment: str The id of the experiment to use last: bool If True, the last experiment is used new: bool If True, a new experiment is created verbose: bool Increase the verbosity level to DEBUG. See also `verbosity_level` for a more specific determination of the verbosity verbosity_level: str or int The verbosity level to use. Either one of ``'DEBUG', 'INFO', 'WARNING', 'ERROR'`` or the corresponding integer (see pythons logging module) no_modification: bool If True/set, no modifications in the configuration files will be done match: bool If True/set, interprete `experiment` as a regular expression (regex) und use the matching experiment""" if match: patt = re.compile(experiment) matches = list(filter(patt.search, self.config.experiments)) if len(matches) > 1: raise ValueError("Found multiple matches for %s: %s" % ( experiment, matches)) elif len(matches) == 0: raise ValueError("No experiment matches %s" % experiment) experiment = matches[0] if last and self.config.experiments: self.experiment = None elif new and self.config.experiments: try: self.experiment = utils.get_next_name(self.experiment) except ValueError: raise ValueError( "Could not estimate an experiment id! Please use the " "experiment argument to provide an id.") else: self._experiment = experiment if verbose: verbose = logging.DEBUG elif verbosity_level: if verbosity_level in ['DEBUG', 'INFO', 'WARNING', 'ERROR']: verbose = getattr(logging, verbosity_level) else: verbose = int(verbosity_level) if verbose: logging.getLogger( utils.get_toplevel_module(inspect.getmodule(self))).setLevel( verbose) self.logger.setLevel(verbose) self.no_modification = no_modification
[ "def", "app_main", "(", "self", ",", "experiment", "=", "None", ",", "last", "=", "False", ",", "new", "=", "False", ",", "verbose", "=", "False", ",", "verbosity_level", "=", "None", ",", "no_modification", "=", "False", ",", "match", "=", "False", ")", ":", "if", "match", ":", "patt", "=", "re", ".", "compile", "(", "experiment", ")", "matches", "=", "list", "(", "filter", "(", "patt", ".", "search", ",", "self", ".", "config", ".", "experiments", ")", ")", "if", "len", "(", "matches", ")", ">", "1", ":", "raise", "ValueError", "(", "\"Found multiple matches for %s: %s\"", "%", "(", "experiment", ",", "matches", ")", ")", "elif", "len", "(", "matches", ")", "==", "0", ":", "raise", "ValueError", "(", "\"No experiment matches %s\"", "%", "experiment", ")", "experiment", "=", "matches", "[", "0", "]", "if", "last", "and", "self", ".", "config", ".", "experiments", ":", "self", ".", "experiment", "=", "None", "elif", "new", "and", "self", ".", "config", ".", "experiments", ":", "try", ":", "self", ".", "experiment", "=", "utils", ".", "get_next_name", "(", "self", ".", "experiment", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "\"Could not estimate an experiment id! Please use the \"", "\"experiment argument to provide an id.\"", ")", "else", ":", "self", ".", "_experiment", "=", "experiment", "if", "verbose", ":", "verbose", "=", "logging", ".", "DEBUG", "elif", "verbosity_level", ":", "if", "verbosity_level", "in", "[", "'DEBUG'", ",", "'INFO'", ",", "'WARNING'", ",", "'ERROR'", "]", ":", "verbose", "=", "getattr", "(", "logging", ",", "verbosity_level", ")", "else", ":", "verbose", "=", "int", "(", "verbosity_level", ")", "if", "verbose", ":", "logging", ".", "getLogger", "(", "utils", ".", "get_toplevel_module", "(", "inspect", ".", "getmodule", "(", "self", ")", ")", ")", ".", "setLevel", "(", "verbose", ")", "self", ".", "logger", ".", "setLevel", "(", "verbose", ")", "self", ".", "no_modification", "=", "no_modification" ]
The main function for parsing global arguments Parameters ---------- experiment: str The id of the experiment to use last: bool If True, the last experiment is used new: bool If True, a new experiment is created verbose: bool Increase the verbosity level to DEBUG. See also `verbosity_level` for a more specific determination of the verbosity verbosity_level: str or int The verbosity level to use. Either one of ``'DEBUG', 'INFO', 'WARNING', 'ERROR'`` or the corresponding integer (see Python's logging module) no_modification: bool If True/set, no modifications in the configuration files will be done match: bool If True/set, interpret `experiment` as a regular expression (regex) and use the matching experiment
[ "The", "main", "function", "for", "parsing", "global", "arguments" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L248-L307
train
Chilipp/model-organization
model_organization/__init__.py
ModelOrganizer.setup
def setup(self, root_dir, projectname=None, link=False, **kwargs): """ Perform the initial setup for the project Parameters ---------- root_dir: str The path to the root directory where the experiments, etc. will be stored projectname: str The name of the project that shall be initialized at `root_dir`. A new directory will be created namely ``root_dir + '/' + projectname`` link: bool If set, the source files are linked to the original ones instead of copied Other Parameters ---------------- ``**kwargs`` Are passed to the :meth:`app_main` method """ projects = self.config.projects if not projects and projectname is None: projectname = self.name + '0' elif projectname is None: # try to increment a number in the last used try: projectname = utils.get_next_name(self.projectname) except ValueError: raise ValueError( "Could not estimate a project name! Please use the " "projectname argument to provide a project name.") self.app_main(**kwargs) root_dir = osp.abspath(osp.join(root_dir, projectname)) projects[projectname] = OrderedDict([ ('name', projectname), ('root', root_dir), ('timestamps', OrderedDict())]) data_dir = self.config.global_config.get( 'data', osp.join(root_dir, 'data')) projects[projectname]['data'] = data_dir self.projectname = projectname self.logger.info("Initializing project %s", projectname) self.logger.debug(" Creating root directory %s", root_dir) if not osp.exists(root_dir): os.makedirs(root_dir) return root_dir
python
def setup(self, root_dir, projectname=None, link=False, **kwargs): """ Perform the initial setup for the project Parameters ---------- root_dir: str The path to the root directory where the experiments, etc. will be stored projectname: str The name of the project that shall be initialized at `root_dir`. A new directory will be created namely ``root_dir + '/' + projectname`` link: bool If set, the source files are linked to the original ones instead of copied Other Parameters ---------------- ``**kwargs`` Are passed to the :meth:`app_main` method """ projects = self.config.projects if not projects and projectname is None: projectname = self.name + '0' elif projectname is None: # try to increment a number in the last used try: projectname = utils.get_next_name(self.projectname) except ValueError: raise ValueError( "Could not estimate a project name! Please use the " "projectname argument to provide a project name.") self.app_main(**kwargs) root_dir = osp.abspath(osp.join(root_dir, projectname)) projects[projectname] = OrderedDict([ ('name', projectname), ('root', root_dir), ('timestamps', OrderedDict())]) data_dir = self.config.global_config.get( 'data', osp.join(root_dir, 'data')) projects[projectname]['data'] = data_dir self.projectname = projectname self.logger.info("Initializing project %s", projectname) self.logger.debug(" Creating root directory %s", root_dir) if not osp.exists(root_dir): os.makedirs(root_dir) return root_dir
[ "def", "setup", "(", "self", ",", "root_dir", ",", "projectname", "=", "None", ",", "link", "=", "False", ",", "*", "*", "kwargs", ")", ":", "projects", "=", "self", ".", "config", ".", "projects", "if", "not", "projects", "and", "projectname", "is", "None", ":", "projectname", "=", "self", ".", "name", "+", "'0'", "elif", "projectname", "is", "None", ":", "# try to increment a number in the last used", "try", ":", "projectname", "=", "utils", ".", "get_next_name", "(", "self", ".", "projectname", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "\"Could not estimate a project name! Please use the \"", "\"projectname argument to provide a project name.\"", ")", "self", ".", "app_main", "(", "*", "*", "kwargs", ")", "root_dir", "=", "osp", ".", "abspath", "(", "osp", ".", "join", "(", "root_dir", ",", "projectname", ")", ")", "projects", "[", "projectname", "]", "=", "OrderedDict", "(", "[", "(", "'name'", ",", "projectname", ")", ",", "(", "'root'", ",", "root_dir", ")", ",", "(", "'timestamps'", ",", "OrderedDict", "(", ")", ")", "]", ")", "data_dir", "=", "self", ".", "config", ".", "global_config", ".", "get", "(", "'data'", ",", "osp", ".", "join", "(", "root_dir", ",", "'data'", ")", ")", "projects", "[", "projectname", "]", "[", "'data'", "]", "=", "data_dir", "self", ".", "projectname", "=", "projectname", "self", ".", "logger", ".", "info", "(", "\"Initializing project %s\"", ",", "projectname", ")", "self", ".", "logger", ".", "debug", "(", "\" Creating root directory %s\"", ",", "root_dir", ")", "if", "not", "osp", ".", "exists", "(", "root_dir", ")", ":", "os", ".", "makedirs", "(", "root_dir", ")", "return", "root_dir" ]
Perform the initial setup for the project Parameters ---------- root_dir: str The path to the root directory where the experiments, etc. will be stored projectname: str The name of the project that shall be initialized at `root_dir`. A new directory will be created namely ``root_dir + '/' + projectname`` link: bool If set, the source files are linked to the original ones instead of copied Other Parameters ---------------- ``**kwargs`` Are passed to the :meth:`app_main` method
[ "Perform", "the", "initial", "setup", "for", "the", "project" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L366-L411
train
Chilipp/model-organization
model_organization/__init__.py
ModelOrganizer.init
def init(self, projectname=None, description=None, **kwargs): """ Initialize a new experiment Parameters ---------- projectname: str The name of the project that shall be used. If None, the last one created will be used description: str A short summary of the experiment ``**kwargs`` Keyword arguments passed to the :meth:`app_main` method Notes ----- If the experiment is None, a new experiment will be created """ self.app_main(**kwargs) experiments = self.config.experiments experiment = self._experiment if experiment is None and not experiments: experiment = self.name + '_exp0' elif experiment is None: try: experiment = utils.get_next_name(self.experiment) except ValueError: raise ValueError( "Could not estimate an experiment id! Please use the " "experiment argument to provide an id.") self.experiment = experiment if self.is_archived(experiment): raise ValueError( "The specified experiment has already been archived! Run " "``%s -id %s unarchive`` first" % (self.name, experiment)) if projectname is None: projectname = self.projectname else: self.projectname = projectname self.logger.info("Initializing experiment %s of project %s", experiment, projectname) exp_dict = experiments.setdefault(experiment, OrderedDict()) if description is not None: exp_dict['description'] = description exp_dict['project'] = projectname exp_dict['expdir'] = exp_dir = osp.join('experiments', experiment) exp_dir = osp.join(self.config.projects[projectname]['root'], exp_dir) exp_dict['timestamps'] = OrderedDict() if not os.path.exists(exp_dir): self.logger.debug(" Creating experiment directory %s", exp_dir) os.makedirs(exp_dir) self.fix_paths(exp_dict) return exp_dict
python
def init(self, projectname=None, description=None, **kwargs): """ Initialize a new experiment Parameters ---------- projectname: str The name of the project that shall be used. If None, the last one created will be used description: str A short summary of the experiment ``**kwargs`` Keyword arguments passed to the :meth:`app_main` method Notes ----- If the experiment is None, a new experiment will be created """ self.app_main(**kwargs) experiments = self.config.experiments experiment = self._experiment if experiment is None and not experiments: experiment = self.name + '_exp0' elif experiment is None: try: experiment = utils.get_next_name(self.experiment) except ValueError: raise ValueError( "Could not estimate an experiment id! Please use the " "experiment argument to provide an id.") self.experiment = experiment if self.is_archived(experiment): raise ValueError( "The specified experiment has already been archived! Run " "``%s -id %s unarchive`` first" % (self.name, experiment)) if projectname is None: projectname = self.projectname else: self.projectname = projectname self.logger.info("Initializing experiment %s of project %s", experiment, projectname) exp_dict = experiments.setdefault(experiment, OrderedDict()) if description is not None: exp_dict['description'] = description exp_dict['project'] = projectname exp_dict['expdir'] = exp_dir = osp.join('experiments', experiment) exp_dir = osp.join(self.config.projects[projectname]['root'], exp_dir) exp_dict['timestamps'] = OrderedDict() if not os.path.exists(exp_dir): self.logger.debug(" Creating experiment directory %s", exp_dir) os.makedirs(exp_dir) self.fix_paths(exp_dict) return exp_dict
[ "def", "init", "(", "self", ",", "projectname", "=", "None", ",", "description", "=", "None", ",", "*", "*", "kwargs", ")", ":", "self", ".", "app_main", "(", "*", "*", "kwargs", ")", "experiments", "=", "self", ".", "config", ".", "experiments", "experiment", "=", "self", ".", "_experiment", "if", "experiment", "is", "None", "and", "not", "experiments", ":", "experiment", "=", "self", ".", "name", "+", "'_exp0'", "elif", "experiment", "is", "None", ":", "try", ":", "experiment", "=", "utils", ".", "get_next_name", "(", "self", ".", "experiment", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "\"Could not estimate an experiment id! Please use the \"", "\"experiment argument to provide an id.\"", ")", "self", ".", "experiment", "=", "experiment", "if", "self", ".", "is_archived", "(", "experiment", ")", ":", "raise", "ValueError", "(", "\"The specified experiment has already been archived! Run \"", "\"``%s -id %s unarchive`` first\"", "%", "(", "self", ".", "name", ",", "experiment", ")", ")", "if", "projectname", "is", "None", ":", "projectname", "=", "self", ".", "projectname", "else", ":", "self", ".", "projectname", "=", "projectname", "self", ".", "logger", ".", "info", "(", "\"Initializing experiment %s of project %s\"", ",", "experiment", ",", "projectname", ")", "exp_dict", "=", "experiments", ".", "setdefault", "(", "experiment", ",", "OrderedDict", "(", ")", ")", "if", "description", "is", "not", "None", ":", "exp_dict", "[", "'description'", "]", "=", "description", "exp_dict", "[", "'project'", "]", "=", "projectname", "exp_dict", "[", "'expdir'", "]", "=", "exp_dir", "=", "osp", ".", "join", "(", "'experiments'", ",", "experiment", ")", "exp_dir", "=", "osp", ".", "join", "(", "self", ".", "config", ".", "projects", "[", "projectname", "]", "[", "'root'", "]", ",", "exp_dir", ")", "exp_dict", "[", "'timestamps'", "]", "=", "OrderedDict", "(", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "exp_dir", ")", ":", "self", ".", 
"logger", ".", "debug", "(", "\" Creating experiment directory %s\"", ",", "exp_dir", ")", "os", ".", "makedirs", "(", "exp_dir", ")", "self", ".", "fix_paths", "(", "exp_dict", ")", "return", "exp_dict" ]
Initialize a new experiment Parameters ---------- projectname: str The name of the project that shall be used. If None, the last one created will be used description: str A short summary of the experiment ``**kwargs`` Keyword arguments passed to the :meth:`app_main` method Notes ----- If the experiment is None, a new experiment will be created
[ "Initialize", "a", "new", "experiment" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L418-L471
train
Chilipp/model-organization
model_organization/__init__.py
ModelOrganizer.get_value
def get_value(self, keys, exp_path=False, project_path=False, complete=False, on_projects=False, on_globals=False, projectname=None, no_fix=False, only_keys=False, base='', return_list=False, archives=False, **kwargs): """ Get one or more values in the configuration Parameters ---------- keys: list of str A list of keys to get the values of. %(get_value_note)s %(ModelOrganizer.info.parameters.exp_path|project_path)s %(ModelOrganizer.info.common_params)s %(ModelOrganizer.info.parameters.no_fix|only_keys|archives)s base: str A base string that shall be put in front of each key in `values` to avoid typing it all the time return_list: bool If True, the list of values corresponding to `keys` is returned, otherwise they are printed separated by a new line to the standard output """ def pretty_print(val): if isinstance(val, dict): if only_keys: val = list(val.keys()) return ordered_yaml_dump( val, default_flow_style=False).rstrip() return str(val) config = self.info(exp_path=exp_path, project_path=project_path, complete=complete, on_projects=on_projects, on_globals=on_globals, projectname=projectname, no_fix=no_fix, return_dict=True, insert_id=False, archives=archives, **kwargs) ret = [0] * len(keys) for i, key in enumerate(keys): if base: key = base + key key, sub_config = utils.go_through_dict(key, config) ret[i] = sub_config[key] if return_list: return ret return (self.print_ or six.print_)('\n'.join(map(pretty_print, ret)))
python
def get_value(self, keys, exp_path=False, project_path=False, complete=False, on_projects=False, on_globals=False, projectname=None, no_fix=False, only_keys=False, base='', return_list=False, archives=False, **kwargs): """ Get one or more values in the configuration Parameters ---------- keys: list of str A list of keys to get the values of. %(get_value_note)s %(ModelOrganizer.info.parameters.exp_path|project_path)s %(ModelOrganizer.info.common_params)s %(ModelOrganizer.info.parameters.no_fix|only_keys|archives)s base: str A base string that shall be put in front of each key in `values` to avoid typing it all the time return_list: bool If True, the list of values corresponding to `keys` is returned, otherwise they are printed separated by a new line to the standard output """ def pretty_print(val): if isinstance(val, dict): if only_keys: val = list(val.keys()) return ordered_yaml_dump( val, default_flow_style=False).rstrip() return str(val) config = self.info(exp_path=exp_path, project_path=project_path, complete=complete, on_projects=on_projects, on_globals=on_globals, projectname=projectname, no_fix=no_fix, return_dict=True, insert_id=False, archives=archives, **kwargs) ret = [0] * len(keys) for i, key in enumerate(keys): if base: key = base + key key, sub_config = utils.go_through_dict(key, config) ret[i] = sub_config[key] if return_list: return ret return (self.print_ or six.print_)('\n'.join(map(pretty_print, ret)))
[ "def", "get_value", "(", "self", ",", "keys", ",", "exp_path", "=", "False", ",", "project_path", "=", "False", ",", "complete", "=", "False", ",", "on_projects", "=", "False", ",", "on_globals", "=", "False", ",", "projectname", "=", "None", ",", "no_fix", "=", "False", ",", "only_keys", "=", "False", ",", "base", "=", "''", ",", "return_list", "=", "False", ",", "archives", "=", "False", ",", "*", "*", "kwargs", ")", ":", "def", "pretty_print", "(", "val", ")", ":", "if", "isinstance", "(", "val", ",", "dict", ")", ":", "if", "only_keys", ":", "val", "=", "list", "(", "val", ".", "keys", "(", ")", ")", "return", "ordered_yaml_dump", "(", "val", ",", "default_flow_style", "=", "False", ")", ".", "rstrip", "(", ")", "return", "str", "(", "val", ")", "config", "=", "self", ".", "info", "(", "exp_path", "=", "exp_path", ",", "project_path", "=", "project_path", ",", "complete", "=", "complete", ",", "on_projects", "=", "on_projects", ",", "on_globals", "=", "on_globals", ",", "projectname", "=", "projectname", ",", "no_fix", "=", "no_fix", ",", "return_dict", "=", "True", ",", "insert_id", "=", "False", ",", "archives", "=", "archives", ",", "*", "*", "kwargs", ")", "ret", "=", "[", "0", "]", "*", "len", "(", "keys", ")", "for", "i", ",", "key", "in", "enumerate", "(", "keys", ")", ":", "if", "base", ":", "key", "=", "base", "+", "key", "key", ",", "sub_config", "=", "utils", ".", "go_through_dict", "(", "key", ",", "config", ")", "ret", "[", "i", "]", "=", "sub_config", "[", "key", "]", "if", "return_list", ":", "return", "ret", "return", "(", "self", ".", "print_", "or", "six", ".", "print_", ")", "(", "'\\n'", ".", "join", "(", "map", "(", "pretty_print", ",", "ret", ")", ")", ")" ]
Get one or more values in the configuration Parameters ---------- keys: list of str A list of keys to get the values of. %(get_value_note)s %(ModelOrganizer.info.parameters.exp_path|project_path)s %(ModelOrganizer.info.common_params)s %(ModelOrganizer.info.parameters.no_fix|only_keys|archives)s base: str A base string that shall be put in front of each key in `values` to avoid typing it all the time return_list: bool If True, the list of values corresponding to `keys` is returned, otherwise they are printed separated by a new line to the standard output
[ "Get", "one", "or", "more", "values", "in", "the", "configuration" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1159-L1201
train
Chilipp/model-organization
model_organization/__init__.py
ModelOrganizer.del_value
def del_value(self, keys, complete=False, on_projects=False, on_globals=False, projectname=None, base='', dtype=None, **kwargs): """ Delete a value in the configuration Parameters ---------- keys: list of str A list of keys to be deleted. %(get_value_note)s %(ModelOrganizer.info.common_params)s base: str A base string that shall be put in front of each key in `values` to avoid typing it all the time """ config = self.info(complete=complete, on_projects=on_projects, on_globals=on_globals, projectname=projectname, return_dict=True, insert_id=False, **kwargs) for key in keys: if base: key = base + key key, sub_config = utils.go_through_dict(key, config) del sub_config[key]
python
def del_value(self, keys, complete=False, on_projects=False, on_globals=False, projectname=None, base='', dtype=None, **kwargs): """ Delete a value in the configuration Parameters ---------- keys: list of str A list of keys to be deleted. %(get_value_note)s %(ModelOrganizer.info.common_params)s base: str A base string that shall be put in front of each key in `values` to avoid typing it all the time """ config = self.info(complete=complete, on_projects=on_projects, on_globals=on_globals, projectname=projectname, return_dict=True, insert_id=False, **kwargs) for key in keys: if base: key = base + key key, sub_config = utils.go_through_dict(key, config) del sub_config[key]
[ "def", "del_value", "(", "self", ",", "keys", ",", "complete", "=", "False", ",", "on_projects", "=", "False", ",", "on_globals", "=", "False", ",", "projectname", "=", "None", ",", "base", "=", "''", ",", "dtype", "=", "None", ",", "*", "*", "kwargs", ")", ":", "config", "=", "self", ".", "info", "(", "complete", "=", "complete", ",", "on_projects", "=", "on_projects", ",", "on_globals", "=", "on_globals", ",", "projectname", "=", "projectname", ",", "return_dict", "=", "True", ",", "insert_id", "=", "False", ",", "*", "*", "kwargs", ")", "for", "key", "in", "keys", ":", "if", "base", ":", "key", "=", "base", "+", "key", "key", ",", "sub_config", "=", "utils", ".", "go_through_dict", "(", "key", ",", "config", ")", "del", "sub_config", "[", "key", "]" ]
Delete a value in the configuration Parameters ---------- keys: list of str A list of keys to be deleted. %(get_value_note)s %(ModelOrganizer.info.common_params)s base: str A base string that shall be put in front of each key in `values` to avoid typing it all the time
[ "Delete", "a", "value", "in", "the", "configuration" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1219-L1241
train
Chilipp/model-organization
model_organization/__init__.py
ModelOrganizer.configure
def configure(self, global_config=False, project_config=False, ifile=None, forcing=None, serial=False, nprocs=None, update_from=None, **kwargs): """ Configure the project and experiments Parameters ---------- global_config: bool If True/set, the configuration are applied globally (already existing and configured experiments are not impacted) project_config: bool Apply the configuration on the entire project instance instead of only the single experiment (already existing and configured experiments are not impacted) ifile: str The input file for the project. Must be a netCDF file with population data forcing: str The input file for the project containing variables with population evolution information. Possible variables in the netCDF file are *movement* containing the number of people to move and *change* containing the population change (positive or negative) serial: bool Do the parameterization always serial (i.e. not in parallel on multiple processors). Does automatically impact global settings nprocs: int or 'all' Maximum number of processes to when making the parameterization in parallel. Does automatically impact global settings and disables `serial` update_from: str Path to a yaml configuration file to update the specified configuration with it ``**kwargs`` Other keywords for the :meth:`app_main` method""" if global_config: d = self.config.global_config elif project_config: self.app_main(**kwargs) d = self.config.projects[self.projectname] else: d = self.config.experiments[self.experiment] if ifile is not None: d['input'] = osp.abspath(ifile) if forcing is not None: d['forcing'] = osp.abspath(forcing) if update_from is not None: with open('update_from') as f: d.update(yaml.load(f)) global_config = self.config.global_config if serial: global_config['serial'] = True elif nprocs: nprocs = int(nprocs) if nprocs != 'all' else nprocs global_config['serial'] = False global_config['nprocs'] = nprocs
python
def configure(self, global_config=False, project_config=False, ifile=None, forcing=None, serial=False, nprocs=None, update_from=None, **kwargs): """ Configure the project and experiments Parameters ---------- global_config: bool If True/set, the configuration are applied globally (already existing and configured experiments are not impacted) project_config: bool Apply the configuration on the entire project instance instead of only the single experiment (already existing and configured experiments are not impacted) ifile: str The input file for the project. Must be a netCDF file with population data forcing: str The input file for the project containing variables with population evolution information. Possible variables in the netCDF file are *movement* containing the number of people to move and *change* containing the population change (positive or negative) serial: bool Do the parameterization always serial (i.e. not in parallel on multiple processors). Does automatically impact global settings nprocs: int or 'all' Maximum number of processes to when making the parameterization in parallel. Does automatically impact global settings and disables `serial` update_from: str Path to a yaml configuration file to update the specified configuration with it ``**kwargs`` Other keywords for the :meth:`app_main` method""" if global_config: d = self.config.global_config elif project_config: self.app_main(**kwargs) d = self.config.projects[self.projectname] else: d = self.config.experiments[self.experiment] if ifile is not None: d['input'] = osp.abspath(ifile) if forcing is not None: d['forcing'] = osp.abspath(forcing) if update_from is not None: with open('update_from') as f: d.update(yaml.load(f)) global_config = self.config.global_config if serial: global_config['serial'] = True elif nprocs: nprocs = int(nprocs) if nprocs != 'all' else nprocs global_config['serial'] = False global_config['nprocs'] = nprocs
[ "def", "configure", "(", "self", ",", "global_config", "=", "False", ",", "project_config", "=", "False", ",", "ifile", "=", "None", ",", "forcing", "=", "None", ",", "serial", "=", "False", ",", "nprocs", "=", "None", ",", "update_from", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "global_config", ":", "d", "=", "self", ".", "config", ".", "global_config", "elif", "project_config", ":", "self", ".", "app_main", "(", "*", "*", "kwargs", ")", "d", "=", "self", ".", "config", ".", "projects", "[", "self", ".", "projectname", "]", "else", ":", "d", "=", "self", ".", "config", ".", "experiments", "[", "self", ".", "experiment", "]", "if", "ifile", "is", "not", "None", ":", "d", "[", "'input'", "]", "=", "osp", ".", "abspath", "(", "ifile", ")", "if", "forcing", "is", "not", "None", ":", "d", "[", "'forcing'", "]", "=", "osp", ".", "abspath", "(", "forcing", ")", "if", "update_from", "is", "not", "None", ":", "with", "open", "(", "'update_from'", ")", "as", "f", ":", "d", ".", "update", "(", "yaml", ".", "load", "(", "f", ")", ")", "global_config", "=", "self", ".", "config", ".", "global_config", "if", "serial", ":", "global_config", "[", "'serial'", "]", "=", "True", "elif", "nprocs", ":", "nprocs", "=", "int", "(", "nprocs", ")", "if", "nprocs", "!=", "'all'", "else", "nprocs", "global_config", "[", "'serial'", "]", "=", "False", "global_config", "[", "'nprocs'", "]", "=", "nprocs" ]
Configure the project and experiments Parameters ---------- global_config: bool If True/set, the configuration are applied globally (already existing and configured experiments are not impacted) project_config: bool Apply the configuration on the entire project instance instead of only the single experiment (already existing and configured experiments are not impacted) ifile: str The input file for the project. Must be a netCDF file with population data forcing: str The input file for the project containing variables with population evolution information. Possible variables in the netCDF file are *movement* containing the number of people to move and *change* containing the population change (positive or negative) serial: bool Do the parameterization always serial (i.e. not in parallel on multiple processors). Does automatically impact global settings nprocs: int or 'all' Maximum number of processes to when making the parameterization in parallel. Does automatically impact global settings and disables `serial` update_from: str Path to a yaml configuration file to update the specified configuration with it ``**kwargs`` Other keywords for the :meth:`app_main` method
[ "Configure", "the", "project", "and", "experiments" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1259-L1317
train
Chilipp/model-organization
model_organization/__init__.py
ModelOrganizer.set_value
def set_value(self, items, complete=False, on_projects=False, on_globals=False, projectname=None, base='', dtype=None, **kwargs): """ Set a value in the configuration Parameters ---------- items: dict A dictionary whose keys correspond to the item in the configuration and whose values are what shall be inserted. %(get_value_note)s %(ModelOrganizer.info.common_params)s base: str A base string that shall be put in front of each key in `values` to avoid typing it all the time dtype: str The name of the data type or a data type to cast the value to """ def identity(val): return val config = self.info(complete=complete, on_projects=on_projects, on_globals=on_globals, projectname=projectname, return_dict=True, insert_id=False, **kwargs) if isinstance(dtype, six.string_types): dtype = getattr(builtins, dtype) elif dtype is None: dtype = identity for key, value in six.iteritems(dict(items)): if base: key = base + key key, sub_config = utils.go_through_dict(key, config, setdefault=OrderedDict) if key in self.paths: if isinstance(value, six.string_types): value = osp.abspath(value) else: value = list(map(osp.abspath, value)) sub_config[key] = dtype(value)
python
def set_value(self, items, complete=False, on_projects=False, on_globals=False, projectname=None, base='', dtype=None, **kwargs): """ Set a value in the configuration Parameters ---------- items: dict A dictionary whose keys correspond to the item in the configuration and whose values are what shall be inserted. %(get_value_note)s %(ModelOrganizer.info.common_params)s base: str A base string that shall be put in front of each key in `values` to avoid typing it all the time dtype: str The name of the data type or a data type to cast the value to """ def identity(val): return val config = self.info(complete=complete, on_projects=on_projects, on_globals=on_globals, projectname=projectname, return_dict=True, insert_id=False, **kwargs) if isinstance(dtype, six.string_types): dtype = getattr(builtins, dtype) elif dtype is None: dtype = identity for key, value in six.iteritems(dict(items)): if base: key = base + key key, sub_config = utils.go_through_dict(key, config, setdefault=OrderedDict) if key in self.paths: if isinstance(value, six.string_types): value = osp.abspath(value) else: value = list(map(osp.abspath, value)) sub_config[key] = dtype(value)
[ "def", "set_value", "(", "self", ",", "items", ",", "complete", "=", "False", ",", "on_projects", "=", "False", ",", "on_globals", "=", "False", ",", "projectname", "=", "None", ",", "base", "=", "''", ",", "dtype", "=", "None", ",", "*", "*", "kwargs", ")", ":", "def", "identity", "(", "val", ")", ":", "return", "val", "config", "=", "self", ".", "info", "(", "complete", "=", "complete", ",", "on_projects", "=", "on_projects", ",", "on_globals", "=", "on_globals", ",", "projectname", "=", "projectname", ",", "return_dict", "=", "True", ",", "insert_id", "=", "False", ",", "*", "*", "kwargs", ")", "if", "isinstance", "(", "dtype", ",", "six", ".", "string_types", ")", ":", "dtype", "=", "getattr", "(", "builtins", ",", "dtype", ")", "elif", "dtype", "is", "None", ":", "dtype", "=", "identity", "for", "key", ",", "value", "in", "six", ".", "iteritems", "(", "dict", "(", "items", ")", ")", ":", "if", "base", ":", "key", "=", "base", "+", "key", "key", ",", "sub_config", "=", "utils", ".", "go_through_dict", "(", "key", ",", "config", ",", "setdefault", "=", "OrderedDict", ")", "if", "key", "in", "self", ".", "paths", ":", "if", "isinstance", "(", "value", ",", "six", ".", "string_types", ")", ":", "value", "=", "osp", ".", "abspath", "(", "value", ")", "else", ":", "value", "=", "list", "(", "map", "(", "osp", ".", "abspath", ",", "value", ")", ")", "sub_config", "[", "key", "]", "=", "dtype", "(", "value", ")" ]
Set a value in the configuration Parameters ---------- items: dict A dictionary whose keys correspond to the item in the configuration and whose values are what shall be inserted. %(get_value_note)s %(ModelOrganizer.info.common_params)s base: str A base string that shall be put in front of each key in `values` to avoid typing it all the time dtype: str The name of the data type or a data type to cast the value to
[ "Set", "a", "value", "in", "the", "configuration" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1331-L1368
train
Chilipp/model-organization
model_organization/__init__.py
ModelOrganizer.rel_paths
def rel_paths(self, *args, **kwargs): """ Fix the paths in the given dictionary to get relative paths Parameters ---------- %(ExperimentsConfig.rel_paths.parameters)s Returns ------- %(ExperimentsConfig.rel_paths.returns)s Notes ----- d is modified in place!""" return self.config.experiments.rel_paths(*args, **kwargs)
python
def rel_paths(self, *args, **kwargs): """ Fix the paths in the given dictionary to get relative paths Parameters ---------- %(ExperimentsConfig.rel_paths.parameters)s Returns ------- %(ExperimentsConfig.rel_paths.returns)s Notes ----- d is modified in place!""" return self.config.experiments.rel_paths(*args, **kwargs)
[ "def", "rel_paths", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "config", ".", "experiments", ".", "rel_paths", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Fix the paths in the given dictionary to get relative paths Parameters ---------- %(ExperimentsConfig.rel_paths.parameters)s Returns ------- %(ExperimentsConfig.rel_paths.returns)s Notes ----- d is modified in place!
[ "Fix", "the", "paths", "in", "the", "given", "dictionary", "to", "get", "relative", "paths" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1410-L1425
train
Chilipp/model-organization
model_organization/__init__.py
ModelOrganizer.abspath
def abspath(self, path, project=None, root=None): """Returns the path from the current working directory We only store the paths relative to the root directory of the project. This method fixes those path to be applicable from the working directory Parameters ---------- path: str The original path as it is stored in the configuration project: str The project to use. If None, the :attr:`projectname` attribute is used root: str If not None, the root directory of the project Returns ------- str The path as it is accessible from the current working directory""" if root is None: root = self.config.projects[project or self.projectname]['root'] return osp.join(root, path)
python
def abspath(self, path, project=None, root=None): """Returns the path from the current working directory We only store the paths relative to the root directory of the project. This method fixes those path to be applicable from the working directory Parameters ---------- path: str The original path as it is stored in the configuration project: str The project to use. If None, the :attr:`projectname` attribute is used root: str If not None, the root directory of the project Returns ------- str The path as it is accessible from the current working directory""" if root is None: root = self.config.projects[project or self.projectname]['root'] return osp.join(root, path)
[ "def", "abspath", "(", "self", ",", "path", ",", "project", "=", "None", ",", "root", "=", "None", ")", ":", "if", "root", "is", "None", ":", "root", "=", "self", ".", "config", ".", "projects", "[", "project", "or", "self", ".", "projectname", "]", "[", "'root'", "]", "return", "osp", ".", "join", "(", "root", ",", "path", ")" ]
Returns the path from the current working directory We only store the paths relative to the root directory of the project. This method fixes those path to be applicable from the working directory Parameters ---------- path: str The original path as it is stored in the configuration project: str The project to use. If None, the :attr:`projectname` attribute is used root: str If not None, the root directory of the project Returns ------- str The path as it is accessible from the current working directory
[ "Returns", "the", "path", "from", "the", "current", "working", "directory" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1440-L1463
train
Chilipp/model-organization
model_organization/__init__.py
ModelOrganizer.relpath
def relpath(self, path, project=None, root=None): """Returns the relative path from the root directory of the project We only store the paths relative to the root directory of the project. This method gives you this path from a path that is accessible from the current working directory Parameters ---------- path: str The original path accessible from the current working directory project: str The project to use. If None, the :attr:`projectname` attribute is used root: str If not None, the root directory of the project Returns ------- str The path relative from the root directory""" if root is None: root = self.config.projects[project or self.projectname]['root'] return osp.relpath(path, root)
python
def relpath(self, path, project=None, root=None): """Returns the relative path from the root directory of the project We only store the paths relative to the root directory of the project. This method gives you this path from a path that is accessible from the current working directory Parameters ---------- path: str The original path accessible from the current working directory project: str The project to use. If None, the :attr:`projectname` attribute is used root: str If not None, the root directory of the project Returns ------- str The path relative from the root directory""" if root is None: root = self.config.projects[project or self.projectname]['root'] return osp.relpath(path, root)
[ "def", "relpath", "(", "self", ",", "path", ",", "project", "=", "None", ",", "root", "=", "None", ")", ":", "if", "root", "is", "None", ":", "root", "=", "self", ".", "config", ".", "projects", "[", "project", "or", "self", ".", "projectname", "]", "[", "'root'", "]", "return", "osp", ".", "relpath", "(", "path", ",", "root", ")" ]
Returns the relative path from the root directory of the project We only store the paths relative to the root directory of the project. This method gives you this path from a path that is accessible from the current working directory Parameters ---------- path: str The original path accessible from the current working directory project: str The project to use. If None, the :attr:`projectname` attribute is used root: str If not None, the root directory of the project Returns ------- str The path relative from the root directory
[ "Returns", "the", "relative", "path", "from", "the", "root", "directory", "of", "the", "project" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1465-L1488
train
Chilipp/model-organization
model_organization/__init__.py
ModelOrganizer.setup_parser
def setup_parser(self, parser=None, subparsers=None): """ Create the argument parser for this instance This method uses the functions defined in the :attr:`commands` attribute to create a command line utility via the :class:`FuncArgParser` class. Each command in the :attr:`commands` attribute is interpreted as on subparser and setup initially via the :meth:`FuncArgParser.setup_args` method. You can modify the parser for each command *cmd* by including a ``_modify_cmd`` method that accepts the subparser as an argument Parameters ---------- parser: FuncArgParser The parser to use. If None, a new one will be created subparsers: argparse._SubParsersAction The subparsers to use. If None, the :attr:`~ArgumentParser.add_subparser` method from `parser` will be called Returns ------- FuncArgParser The created command line parser or the given `parser` argparse._SubParsersAction The created subparsers action or the given `subparsers` dict A mapping from command name in the :attr:`commands` attribute to the corresponding command line parser See Also -------- parse_args""" commands = self.commands[:] parser_cmds = self.parser_commands.copy() if subparsers is None: if parser is None: parser = FuncArgParser(self.name) subparsers = parser.add_subparsers(chain=True) ret = {} for i, cmd in enumerate(commands[:]): func = getattr(self, cmd) parser_cmd = parser_cmds.setdefault(cmd, cmd.replace('_', '-')) ret[cmd] = sp = parser.setup_subparser( func, name=parser_cmd, return_parser=True) sp.setup_args(func) modifier = getattr(self, '_modify_' + cmd, None) if modifier is not None: modifier(sp) self.parser_commands = parser_cmds parser.setup_args(self.app_main) self._modify_app_main(parser) self.parser = parser self.subparsers = ret return parser, subparsers, ret
python
def setup_parser(self, parser=None, subparsers=None): """ Create the argument parser for this instance This method uses the functions defined in the :attr:`commands` attribute to create a command line utility via the :class:`FuncArgParser` class. Each command in the :attr:`commands` attribute is interpreted as on subparser and setup initially via the :meth:`FuncArgParser.setup_args` method. You can modify the parser for each command *cmd* by including a ``_modify_cmd`` method that accepts the subparser as an argument Parameters ---------- parser: FuncArgParser The parser to use. If None, a new one will be created subparsers: argparse._SubParsersAction The subparsers to use. If None, the :attr:`~ArgumentParser.add_subparser` method from `parser` will be called Returns ------- FuncArgParser The created command line parser or the given `parser` argparse._SubParsersAction The created subparsers action or the given `subparsers` dict A mapping from command name in the :attr:`commands` attribute to the corresponding command line parser See Also -------- parse_args""" commands = self.commands[:] parser_cmds = self.parser_commands.copy() if subparsers is None: if parser is None: parser = FuncArgParser(self.name) subparsers = parser.add_subparsers(chain=True) ret = {} for i, cmd in enumerate(commands[:]): func = getattr(self, cmd) parser_cmd = parser_cmds.setdefault(cmd, cmd.replace('_', '-')) ret[cmd] = sp = parser.setup_subparser( func, name=parser_cmd, return_parser=True) sp.setup_args(func) modifier = getattr(self, '_modify_' + cmd, None) if modifier is not None: modifier(sp) self.parser_commands = parser_cmds parser.setup_args(self.app_main) self._modify_app_main(parser) self.parser = parser self.subparsers = ret return parser, subparsers, ret
[ "def", "setup_parser", "(", "self", ",", "parser", "=", "None", ",", "subparsers", "=", "None", ")", ":", "commands", "=", "self", ".", "commands", "[", ":", "]", "parser_cmds", "=", "self", ".", "parser_commands", ".", "copy", "(", ")", "if", "subparsers", "is", "None", ":", "if", "parser", "is", "None", ":", "parser", "=", "FuncArgParser", "(", "self", ".", "name", ")", "subparsers", "=", "parser", ".", "add_subparsers", "(", "chain", "=", "True", ")", "ret", "=", "{", "}", "for", "i", ",", "cmd", "in", "enumerate", "(", "commands", "[", ":", "]", ")", ":", "func", "=", "getattr", "(", "self", ",", "cmd", ")", "parser_cmd", "=", "parser_cmds", ".", "setdefault", "(", "cmd", ",", "cmd", ".", "replace", "(", "'_'", ",", "'-'", ")", ")", "ret", "[", "cmd", "]", "=", "sp", "=", "parser", ".", "setup_subparser", "(", "func", ",", "name", "=", "parser_cmd", ",", "return_parser", "=", "True", ")", "sp", ".", "setup_args", "(", "func", ")", "modifier", "=", "getattr", "(", "self", ",", "'_modify_'", "+", "cmd", ",", "None", ")", "if", "modifier", "is", "not", "None", ":", "modifier", "(", "sp", ")", "self", ".", "parser_commands", "=", "parser_cmds", "parser", ".", "setup_args", "(", "self", ".", "app_main", ")", "self", ".", "_modify_app_main", "(", "parser", ")", "self", ".", "parser", "=", "parser", "self", ".", "subparsers", "=", "ret", "return", "parser", ",", "subparsers", ",", "ret" ]
Create the argument parser for this instance This method uses the functions defined in the :attr:`commands` attribute to create a command line utility via the :class:`FuncArgParser` class. Each command in the :attr:`commands` attribute is interpreted as on subparser and setup initially via the :meth:`FuncArgParser.setup_args` method. You can modify the parser for each command *cmd* by including a ``_modify_cmd`` method that accepts the subparser as an argument Parameters ---------- parser: FuncArgParser The parser to use. If None, a new one will be created subparsers: argparse._SubParsersAction The subparsers to use. If None, the :attr:`~ArgumentParser.add_subparser` method from `parser` will be called Returns ------- FuncArgParser The created command line parser or the given `parser` argparse._SubParsersAction The created subparsers action or the given `subparsers` dict A mapping from command name in the :attr:`commands` attribute to the corresponding command line parser See Also -------- parse_args
[ "Create", "the", "argument", "parser", "for", "this", "instance" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1495-L1552
train
Chilipp/model-organization
model_organization/__init__.py
ModelOrganizer.get_parser
def get_parser(cls): """Function returning the command line parser for this class""" organizer = cls() organizer.setup_parser() organizer._finish_parser() return organizer.parser
python
def get_parser(cls): """Function returning the command line parser for this class""" organizer = cls() organizer.setup_parser() organizer._finish_parser() return organizer.parser
[ "def", "get_parser", "(", "cls", ")", ":", "organizer", "=", "cls", "(", ")", "organizer", ".", "setup_parser", "(", ")", "organizer", ".", "_finish_parser", "(", ")", "return", "organizer", ".", "parser" ]
Function returning the command line parser for this class
[ "Function", "returning", "the", "command", "line", "parser", "for", "this", "class" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1584-L1589
train
Chilipp/model-organization
model_organization/__init__.py
ModelOrganizer.is_archived
def is_archived(self, experiment, ignore_missing=True): """ Convenience function to determine whether the given experiment has been archived already Parameters ---------- experiment: str The experiment to check Returns ------- str or None The path to the archive if it has been archived, otherwise None """ if ignore_missing: if isinstance(self.config.experiments.get(experiment, True), Archive): return self.config.experiments.get(experiment, True) else: if isinstance(self.config.experiments[experiment], Archive): return self.config.experiments[experiment]
python
def is_archived(self, experiment, ignore_missing=True): """ Convenience function to determine whether the given experiment has been archived already Parameters ---------- experiment: str The experiment to check Returns ------- str or None The path to the archive if it has been archived, otherwise None """ if ignore_missing: if isinstance(self.config.experiments.get(experiment, True), Archive): return self.config.experiments.get(experiment, True) else: if isinstance(self.config.experiments[experiment], Archive): return self.config.experiments[experiment]
[ "def", "is_archived", "(", "self", ",", "experiment", ",", "ignore_missing", "=", "True", ")", ":", "if", "ignore_missing", ":", "if", "isinstance", "(", "self", ".", "config", ".", "experiments", ".", "get", "(", "experiment", ",", "True", ")", ",", "Archive", ")", ":", "return", "self", ".", "config", ".", "experiments", ".", "get", "(", "experiment", ",", "True", ")", "else", ":", "if", "isinstance", "(", "self", ".", "config", ".", "experiments", "[", "experiment", "]", ",", "Archive", ")", ":", "return", "self", ".", "config", ".", "experiments", "[", "experiment", "]" ]
Convenience function to determine whether the given experiment has been archived already Parameters ---------- experiment: str The experiment to check Returns ------- str or None The path to the archive if it has been archived, otherwise None
[ "Convenience", "function", "to", "determine", "whether", "the", "given", "experiment", "has", "been", "archived", "already" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1595-L1616
train
Chilipp/model-organization
model_organization/__init__.py
ModelOrganizer._archive_extensions
def _archive_extensions(): """Create translations from file extension to archive format Returns ------- dict The mapping from file extension to archive format dict The mapping from archive format to default file extension """ if six.PY3: ext_map = {} fmt_map = {} for key, exts, desc in shutil.get_unpack_formats(): fmt_map[key] = exts[0] for ext in exts: ext_map[ext] = key else: ext_map = {'.tar': 'tar', '.tar.bz2': 'bztar', '.tar.gz': 'gztar', '.tar.xz': 'xztar', '.tbz2': 'bztar', '.tgz': 'gztar', '.txz': 'xztar', '.zip': 'zip'} fmt_map = {'bztar': '.tar.bz2', 'gztar': '.tar.gz', 'tar': '.tar', 'xztar': '.tar.xz', 'zip': '.zip'} return ext_map, fmt_map
python
def _archive_extensions(): """Create translations from file extension to archive format Returns ------- dict The mapping from file extension to archive format dict The mapping from archive format to default file extension """ if six.PY3: ext_map = {} fmt_map = {} for key, exts, desc in shutil.get_unpack_formats(): fmt_map[key] = exts[0] for ext in exts: ext_map[ext] = key else: ext_map = {'.tar': 'tar', '.tar.bz2': 'bztar', '.tar.gz': 'gztar', '.tar.xz': 'xztar', '.tbz2': 'bztar', '.tgz': 'gztar', '.txz': 'xztar', '.zip': 'zip'} fmt_map = {'bztar': '.tar.bz2', 'gztar': '.tar.gz', 'tar': '.tar', 'xztar': '.tar.xz', 'zip': '.zip'} return ext_map, fmt_map
[ "def", "_archive_extensions", "(", ")", ":", "if", "six", ".", "PY3", ":", "ext_map", "=", "{", "}", "fmt_map", "=", "{", "}", "for", "key", ",", "exts", ",", "desc", "in", "shutil", ".", "get_unpack_formats", "(", ")", ":", "fmt_map", "[", "key", "]", "=", "exts", "[", "0", "]", "for", "ext", "in", "exts", ":", "ext_map", "[", "ext", "]", "=", "key", "else", ":", "ext_map", "=", "{", "'.tar'", ":", "'tar'", ",", "'.tar.bz2'", ":", "'bztar'", ",", "'.tar.gz'", ":", "'gztar'", ",", "'.tar.xz'", ":", "'xztar'", ",", "'.tbz2'", ":", "'bztar'", ",", "'.tgz'", ":", "'gztar'", ",", "'.txz'", ":", "'xztar'", ",", "'.zip'", ":", "'zip'", "}", "fmt_map", "=", "{", "'bztar'", ":", "'.tar.bz2'", ",", "'gztar'", ":", "'.tar.gz'", ",", "'tar'", ":", "'.tar'", ",", "'xztar'", ":", "'.tar.xz'", ",", "'zip'", ":", "'.zip'", "}", "return", "ext_map", ",", "fmt_map" ]
Create translations from file extension to archive format Returns ------- dict The mapping from file extension to archive format dict The mapping from archive format to default file extension
[ "Create", "translations", "from", "file", "extension", "to", "archive", "format" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1619-L1650
train
olitheolix/qtmacs
qtmacs/applets/pdf_reader.py
PDFReader.loadFile
def loadFile(self, fileName): """ Load and display the PDF file specified by ``fileName``. """ # Test if the file exists. if not QtCore.QFile(fileName).exists(): msg = "File <b>{}</b> does not exist".format(self.qteAppletID()) self.qteLogger.info(msg) self.fileName = None return # Store the file name and load the PDF document with the # Poppler library. self.fileName = fileName doc = popplerqt4.Poppler.Document.load(fileName) # Enable antialiasing to improve the readability of the fonts. doc.setRenderHint(popplerqt4.Poppler.Document.Antialiasing) doc.setRenderHint(popplerqt4.Poppler.Document.TextAntialiasing) # Convert each page to an image, then install that image as the # pixmap of a QLabel, and finally insert that QLabel into a # vertical layout. hbox = QtGui.QVBoxLayout() for ii in range(doc.numPages()): pdf_img = doc.page(ii).renderToImage() pdf_label = self.qteAddWidget(QtGui.QLabel()) pdf_label.setPixmap(QtGui.QPixmap.fromImage(pdf_img)) hbox.addWidget(pdf_label) # Use an auxiliary widget to hold that layout and then place # that auxiliary widget into a QScrollView. The auxiliary # widget is necessary because QScrollArea can only display a # single widget at once. tmp = self.qteAddWidget(QtGui.QWidget(self)) tmp.setLayout(hbox) self.qteScroll.setWidget(tmp)
python
def loadFile(self, fileName): """ Load and display the PDF file specified by ``fileName``. """ # Test if the file exists. if not QtCore.QFile(fileName).exists(): msg = "File <b>{}</b> does not exist".format(self.qteAppletID()) self.qteLogger.info(msg) self.fileName = None return # Store the file name and load the PDF document with the # Poppler library. self.fileName = fileName doc = popplerqt4.Poppler.Document.load(fileName) # Enable antialiasing to improve the readability of the fonts. doc.setRenderHint(popplerqt4.Poppler.Document.Antialiasing) doc.setRenderHint(popplerqt4.Poppler.Document.TextAntialiasing) # Convert each page to an image, then install that image as the # pixmap of a QLabel, and finally insert that QLabel into a # vertical layout. hbox = QtGui.QVBoxLayout() for ii in range(doc.numPages()): pdf_img = doc.page(ii).renderToImage() pdf_label = self.qteAddWidget(QtGui.QLabel()) pdf_label.setPixmap(QtGui.QPixmap.fromImage(pdf_img)) hbox.addWidget(pdf_label) # Use an auxiliary widget to hold that layout and then place # that auxiliary widget into a QScrollView. The auxiliary # widget is necessary because QScrollArea can only display a # single widget at once. tmp = self.qteAddWidget(QtGui.QWidget(self)) tmp.setLayout(hbox) self.qteScroll.setWidget(tmp)
[ "def", "loadFile", "(", "self", ",", "fileName", ")", ":", "# Test if the file exists.", "if", "not", "QtCore", ".", "QFile", "(", "fileName", ")", ".", "exists", "(", ")", ":", "msg", "=", "\"File <b>{}</b> does not exist\"", ".", "format", "(", "self", ".", "qteAppletID", "(", ")", ")", "self", ".", "qteLogger", ".", "info", "(", "msg", ")", "self", ".", "fileName", "=", "None", "return", "# Store the file name and load the PDF document with the", "# Poppler library.", "self", ".", "fileName", "=", "fileName", "doc", "=", "popplerqt4", ".", "Poppler", ".", "Document", ".", "load", "(", "fileName", ")", "# Enable antialiasing to improve the readability of the fonts.", "doc", ".", "setRenderHint", "(", "popplerqt4", ".", "Poppler", ".", "Document", ".", "Antialiasing", ")", "doc", ".", "setRenderHint", "(", "popplerqt4", ".", "Poppler", ".", "Document", ".", "TextAntialiasing", ")", "# Convert each page to an image, then install that image as the", "# pixmap of a QLabel, and finally insert that QLabel into a", "# vertical layout.", "hbox", "=", "QtGui", ".", "QVBoxLayout", "(", ")", "for", "ii", "in", "range", "(", "doc", ".", "numPages", "(", ")", ")", ":", "pdf_img", "=", "doc", ".", "page", "(", "ii", ")", ".", "renderToImage", "(", ")", "pdf_label", "=", "self", ".", "qteAddWidget", "(", "QtGui", ".", "QLabel", "(", ")", ")", "pdf_label", ".", "setPixmap", "(", "QtGui", ".", "QPixmap", ".", "fromImage", "(", "pdf_img", ")", ")", "hbox", ".", "addWidget", "(", "pdf_label", ")", "# Use an auxiliary widget to hold that layout and then place", "# that auxiliary widget into a QScrollView. The auxiliary", "# widget is necessary because QScrollArea can only display a", "# single widget at once.", "tmp", "=", "self", ".", "qteAddWidget", "(", "QtGui", ".", "QWidget", "(", "self", ")", ")", "tmp", ".", "setLayout", "(", "hbox", ")", "self", ".", "qteScroll", ".", "setWidget", "(", "tmp", ")" ]
Load and display the PDF file specified by ``fileName``.
[ "Load", "and", "display", "the", "PDF", "file", "specified", "by", "fileName", "." ]
36253b082b82590f183fe154b053eb3a1e741be2
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/applets/pdf_reader.py#L82-L119
train
AASHE/python-membersuite-api-client
membersuite_api_client/financial/services.py
get_product
def get_product(membersuite_id, client=None): """Return a Product object by ID. """ if not membersuite_id: return None client = client or get_new_client(request_session=True) object_query = "SELECT Object() FROM PRODUCT WHERE ID = '{}'".format( membersuite_id) result = client.execute_object_query(object_query) msql_result = result["body"]["ExecuteMSQLResult"] if msql_result["Success"]: membersuite_object_data = (msql_result["ResultValue"] ["SingleObject"]) else: raise ExecuteMSQLError(result=result) return Product(membersuite_object_data=membersuite_object_data)
python
def get_product(membersuite_id, client=None): """Return a Product object by ID. """ if not membersuite_id: return None client = client or get_new_client(request_session=True) object_query = "SELECT Object() FROM PRODUCT WHERE ID = '{}'".format( membersuite_id) result = client.execute_object_query(object_query) msql_result = result["body"]["ExecuteMSQLResult"] if msql_result["Success"]: membersuite_object_data = (msql_result["ResultValue"] ["SingleObject"]) else: raise ExecuteMSQLError(result=result) return Product(membersuite_object_data=membersuite_object_data)
[ "def", "get_product", "(", "membersuite_id", ",", "client", "=", "None", ")", ":", "if", "not", "membersuite_id", ":", "return", "None", "client", "=", "client", "or", "get_new_client", "(", "request_session", "=", "True", ")", "object_query", "=", "\"SELECT Object() FROM PRODUCT WHERE ID = '{}'\"", ".", "format", "(", "membersuite_id", ")", "result", "=", "client", ".", "execute_object_query", "(", "object_query", ")", "msql_result", "=", "result", "[", "\"body\"", "]", "[", "\"ExecuteMSQLResult\"", "]", "if", "msql_result", "[", "\"Success\"", "]", ":", "membersuite_object_data", "=", "(", "msql_result", "[", "\"ResultValue\"", "]", "[", "\"SingleObject\"", "]", ")", "else", ":", "raise", "ExecuteMSQLError", "(", "result", "=", "result", ")", "return", "Product", "(", "membersuite_object_data", "=", "membersuite_object_data", ")" ]
Return a Product object by ID.
[ "Return", "a", "Product", "object", "by", "ID", "." ]
221f5ed8bc7d4424237a4669c5af9edc11819ee9
https://github.com/AASHE/python-membersuite-api-client/blob/221f5ed8bc7d4424237a4669c5af9edc11819ee9/membersuite_api_client/financial/services.py#L6-L27
train
a1ezzz/wasp-general
wasp_general/signals/signals.py
WSignalSource.__watchers_callbacks_exec
def __watchers_callbacks_exec(self, signal_name): """ Generate callback for a queue :param signal_name: name of a signal that callback is generated for :type signal_name: str :rtype: callable """ def callback_fn(): for watcher in self.__watchers_callbacks[signal_name]: if watcher is not None: watcher.notify() return callback_fn
python
def __watchers_callbacks_exec(self, signal_name): """ Generate callback for a queue :param signal_name: name of a signal that callback is generated for :type signal_name: str :rtype: callable """ def callback_fn(): for watcher in self.__watchers_callbacks[signal_name]: if watcher is not None: watcher.notify() return callback_fn
[ "def", "__watchers_callbacks_exec", "(", "self", ",", "signal_name", ")", ":", "def", "callback_fn", "(", ")", ":", "for", "watcher", "in", "self", ".", "__watchers_callbacks", "[", "signal_name", "]", ":", "if", "watcher", "is", "not", "None", ":", "watcher", ".", "notify", "(", ")", "return", "callback_fn" ]
Generate callback for a queue :param signal_name: name of a signal that callback is generated for :type signal_name: str :rtype: callable
[ "Generate", "callback", "for", "a", "queue" ]
1029839d33eb663f8dec76c1c46754d53c1de4a9
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/signals/signals.py#L126-L138
train
Loudr/pale
pale/utils.py
py_doc_trim
def py_doc_trim(docstring): """Trim a python doc string. This example is nipped from https://www.python.org/dev/peps/pep-0257/, which describes how to conventionally format and trim docstrings. It has been modified to replace single newlines with a space, but leave multiple consecutive newlines in tact. """ if not docstring: return '' # Convert tabs to spaces (following the normal Python rules) # and split into a list of lines: lines = docstring.expandtabs().splitlines() # Determine minimum indentation (first line doesn't count): indent = sys.maxint for line in lines[1:]: stripped = line.lstrip() if stripped: indent = min(indent, len(line) - len(stripped)) # Remove indentation (first line is special): trimmed = [lines[0].strip()] if indent < sys.maxint: for line in lines[1:]: trimmed.append(line[indent:].rstrip()) # Strip off trailing and leading blank lines: while trimmed and not trimmed[-1]: trimmed.pop() while trimmed and not trimmed[0]: trimmed.pop(0) # The single string returned by the original function joined = '\n'.join(trimmed) # Return a version that replaces single newlines with spaces return newline_substitution_regex.sub(" ", joined)
python
def py_doc_trim(docstring): """Trim a python doc string. This example is nipped from https://www.python.org/dev/peps/pep-0257/, which describes how to conventionally format and trim docstrings. It has been modified to replace single newlines with a space, but leave multiple consecutive newlines in tact. """ if not docstring: return '' # Convert tabs to spaces (following the normal Python rules) # and split into a list of lines: lines = docstring.expandtabs().splitlines() # Determine minimum indentation (first line doesn't count): indent = sys.maxint for line in lines[1:]: stripped = line.lstrip() if stripped: indent = min(indent, len(line) - len(stripped)) # Remove indentation (first line is special): trimmed = [lines[0].strip()] if indent < sys.maxint: for line in lines[1:]: trimmed.append(line[indent:].rstrip()) # Strip off trailing and leading blank lines: while trimmed and not trimmed[-1]: trimmed.pop() while trimmed and not trimmed[0]: trimmed.pop(0) # The single string returned by the original function joined = '\n'.join(trimmed) # Return a version that replaces single newlines with spaces return newline_substitution_regex.sub(" ", joined)
[ "def", "py_doc_trim", "(", "docstring", ")", ":", "if", "not", "docstring", ":", "return", "''", "# Convert tabs to spaces (following the normal Python rules)", "# and split into a list of lines:", "lines", "=", "docstring", ".", "expandtabs", "(", ")", ".", "splitlines", "(", ")", "# Determine minimum indentation (first line doesn't count):", "indent", "=", "sys", ".", "maxint", "for", "line", "in", "lines", "[", "1", ":", "]", ":", "stripped", "=", "line", ".", "lstrip", "(", ")", "if", "stripped", ":", "indent", "=", "min", "(", "indent", ",", "len", "(", "line", ")", "-", "len", "(", "stripped", ")", ")", "# Remove indentation (first line is special):", "trimmed", "=", "[", "lines", "[", "0", "]", ".", "strip", "(", ")", "]", "if", "indent", "<", "sys", ".", "maxint", ":", "for", "line", "in", "lines", "[", "1", ":", "]", ":", "trimmed", ".", "append", "(", "line", "[", "indent", ":", "]", ".", "rstrip", "(", ")", ")", "# Strip off trailing and leading blank lines:", "while", "trimmed", "and", "not", "trimmed", "[", "-", "1", "]", ":", "trimmed", ".", "pop", "(", ")", "while", "trimmed", "and", "not", "trimmed", "[", "0", "]", ":", "trimmed", ".", "pop", "(", "0", ")", "# The single string returned by the original function", "joined", "=", "'\\n'", ".", "join", "(", "trimmed", ")", "# Return a version that replaces single newlines with spaces", "return", "newline_substitution_regex", ".", "sub", "(", "\" \"", ",", "joined", ")" ]
Trim a python doc string. This example is nipped from https://www.python.org/dev/peps/pep-0257/, which describes how to conventionally format and trim docstrings. It has been modified to replace single newlines with a space, but leave multiple consecutive newlines in tact.
[ "Trim", "a", "python", "doc", "string", "." ]
dc002ee6032c856551143af222ff8f71ed9853fe
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/utils.py#L11-L44
train
Loudr/pale
pale/resource.py
Resource._fix_up_fields
def _fix_up_fields(cls): """Add names to all of the Resource fields. This method will get called on class declaration because of Resource's metaclass. The functionality is based on Google's NDB implementation. `Endpoint` does something similar for `arguments`. """ cls._fields = {} if cls.__module__ == __name__ and cls.__name__ != 'DebugResource': return for name in set(dir(cls)): attr = getattr(cls, name, None) if isinstance(attr, BaseField): if name.startswith('_'): raise TypeError("Resource field %s cannot begin with an " "underscore. Underscore attributes are reserved " "for instance variables that aren't intended to " "propagate out to the HTTP caller." % name) attr._fix_up(cls, name) cls._fields[attr.name] = attr if cls._default_fields is None: cls._default_fields = tuple(cls._fields.keys())
python
def _fix_up_fields(cls): """Add names to all of the Resource fields. This method will get called on class declaration because of Resource's metaclass. The functionality is based on Google's NDB implementation. `Endpoint` does something similar for `arguments`. """ cls._fields = {} if cls.__module__ == __name__ and cls.__name__ != 'DebugResource': return for name in set(dir(cls)): attr = getattr(cls, name, None) if isinstance(attr, BaseField): if name.startswith('_'): raise TypeError("Resource field %s cannot begin with an " "underscore. Underscore attributes are reserved " "for instance variables that aren't intended to " "propagate out to the HTTP caller." % name) attr._fix_up(cls, name) cls._fields[attr.name] = attr if cls._default_fields is None: cls._default_fields = tuple(cls._fields.keys())
[ "def", "_fix_up_fields", "(", "cls", ")", ":", "cls", ".", "_fields", "=", "{", "}", "if", "cls", ".", "__module__", "==", "__name__", "and", "cls", ".", "__name__", "!=", "'DebugResource'", ":", "return", "for", "name", "in", "set", "(", "dir", "(", "cls", ")", ")", ":", "attr", "=", "getattr", "(", "cls", ",", "name", ",", "None", ")", "if", "isinstance", "(", "attr", ",", "BaseField", ")", ":", "if", "name", ".", "startswith", "(", "'_'", ")", ":", "raise", "TypeError", "(", "\"Resource field %s cannot begin with an \"", "\"underscore. Underscore attributes are reserved \"", "\"for instance variables that aren't intended to \"", "\"propagate out to the HTTP caller.\"", "%", "name", ")", "attr", ".", "_fix_up", "(", "cls", ",", "name", ")", "cls", ".", "_fields", "[", "attr", ".", "name", "]", "=", "attr", "if", "cls", ".", "_default_fields", "is", "None", ":", "cls", ".", "_default_fields", "=", "tuple", "(", "cls", ".", "_fields", ".", "keys", "(", ")", ")" ]
Add names to all of the Resource fields. This method will get called on class declaration because of Resource's metaclass. The functionality is based on Google's NDB implementation. `Endpoint` does something similar for `arguments`.
[ "Add", "names", "to", "all", "of", "the", "Resource", "fields", "." ]
dc002ee6032c856551143af222ff8f71ed9853fe
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/resource.py#L19-L42
train
Loudr/pale
pale/resource.py
Resource._render_serializable
def _render_serializable(self, obj, context): """Renders a JSON-serializable version of the object passed in. Usually this means turning a Python object into a dict, but sometimes it might make sense to render a list, or a string, or a tuple. In this base class, we provide a default implementation that assumes some things about your application architecture, namely, that your models specified in `underlying_model` have properties with the same name as all of the `_fields` that you've specified on a resource, and that all of those fields are public. Obviously this may not be appropriate for your app, so your subclass(es) of Resource should implement this method to serialize your things in the way that works for you. Do what you need to do. The world is your oyster. """ logging.info("""Careful, you're calling ._render_serializable on the base resource, which is probably not what you actually want to be doing!""") if obj is None: logging.debug( "_render_serializable passed a None obj, returning None") return None output = {} if self._fields_to_render is None: return output for field in self._fields_to_render: renderer = self._fields[field].render output[field] = renderer(obj, field, context) return output
python
def _render_serializable(self, obj, context): """Renders a JSON-serializable version of the object passed in. Usually this means turning a Python object into a dict, but sometimes it might make sense to render a list, or a string, or a tuple. In this base class, we provide a default implementation that assumes some things about your application architecture, namely, that your models specified in `underlying_model` have properties with the same name as all of the `_fields` that you've specified on a resource, and that all of those fields are public. Obviously this may not be appropriate for your app, so your subclass(es) of Resource should implement this method to serialize your things in the way that works for you. Do what you need to do. The world is your oyster. """ logging.info("""Careful, you're calling ._render_serializable on the base resource, which is probably not what you actually want to be doing!""") if obj is None: logging.debug( "_render_serializable passed a None obj, returning None") return None output = {} if self._fields_to_render is None: return output for field in self._fields_to_render: renderer = self._fields[field].render output[field] = renderer(obj, field, context) return output
[ "def", "_render_serializable", "(", "self", ",", "obj", ",", "context", ")", ":", "logging", ".", "info", "(", "\"\"\"Careful, you're calling ._render_serializable on the\n base resource, which is probably not what you actually want to be\n doing!\"\"\"", ")", "if", "obj", "is", "None", ":", "logging", ".", "debug", "(", "\"_render_serializable passed a None obj, returning None\"", ")", "return", "None", "output", "=", "{", "}", "if", "self", ".", "_fields_to_render", "is", "None", ":", "return", "output", "for", "field", "in", "self", ".", "_fields_to_render", ":", "renderer", "=", "self", ".", "_fields", "[", "field", "]", ".", "render", "output", "[", "field", "]", "=", "renderer", "(", "obj", ",", "field", ",", "context", ")", "return", "output" ]
Renders a JSON-serializable version of the object passed in. Usually this means turning a Python object into a dict, but sometimes it might make sense to render a list, or a string, or a tuple. In this base class, we provide a default implementation that assumes some things about your application architecture, namely, that your models specified in `underlying_model` have properties with the same name as all of the `_fields` that you've specified on a resource, and that all of those fields are public. Obviously this may not be appropriate for your app, so your subclass(es) of Resource should implement this method to serialize your things in the way that works for you. Do what you need to do. The world is your oyster.
[ "Renders", "a", "JSON", "-", "serializable", "version", "of", "the", "object", "passed", "in", ".", "Usually", "this", "means", "turning", "a", "Python", "object", "into", "a", "dict", "but", "sometimes", "it", "might", "make", "sense", "to", "render", "a", "list", "or", "a", "string", "or", "a", "tuple", "." ]
dc002ee6032c856551143af222ff8f71ed9853fe
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/resource.py#L59-L89
train
Loudr/pale
pale/resource.py
ResourceList._render_serializable
def _render_serializable(self, list_of_objs, context): """Iterates through the passed in `list_of_objs` and calls the `_render_serializable` method of each object's Resource type. This will probably support heterogeneous types at some point (hence the `item_types` initialization, as opposed to just item_type), but that might be better suited to something else like a ResourceDict. This method returns a JSON-serializable list of JSON-serializable dicts. """ output = [] for obj in list_of_objs: if obj is not None: item = self._item_resource._render_serializable(obj, context) output.append(item) return output
python
def _render_serializable(self, list_of_objs, context): """Iterates through the passed in `list_of_objs` and calls the `_render_serializable` method of each object's Resource type. This will probably support heterogeneous types at some point (hence the `item_types` initialization, as opposed to just item_type), but that might be better suited to something else like a ResourceDict. This method returns a JSON-serializable list of JSON-serializable dicts. """ output = [] for obj in list_of_objs: if obj is not None: item = self._item_resource._render_serializable(obj, context) output.append(item) return output
[ "def", "_render_serializable", "(", "self", ",", "list_of_objs", ",", "context", ")", ":", "output", "=", "[", "]", "for", "obj", "in", "list_of_objs", ":", "if", "obj", "is", "not", "None", ":", "item", "=", "self", ".", "_item_resource", ".", "_render_serializable", "(", "obj", ",", "context", ")", "output", ".", "append", "(", "item", ")", "return", "output" ]
Iterates through the passed in `list_of_objs` and calls the `_render_serializable` method of each object's Resource type. This will probably support heterogeneous types at some point (hence the `item_types` initialization, as opposed to just item_type), but that might be better suited to something else like a ResourceDict. This method returns a JSON-serializable list of JSON-serializable dicts.
[ "Iterates", "through", "the", "passed", "in", "list_of_objs", "and", "calls", "the", "_render_serializable", "method", "of", "each", "object", "s", "Resource", "type", "." ]
dc002ee6032c856551143af222ff8f71ed9853fe
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/resource.py#L116-L132
train
a1ezzz/wasp-general
wasp_general/thread.py
critical_section_dynamic_lock
def critical_section_dynamic_lock(lock_fn, blocking=True, timeout=None, raise_exception=True): """ Protect a function with a lock, that was get from the specified function. If a lock can not be acquire, then no function call will be made :param lock_fn: callable that returns a lock, with which a function may be protected :param blocking: whenever to block operations with lock acquiring :param timeout: timeout with which a lock acquiring will be made :param raise_exception: whenever to raise an WCriticalSectionError exception if lock can not be acquired :return: decorator with which a target function may be protected """ if blocking is False or timeout is None: timeout = -1 def first_level_decorator(decorated_function): def second_level_decorator(original_function, *args, **kwargs): lock = lock_fn(*args, **kwargs) if lock.acquire(blocking=blocking, timeout=timeout) is True: try: result = original_function(*args, **kwargs) return result finally: lock.release() elif raise_exception is True: raise WCriticalSectionError('Unable to lock critical section\n') return decorator(second_level_decorator)(decorated_function) return first_level_decorator
python
def critical_section_dynamic_lock(lock_fn, blocking=True, timeout=None, raise_exception=True): """ Protect a function with a lock, that was get from the specified function. If a lock can not be acquire, then no function call will be made :param lock_fn: callable that returns a lock, with which a function may be protected :param blocking: whenever to block operations with lock acquiring :param timeout: timeout with which a lock acquiring will be made :param raise_exception: whenever to raise an WCriticalSectionError exception if lock can not be acquired :return: decorator with which a target function may be protected """ if blocking is False or timeout is None: timeout = -1 def first_level_decorator(decorated_function): def second_level_decorator(original_function, *args, **kwargs): lock = lock_fn(*args, **kwargs) if lock.acquire(blocking=blocking, timeout=timeout) is True: try: result = original_function(*args, **kwargs) return result finally: lock.release() elif raise_exception is True: raise WCriticalSectionError('Unable to lock critical section\n') return decorator(second_level_decorator)(decorated_function) return first_level_decorator
[ "def", "critical_section_dynamic_lock", "(", "lock_fn", ",", "blocking", "=", "True", ",", "timeout", "=", "None", ",", "raise_exception", "=", "True", ")", ":", "if", "blocking", "is", "False", "or", "timeout", "is", "None", ":", "timeout", "=", "-", "1", "def", "first_level_decorator", "(", "decorated_function", ")", ":", "def", "second_level_decorator", "(", "original_function", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "lock", "=", "lock_fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "lock", ".", "acquire", "(", "blocking", "=", "blocking", ",", "timeout", "=", "timeout", ")", "is", "True", ":", "try", ":", "result", "=", "original_function", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "result", "finally", ":", "lock", ".", "release", "(", ")", "elif", "raise_exception", "is", "True", ":", "raise", "WCriticalSectionError", "(", "'Unable to lock critical section\\n'", ")", "return", "decorator", "(", "second_level_decorator", ")", "(", "decorated_function", ")", "return", "first_level_decorator" ]
Protect a function with a lock, that was get from the specified function. If a lock can not be acquire, then no function call will be made :param lock_fn: callable that returns a lock, with which a function may be protected :param blocking: whenever to block operations with lock acquiring :param timeout: timeout with which a lock acquiring will be made :param raise_exception: whenever to raise an WCriticalSectionError exception if lock can not be acquired :return: decorator with which a target function may be protected
[ "Protect", "a", "function", "with", "a", "lock", "that", "was", "get", "from", "the", "specified", "function", ".", "If", "a", "lock", "can", "not", "be", "acquire", "then", "no", "function", "call", "will", "be", "made" ]
1029839d33eb663f8dec76c1c46754d53c1de4a9
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/thread.py#L42-L70
train
Loudr/pale
pale/doc.py
generate_json_docs
def generate_json_docs(module, pretty_print=False, user=None): """Return a JSON string format of a Pale module's documentation. This string can either be printed out, written to a file, or piped to some other tool. This method is a shorthand for calling `generate_doc_dict` and passing it into a json serializer. The user argument is optional. If included, it expects the user to be an object with an "is_admin" boolean attribute. Any endpoint protected with a "@requires_permission" decorator will require user.is_admin == True to display documentation on that endpoint. """ indent = None separators = (',', ':') if pretty_print: indent = 4 separators = (',', ': ') module_doc_dict = generate_doc_dict(module, user) json_str = json.dumps(module_doc_dict, indent=indent, separators=separators) return json_str
python
def generate_json_docs(module, pretty_print=False, user=None): """Return a JSON string format of a Pale module's documentation. This string can either be printed out, written to a file, or piped to some other tool. This method is a shorthand for calling `generate_doc_dict` and passing it into a json serializer. The user argument is optional. If included, it expects the user to be an object with an "is_admin" boolean attribute. Any endpoint protected with a "@requires_permission" decorator will require user.is_admin == True to display documentation on that endpoint. """ indent = None separators = (',', ':') if pretty_print: indent = 4 separators = (',', ': ') module_doc_dict = generate_doc_dict(module, user) json_str = json.dumps(module_doc_dict, indent=indent, separators=separators) return json_str
[ "def", "generate_json_docs", "(", "module", ",", "pretty_print", "=", "False", ",", "user", "=", "None", ")", ":", "indent", "=", "None", "separators", "=", "(", "','", ",", "':'", ")", "if", "pretty_print", ":", "indent", "=", "4", "separators", "=", "(", "','", ",", "': '", ")", "module_doc_dict", "=", "generate_doc_dict", "(", "module", ",", "user", ")", "json_str", "=", "json", ".", "dumps", "(", "module_doc_dict", ",", "indent", "=", "indent", ",", "separators", "=", "separators", ")", "return", "json_str" ]
Return a JSON string format of a Pale module's documentation. This string can either be printed out, written to a file, or piped to some other tool. This method is a shorthand for calling `generate_doc_dict` and passing it into a json serializer. The user argument is optional. If included, it expects the user to be an object with an "is_admin" boolean attribute. Any endpoint protected with a "@requires_permission" decorator will require user.is_admin == True to display documentation on that endpoint.
[ "Return", "a", "JSON", "string", "format", "of", "a", "Pale", "module", "s", "documentation", "." ]
dc002ee6032c856551143af222ff8f71ed9853fe
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/doc.py#L81-L104
train
Loudr/pale
pale/doc.py
generate_raml_docs
def generate_raml_docs(module, fields, shared_types, user=None, title="My API", version="v1", api_root="api", base_uri="http://mysite.com/{version}"): """Return a RAML file of a Pale module's documentation as a string. The user argument is optional. If included, it expects the user to be an object with an "is_admin" boolean attribute. Any endpoint protected with a "@requires_permission" decorator will require user.is_admin == True to display documentation on that endpoint. The arguments for 'title', 'version', and 'base_uri' are added to the RAML header info. """ output = StringIO() # Add the RAML header info output.write('#%RAML 1.0 \n') output.write('title: ' + title + ' \n') output.write('baseUri: ' + base_uri + ' \n') output.write('version: ' + version + '\n') output.write('mediaType: application/json\n\n') output.write('documentation:\n') output.write(' - title: Welcome\n') output.write(' content: |\n') output.write("""\ Welcome to the Loudr API Docs.\n You'll find comprehensive documentation on our endpoints and resources here. 
""") output.write("\n###############\n# Resource Types:\n###############\n\n") output.write('types:\n') basic_fields = [] for field_module in inspect.getmembers(fields, inspect.ismodule): for field_class in inspect.getmembers(field_module[1], inspect.isclass): basic_fields.append(field_class[1]) pale_basic_types = generate_basic_type_docs(basic_fields, {}) output.write("\n# Pale Basic Types:\n\n") output.write(pale_basic_types[0]) shared_fields = [] for shared_type in shared_types: for field_class in inspect.getmembers(shared_type, inspect.isclass): shared_fields.append(field_class[1]) pale_shared_types = generate_basic_type_docs(shared_fields, pale_basic_types[1]) output.write("\n# Pale Shared Types:\n\n") output.write(pale_shared_types[0]) raml_resource_types = generate_raml_resource_types(module) output.write("\n# API Resource Types:\n\n") output.write(raml_resource_types) raml_resources = generate_raml_resources(module, api_root, user) output.write("\n\n###############\n# API Endpoints:\n###############\n\n") output.write(raml_resources) raml_docs = output.getvalue() output.close() return raml_docs
python
def generate_raml_docs(module, fields, shared_types, user=None, title="My API", version="v1", api_root="api", base_uri="http://mysite.com/{version}"): """Return a RAML file of a Pale module's documentation as a string. The user argument is optional. If included, it expects the user to be an object with an "is_admin" boolean attribute. Any endpoint protected with a "@requires_permission" decorator will require user.is_admin == True to display documentation on that endpoint. The arguments for 'title', 'version', and 'base_uri' are added to the RAML header info. """ output = StringIO() # Add the RAML header info output.write('#%RAML 1.0 \n') output.write('title: ' + title + ' \n') output.write('baseUri: ' + base_uri + ' \n') output.write('version: ' + version + '\n') output.write('mediaType: application/json\n\n') output.write('documentation:\n') output.write(' - title: Welcome\n') output.write(' content: |\n') output.write("""\ Welcome to the Loudr API Docs.\n You'll find comprehensive documentation on our endpoints and resources here. 
""") output.write("\n###############\n# Resource Types:\n###############\n\n") output.write('types:\n') basic_fields = [] for field_module in inspect.getmembers(fields, inspect.ismodule): for field_class in inspect.getmembers(field_module[1], inspect.isclass): basic_fields.append(field_class[1]) pale_basic_types = generate_basic_type_docs(basic_fields, {}) output.write("\n# Pale Basic Types:\n\n") output.write(pale_basic_types[0]) shared_fields = [] for shared_type in shared_types: for field_class in inspect.getmembers(shared_type, inspect.isclass): shared_fields.append(field_class[1]) pale_shared_types = generate_basic_type_docs(shared_fields, pale_basic_types[1]) output.write("\n# Pale Shared Types:\n\n") output.write(pale_shared_types[0]) raml_resource_types = generate_raml_resource_types(module) output.write("\n# API Resource Types:\n\n") output.write(raml_resource_types) raml_resources = generate_raml_resources(module, api_root, user) output.write("\n\n###############\n# API Endpoints:\n###############\n\n") output.write(raml_resources) raml_docs = output.getvalue() output.close() return raml_docs
[ "def", "generate_raml_docs", "(", "module", ",", "fields", ",", "shared_types", ",", "user", "=", "None", ",", "title", "=", "\"My API\"", ",", "version", "=", "\"v1\"", ",", "api_root", "=", "\"api\"", ",", "base_uri", "=", "\"http://mysite.com/{version}\"", ")", ":", "output", "=", "StringIO", "(", ")", "# Add the RAML header info", "output", ".", "write", "(", "'#%RAML 1.0 \\n'", ")", "output", ".", "write", "(", "'title: '", "+", "title", "+", "' \\n'", ")", "output", ".", "write", "(", "'baseUri: '", "+", "base_uri", "+", "' \\n'", ")", "output", ".", "write", "(", "'version: '", "+", "version", "+", "'\\n'", ")", "output", ".", "write", "(", "'mediaType: application/json\\n\\n'", ")", "output", ".", "write", "(", "'documentation:\\n'", ")", "output", ".", "write", "(", "' - title: Welcome\\n'", ")", "output", ".", "write", "(", "' content: |\\n'", ")", "output", ".", "write", "(", "\"\"\"\\\n Welcome to the Loudr API Docs.\\n\n You'll find comprehensive documentation on our endpoints and resources here.\n \"\"\"", ")", "output", ".", "write", "(", "\"\\n###############\\n# Resource Types:\\n###############\\n\\n\"", ")", "output", ".", "write", "(", "'types:\\n'", ")", "basic_fields", "=", "[", "]", "for", "field_module", "in", "inspect", ".", "getmembers", "(", "fields", ",", "inspect", ".", "ismodule", ")", ":", "for", "field_class", "in", "inspect", ".", "getmembers", "(", "field_module", "[", "1", "]", ",", "inspect", ".", "isclass", ")", ":", "basic_fields", ".", "append", "(", "field_class", "[", "1", "]", ")", "pale_basic_types", "=", "generate_basic_type_docs", "(", "basic_fields", ",", "{", "}", ")", "output", ".", "write", "(", "\"\\n# Pale Basic Types:\\n\\n\"", ")", "output", ".", "write", "(", "pale_basic_types", "[", "0", "]", ")", "shared_fields", "=", "[", "]", "for", "shared_type", "in", "shared_types", ":", "for", "field_class", "in", "inspect", ".", "getmembers", "(", "shared_type", ",", "inspect", ".", "isclass", ")", ":", 
"shared_fields", ".", "append", "(", "field_class", "[", "1", "]", ")", "pale_shared_types", "=", "generate_basic_type_docs", "(", "shared_fields", ",", "pale_basic_types", "[", "1", "]", ")", "output", ".", "write", "(", "\"\\n# Pale Shared Types:\\n\\n\"", ")", "output", ".", "write", "(", "pale_shared_types", "[", "0", "]", ")", "raml_resource_types", "=", "generate_raml_resource_types", "(", "module", ")", "output", ".", "write", "(", "\"\\n# API Resource Types:\\n\\n\"", ")", "output", ".", "write", "(", "raml_resource_types", ")", "raml_resources", "=", "generate_raml_resources", "(", "module", ",", "api_root", ",", "user", ")", "output", ".", "write", "(", "\"\\n\\n###############\\n# API Endpoints:\\n###############\\n\\n\"", ")", "output", ".", "write", "(", "raml_resources", ")", "raml_docs", "=", "output", ".", "getvalue", "(", ")", "output", ".", "close", "(", ")", "return", "raml_docs" ]
Return a RAML file of a Pale module's documentation as a string. The user argument is optional. If included, it expects the user to be an object with an "is_admin" boolean attribute. Any endpoint protected with a "@requires_permission" decorator will require user.is_admin == True to display documentation on that endpoint. The arguments for 'title', 'version', and 'base_uri' are added to the RAML header info.
[ "Return", "a", "RAML", "file", "of", "a", "Pale", "module", "s", "documentation", "as", "a", "string", "." ]
dc002ee6032c856551143af222ff8f71ed9853fe
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/doc.py#L107-L169
train
Loudr/pale
pale/doc.py
generate_basic_type_docs
def generate_basic_type_docs(fields, existing_types): """Map resource types to their RAML equivalents. Expects fields to be a list of modules - each module would be something like pale.fields. Expects existing_types to be a list of dict of existing types, which will take precedence and prevent a new type with the same name from being added. For more on RAML built-in-types, see: https://github.com/raml-org/raml-spec/blob/master/versions/raml-10/raml-10.md#built-in-types """ # These types are standard in RAML 1.0 # They should not be defined in the RAML file that we return # We will inherit from them in the types we define raml_built_in_types = { "any": { "parent": None, }, "time-only": { "parent": "any", }, "datetime": { "parent": "any", "pale_children": ["timestamp"], }, "datetime-only": { "parent": "any", }, "date-only": { "parent": "any", "pale_children": ["date"], }, "number": { "parent": "any", }, "boolean": { "parent": "any", "pale_children": ["boolean"] }, "string": { "parent": "any", "pale_children": ["url", "string", "uri"], }, "null": { "parent": "any", }, "file": { "parent": "any", }, "array": { "parent": "any", "pale_children": ["list"], }, "object": { "parent": "any", }, "union": { "parent": "any", }, "XSD Schema": { "parent": "any", }, "JSON Schema": { "parent": "any", }, "integer": { "parent": "number", "pale_children": ["integer"], }, } basic_types = {} # Find all classes defined in a set of resources and build up an object with # the relevant details of the basic types for field in fields: # if this is a Pale type, it will have a 'value_type' property, if hasattr(field, "value_type"): type_name = field.value_type.replace(" ", "_") # add this type only if it is not in the built-in raml types and we have # not added it yet if type_name not in raml_built_in_types \ and type_name not in basic_types \ and type_name not in existing_types: basic_types[type_name] = {} # strip newlines and leading whitespaces from doc string, then add as description if 
hasattr(field, "__doc__"): modified_description = clean_description(field.__doc__) basic_types[type_name]["description"] = modified_description # if this type is listed as the child of a built-in raml type, # use the raml type as its parent type for raml_type in raml_built_in_types: if "pale_children" in raml_built_in_types[raml_type]: if type_name in raml_built_in_types[raml_type]["pale_children"]: basic_types[type_name]["type"] = raml_type break else: # if this is not the child of a built-in raml type # and if this type is a list composed of other items: if hasattr(field, "is_list") and field.is_list: basic_types[type_name]["type"] = "array" # and the type is defined, use the defined type if hasattr(field, "list_item_type") and field.list_item_type != None: basic_types[type_name]["items"] = field.list_item_type # otherwise, use the base type else: basic_types[type_name]["items"] = "base" # otherwise use the pale parent class as its type else: pale_parent_class = field.__mro__[1] # if we are at the base class, inherit from the RAML "object" type if pale_parent_class.__name__ == "object": basic_types[type_name]["type"] = "object" # otherwise, inherit from the named parent else: basic_types[type_name]["type"] = pale_parent_class.value_type ordered_basic_types = OrderedDict(sorted(basic_types.items(), key=lambda t: t[0])) basic_docs = generate_type_docs(ordered_basic_types) return (basic_docs, basic_types)
python
def generate_basic_type_docs(fields, existing_types): """Map resource types to their RAML equivalents. Expects fields to be a list of modules - each module would be something like pale.fields. Expects existing_types to be a list of dict of existing types, which will take precedence and prevent a new type with the same name from being added. For more on RAML built-in-types, see: https://github.com/raml-org/raml-spec/blob/master/versions/raml-10/raml-10.md#built-in-types """ # These types are standard in RAML 1.0 # They should not be defined in the RAML file that we return # We will inherit from them in the types we define raml_built_in_types = { "any": { "parent": None, }, "time-only": { "parent": "any", }, "datetime": { "parent": "any", "pale_children": ["timestamp"], }, "datetime-only": { "parent": "any", }, "date-only": { "parent": "any", "pale_children": ["date"], }, "number": { "parent": "any", }, "boolean": { "parent": "any", "pale_children": ["boolean"] }, "string": { "parent": "any", "pale_children": ["url", "string", "uri"], }, "null": { "parent": "any", }, "file": { "parent": "any", }, "array": { "parent": "any", "pale_children": ["list"], }, "object": { "parent": "any", }, "union": { "parent": "any", }, "XSD Schema": { "parent": "any", }, "JSON Schema": { "parent": "any", }, "integer": { "parent": "number", "pale_children": ["integer"], }, } basic_types = {} # Find all classes defined in a set of resources and build up an object with # the relevant details of the basic types for field in fields: # if this is a Pale type, it will have a 'value_type' property, if hasattr(field, "value_type"): type_name = field.value_type.replace(" ", "_") # add this type only if it is not in the built-in raml types and we have # not added it yet if type_name not in raml_built_in_types \ and type_name not in basic_types \ and type_name not in existing_types: basic_types[type_name] = {} # strip newlines and leading whitespaces from doc string, then add as description if 
hasattr(field, "__doc__"): modified_description = clean_description(field.__doc__) basic_types[type_name]["description"] = modified_description # if this type is listed as the child of a built-in raml type, # use the raml type as its parent type for raml_type in raml_built_in_types: if "pale_children" in raml_built_in_types[raml_type]: if type_name in raml_built_in_types[raml_type]["pale_children"]: basic_types[type_name]["type"] = raml_type break else: # if this is not the child of a built-in raml type # and if this type is a list composed of other items: if hasattr(field, "is_list") and field.is_list: basic_types[type_name]["type"] = "array" # and the type is defined, use the defined type if hasattr(field, "list_item_type") and field.list_item_type != None: basic_types[type_name]["items"] = field.list_item_type # otherwise, use the base type else: basic_types[type_name]["items"] = "base" # otherwise use the pale parent class as its type else: pale_parent_class = field.__mro__[1] # if we are at the base class, inherit from the RAML "object" type if pale_parent_class.__name__ == "object": basic_types[type_name]["type"] = "object" # otherwise, inherit from the named parent else: basic_types[type_name]["type"] = pale_parent_class.value_type ordered_basic_types = OrderedDict(sorted(basic_types.items(), key=lambda t: t[0])) basic_docs = generate_type_docs(ordered_basic_types) return (basic_docs, basic_types)
[ "def", "generate_basic_type_docs", "(", "fields", ",", "existing_types", ")", ":", "# These types are standard in RAML 1.0", "# They should not be defined in the RAML file that we return", "# We will inherit from them in the types we define", "raml_built_in_types", "=", "{", "\"any\"", ":", "{", "\"parent\"", ":", "None", ",", "}", ",", "\"time-only\"", ":", "{", "\"parent\"", ":", "\"any\"", ",", "}", ",", "\"datetime\"", ":", "{", "\"parent\"", ":", "\"any\"", ",", "\"pale_children\"", ":", "[", "\"timestamp\"", "]", ",", "}", ",", "\"datetime-only\"", ":", "{", "\"parent\"", ":", "\"any\"", ",", "}", ",", "\"date-only\"", ":", "{", "\"parent\"", ":", "\"any\"", ",", "\"pale_children\"", ":", "[", "\"date\"", "]", ",", "}", ",", "\"number\"", ":", "{", "\"parent\"", ":", "\"any\"", ",", "}", ",", "\"boolean\"", ":", "{", "\"parent\"", ":", "\"any\"", ",", "\"pale_children\"", ":", "[", "\"boolean\"", "]", "}", ",", "\"string\"", ":", "{", "\"parent\"", ":", "\"any\"", ",", "\"pale_children\"", ":", "[", "\"url\"", ",", "\"string\"", ",", "\"uri\"", "]", ",", "}", ",", "\"null\"", ":", "{", "\"parent\"", ":", "\"any\"", ",", "}", ",", "\"file\"", ":", "{", "\"parent\"", ":", "\"any\"", ",", "}", ",", "\"array\"", ":", "{", "\"parent\"", ":", "\"any\"", ",", "\"pale_children\"", ":", "[", "\"list\"", "]", ",", "}", ",", "\"object\"", ":", "{", "\"parent\"", ":", "\"any\"", ",", "}", ",", "\"union\"", ":", "{", "\"parent\"", ":", "\"any\"", ",", "}", ",", "\"XSD Schema\"", ":", "{", "\"parent\"", ":", "\"any\"", ",", "}", ",", "\"JSON Schema\"", ":", "{", "\"parent\"", ":", "\"any\"", ",", "}", ",", "\"integer\"", ":", "{", "\"parent\"", ":", "\"number\"", ",", "\"pale_children\"", ":", "[", "\"integer\"", "]", ",", "}", ",", "}", "basic_types", "=", "{", "}", "# Find all classes defined in a set of resources and build up an object with", "# the relevant details of the basic types", "for", "field", "in", "fields", ":", "# if this is a Pale type, it will have a 
'value_type' property,", "if", "hasattr", "(", "field", ",", "\"value_type\"", ")", ":", "type_name", "=", "field", ".", "value_type", ".", "replace", "(", "\" \"", ",", "\"_\"", ")", "# add this type only if it is not in the built-in raml types and we have", "# not added it yet", "if", "type_name", "not", "in", "raml_built_in_types", "and", "type_name", "not", "in", "basic_types", "and", "type_name", "not", "in", "existing_types", ":", "basic_types", "[", "type_name", "]", "=", "{", "}", "# strip newlines and leading whitespaces from doc string, then add as description", "if", "hasattr", "(", "field", ",", "\"__doc__\"", ")", ":", "modified_description", "=", "clean_description", "(", "field", ".", "__doc__", ")", "basic_types", "[", "type_name", "]", "[", "\"description\"", "]", "=", "modified_description", "# if this type is listed as the child of a built-in raml type,", "# use the raml type as its parent type", "for", "raml_type", "in", "raml_built_in_types", ":", "if", "\"pale_children\"", "in", "raml_built_in_types", "[", "raml_type", "]", ":", "if", "type_name", "in", "raml_built_in_types", "[", "raml_type", "]", "[", "\"pale_children\"", "]", ":", "basic_types", "[", "type_name", "]", "[", "\"type\"", "]", "=", "raml_type", "break", "else", ":", "# if this is not the child of a built-in raml type", "# and if this type is a list composed of other items:", "if", "hasattr", "(", "field", ",", "\"is_list\"", ")", "and", "field", ".", "is_list", ":", "basic_types", "[", "type_name", "]", "[", "\"type\"", "]", "=", "\"array\"", "# and the type is defined, use the defined type", "if", "hasattr", "(", "field", ",", "\"list_item_type\"", ")", "and", "field", ".", "list_item_type", "!=", "None", ":", "basic_types", "[", "type_name", "]", "[", "\"items\"", "]", "=", "field", ".", "list_item_type", "# otherwise, use the base type", "else", ":", "basic_types", "[", "type_name", "]", "[", "\"items\"", "]", "=", "\"base\"", "# otherwise use the pale parent class as its 
type", "else", ":", "pale_parent_class", "=", "field", ".", "__mro__", "[", "1", "]", "# if we are at the base class, inherit from the RAML \"object\" type", "if", "pale_parent_class", ".", "__name__", "==", "\"object\"", ":", "basic_types", "[", "type_name", "]", "[", "\"type\"", "]", "=", "\"object\"", "# otherwise, inherit from the named parent", "else", ":", "basic_types", "[", "type_name", "]", "[", "\"type\"", "]", "=", "pale_parent_class", ".", "value_type", "ordered_basic_types", "=", "OrderedDict", "(", "sorted", "(", "basic_types", ".", "items", "(", ")", ",", "key", "=", "lambda", "t", ":", "t", "[", "0", "]", ")", ")", "basic_docs", "=", "generate_type_docs", "(", "ordered_basic_types", ")", "return", "(", "basic_docs", ",", "basic_types", ")" ]
Map resource types to their RAML equivalents. Expects fields to be a list of modules - each module would be something like pale.fields. Expects existing_types to be a list of dict of existing types, which will take precedence and prevent a new type with the same name from being added. For more on RAML built-in-types, see: https://github.com/raml-org/raml-spec/blob/master/versions/raml-10/raml-10.md#built-in-types
[ "Map", "resource", "types", "to", "their", "RAML", "equivalents", ".", "Expects", "fields", "to", "be", "a", "list", "of", "modules", "-", "each", "module", "would", "be", "something", "like", "pale", ".", "fields", ".", "Expects", "existing_types", "to", "be", "a", "list", "of", "dict", "of", "existing", "types", "which", "will", "take", "precedence", "and", "prevent", "a", "new", "type", "with", "the", "same", "name", "from", "being", "added", "." ]
dc002ee6032c856551143af222ff8f71ed9853fe
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/doc.py#L215-L346
train
Loudr/pale
pale/doc.py
generate_doc_dict
def generate_doc_dict(module, user): """Compile a Pale module's documentation into a python dictionary. The returned dictionary is suitable to be rendered by a JSON formatter, or passed to a template engine, or manipulated in some other way. """ from pale import extract_endpoints, extract_resources, is_pale_module if not is_pale_module(module): raise ValueError( """The passed in `module` (%s) is not a pale module. `paledoc` only works on modules with a `_module_type` set to equal `pale.ImplementationModule`.""") module_endpoints = extract_endpoints(module) ep_doc = { ep._route_name: document_endpoint(ep) for ep \ in module_endpoints } ep_doc_filtered = {} for endpoint in ep_doc: # check if user has permission to view this endpoint # this is currently an on/off switch: if any endpoint has a "@requires_permission" # decorator, user.is_admin must be True for the user to see documentation # @TODO - make this permission more granular if necessary if ep_doc[endpoint].get("requires_permission") != None and user != None and user.is_admin or \ ep_doc[endpoint].get("requires_permission") == None: ep_doc_filtered[endpoint] = ep_doc[endpoint] module_resources = extract_resources(module) res_doc = { r._value_type: document_resource(r) for r \ in module_resources } return {'endpoints': ep_doc_filtered, 'resources': res_doc}
python
def generate_doc_dict(module, user): """Compile a Pale module's documentation into a python dictionary. The returned dictionary is suitable to be rendered by a JSON formatter, or passed to a template engine, or manipulated in some other way. """ from pale import extract_endpoints, extract_resources, is_pale_module if not is_pale_module(module): raise ValueError( """The passed in `module` (%s) is not a pale module. `paledoc` only works on modules with a `_module_type` set to equal `pale.ImplementationModule`.""") module_endpoints = extract_endpoints(module) ep_doc = { ep._route_name: document_endpoint(ep) for ep \ in module_endpoints } ep_doc_filtered = {} for endpoint in ep_doc: # check if user has permission to view this endpoint # this is currently an on/off switch: if any endpoint has a "@requires_permission" # decorator, user.is_admin must be True for the user to see documentation # @TODO - make this permission more granular if necessary if ep_doc[endpoint].get("requires_permission") != None and user != None and user.is_admin or \ ep_doc[endpoint].get("requires_permission") == None: ep_doc_filtered[endpoint] = ep_doc[endpoint] module_resources = extract_resources(module) res_doc = { r._value_type: document_resource(r) for r \ in module_resources } return {'endpoints': ep_doc_filtered, 'resources': res_doc}
[ "def", "generate_doc_dict", "(", "module", ",", "user", ")", ":", "from", "pale", "import", "extract_endpoints", ",", "extract_resources", ",", "is_pale_module", "if", "not", "is_pale_module", "(", "module", ")", ":", "raise", "ValueError", "(", "\"\"\"The passed in `module` (%s) is not a pale module. `paledoc`\n only works on modules with a `_module_type` set to equal\n `pale.ImplementationModule`.\"\"\"", ")", "module_endpoints", "=", "extract_endpoints", "(", "module", ")", "ep_doc", "=", "{", "ep", ".", "_route_name", ":", "document_endpoint", "(", "ep", ")", "for", "ep", "in", "module_endpoints", "}", "ep_doc_filtered", "=", "{", "}", "for", "endpoint", "in", "ep_doc", ":", "# check if user has permission to view this endpoint", "# this is currently an on/off switch: if any endpoint has a \"@requires_permission\"", "# decorator, user.is_admin must be True for the user to see documentation", "# @TODO - make this permission more granular if necessary", "if", "ep_doc", "[", "endpoint", "]", ".", "get", "(", "\"requires_permission\"", ")", "!=", "None", "and", "user", "!=", "None", "and", "user", ".", "is_admin", "or", "ep_doc", "[", "endpoint", "]", ".", "get", "(", "\"requires_permission\"", ")", "==", "None", ":", "ep_doc_filtered", "[", "endpoint", "]", "=", "ep_doc", "[", "endpoint", "]", "module_resources", "=", "extract_resources", "(", "module", ")", "res_doc", "=", "{", "r", ".", "_value_type", ":", "document_resource", "(", "r", ")", "for", "r", "in", "module_resources", "}", "return", "{", "'endpoints'", ":", "ep_doc_filtered", ",", "'resources'", ":", "res_doc", "}" ]
Compile a Pale module's documentation into a python dictionary. The returned dictionary is suitable to be rendered by a JSON formatter, or passed to a template engine, or manipulated in some other way.
[ "Compile", "a", "Pale", "module", "s", "documentation", "into", "a", "python", "dictionary", "." ]
dc002ee6032c856551143af222ff8f71ed9853fe
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/doc.py#L781-L815
train
Loudr/pale
pale/doc.py
document_endpoint
def document_endpoint(endpoint): """Extract the full documentation dictionary from the endpoint.""" descr = clean_description(py_doc_trim(endpoint.__doc__)) docs = { 'name': endpoint._route_name, 'http_method': endpoint._http_method, 'uri': endpoint._uri, 'description': descr, 'arguments': extract_endpoint_arguments(endpoint), 'returns': format_endpoint_returns_doc(endpoint), } if hasattr(endpoint, "_success"): docs["success"] = endpoint._success if hasattr(endpoint, "_requires_permission"): docs["requires_permission"] = endpoint._requires_permission return docs
python
def document_endpoint(endpoint): """Extract the full documentation dictionary from the endpoint.""" descr = clean_description(py_doc_trim(endpoint.__doc__)) docs = { 'name': endpoint._route_name, 'http_method': endpoint._http_method, 'uri': endpoint._uri, 'description': descr, 'arguments': extract_endpoint_arguments(endpoint), 'returns': format_endpoint_returns_doc(endpoint), } if hasattr(endpoint, "_success"): docs["success"] = endpoint._success if hasattr(endpoint, "_requires_permission"): docs["requires_permission"] = endpoint._requires_permission return docs
[ "def", "document_endpoint", "(", "endpoint", ")", ":", "descr", "=", "clean_description", "(", "py_doc_trim", "(", "endpoint", ".", "__doc__", ")", ")", "docs", "=", "{", "'name'", ":", "endpoint", ".", "_route_name", ",", "'http_method'", ":", "endpoint", ".", "_http_method", ",", "'uri'", ":", "endpoint", ".", "_uri", ",", "'description'", ":", "descr", ",", "'arguments'", ":", "extract_endpoint_arguments", "(", "endpoint", ")", ",", "'returns'", ":", "format_endpoint_returns_doc", "(", "endpoint", ")", ",", "}", "if", "hasattr", "(", "endpoint", ",", "\"_success\"", ")", ":", "docs", "[", "\"success\"", "]", "=", "endpoint", ".", "_success", "if", "hasattr", "(", "endpoint", ",", "\"_requires_permission\"", ")", ":", "docs", "[", "\"requires_permission\"", "]", "=", "endpoint", ".", "_requires_permission", "return", "docs" ]
Extract the full documentation dictionary from the endpoint.
[ "Extract", "the", "full", "documentation", "dictionary", "from", "the", "endpoint", "." ]
dc002ee6032c856551143af222ff8f71ed9853fe
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/doc.py#L818-L833
train
Loudr/pale
pale/doc.py
extract_endpoint_arguments
def extract_endpoint_arguments(endpoint): """Extract the argument documentation from the endpoint.""" ep_args = endpoint._arguments if ep_args is None: return None arg_docs = { k: format_endpoint_argument_doc(a) \ for k, a in ep_args.iteritems() } return arg_docs
python
def extract_endpoint_arguments(endpoint): """Extract the argument documentation from the endpoint.""" ep_args = endpoint._arguments if ep_args is None: return None arg_docs = { k: format_endpoint_argument_doc(a) \ for k, a in ep_args.iteritems() } return arg_docs
[ "def", "extract_endpoint_arguments", "(", "endpoint", ")", ":", "ep_args", "=", "endpoint", ".", "_arguments", "if", "ep_args", "is", "None", ":", "return", "None", "arg_docs", "=", "{", "k", ":", "format_endpoint_argument_doc", "(", "a", ")", "for", "k", ",", "a", "in", "ep_args", ".", "iteritems", "(", ")", "}", "return", "arg_docs" ]
Extract the argument documentation from the endpoint.
[ "Extract", "the", "argument", "documentation", "from", "the", "endpoint", "." ]
dc002ee6032c856551143af222ff8f71ed9853fe
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/doc.py#L836-L845
train
Loudr/pale
pale/doc.py
format_endpoint_argument_doc
def format_endpoint_argument_doc(argument): """Return documentation about the argument that an endpoint accepts.""" doc = argument.doc_dict() # Trim the strings a bit doc['description'] = clean_description(py_doc_trim(doc['description'])) details = doc.get('detailed_description', None) if details is not None: doc['detailed_description'] = clean_description(py_doc_trim(details)) return doc
python
def format_endpoint_argument_doc(argument): """Return documentation about the argument that an endpoint accepts.""" doc = argument.doc_dict() # Trim the strings a bit doc['description'] = clean_description(py_doc_trim(doc['description'])) details = doc.get('detailed_description', None) if details is not None: doc['detailed_description'] = clean_description(py_doc_trim(details)) return doc
[ "def", "format_endpoint_argument_doc", "(", "argument", ")", ":", "doc", "=", "argument", ".", "doc_dict", "(", ")", "# Trim the strings a bit", "doc", "[", "'description'", "]", "=", "clean_description", "(", "py_doc_trim", "(", "doc", "[", "'description'", "]", ")", ")", "details", "=", "doc", ".", "get", "(", "'detailed_description'", ",", "None", ")", "if", "details", "is", "not", "None", ":", "doc", "[", "'detailed_description'", "]", "=", "clean_description", "(", "py_doc_trim", "(", "details", ")", ")", "return", "doc" ]
Return documentation about the argument that an endpoint accepts.
[ "Return", "documentation", "about", "the", "argument", "that", "an", "endpoint", "accepts", "." ]
dc002ee6032c856551143af222ff8f71ed9853fe
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/doc.py#L848-L858
train
Loudr/pale
pale/doc.py
format_endpoint_returns_doc
def format_endpoint_returns_doc(endpoint): """Return documentation about the resource that an endpoint returns.""" description = clean_description(py_doc_trim(endpoint._returns._description)) return { 'description': description, 'resource_name': endpoint._returns._value_type, 'resource_type': endpoint._returns.__class__.__name__ }
python
def format_endpoint_returns_doc(endpoint): """Return documentation about the resource that an endpoint returns.""" description = clean_description(py_doc_trim(endpoint._returns._description)) return { 'description': description, 'resource_name': endpoint._returns._value_type, 'resource_type': endpoint._returns.__class__.__name__ }
[ "def", "format_endpoint_returns_doc", "(", "endpoint", ")", ":", "description", "=", "clean_description", "(", "py_doc_trim", "(", "endpoint", ".", "_returns", ".", "_description", ")", ")", "return", "{", "'description'", ":", "description", ",", "'resource_name'", ":", "endpoint", ".", "_returns", ".", "_value_type", ",", "'resource_type'", ":", "endpoint", ".", "_returns", ".", "__class__", ".", "__name__", "}" ]
Return documentation about the resource that an endpoint returns.
[ "Return", "documentation", "about", "the", "resource", "that", "an", "endpoint", "returns", "." ]
dc002ee6032c856551143af222ff8f71ed9853fe
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/doc.py#L861-L868
train
tBaxter/tango-contact-manager
build/lib/contact_manager/models.py
Contact.save
def save(self, *args, **kwargs): """ Create formatted version of body text. """ self.body_formatted = sanetize_text(self.body) super(Contact, self).save()
python
def save(self, *args, **kwargs): """ Create formatted version of body text. """ self.body_formatted = sanetize_text(self.body) super(Contact, self).save()
[ "def", "save", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "body_formatted", "=", "sanetize_text", "(", "self", ".", "body", ")", "super", "(", "Contact", ",", "self", ")", ".", "save", "(", ")" ]
Create formatted version of body text.
[ "Create", "formatted", "version", "of", "body", "text", "." ]
7bd5be326a8db8f438cdefff0fbd14849d0474a5
https://github.com/tBaxter/tango-contact-manager/blob/7bd5be326a8db8f438cdefff0fbd14849d0474a5/build/lib/contact_manager/models.py#L239-L244
train
AASHE/python-membersuite-api-client
membersuite_api_client/memberships/services.py
MembershipService.get_current_membership_for_org
def get_current_membership_for_org(self, account_num, verbose=False): """Return a current membership for this org, or, None if there is none. """ all_memberships = self.get_memberships_for_org( account_num=account_num, verbose=verbose) # Look for first membership that hasn't expired yet. for membership in all_memberships: if (membership.expiration_date and membership.expiration_date > datetime.datetime.now()): # noqa return membership # noqa return None
python
def get_current_membership_for_org(self, account_num, verbose=False): """Return a current membership for this org, or, None if there is none. """ all_memberships = self.get_memberships_for_org( account_num=account_num, verbose=verbose) # Look for first membership that hasn't expired yet. for membership in all_memberships: if (membership.expiration_date and membership.expiration_date > datetime.datetime.now()): # noqa return membership # noqa return None
[ "def", "get_current_membership_for_org", "(", "self", ",", "account_num", ",", "verbose", "=", "False", ")", ":", "all_memberships", "=", "self", ".", "get_memberships_for_org", "(", "account_num", "=", "account_num", ",", "verbose", "=", "verbose", ")", "# Look for first membership that hasn't expired yet.", "for", "membership", "in", "all_memberships", ":", "if", "(", "membership", ".", "expiration_date", "and", "membership", ".", "expiration_date", ">", "datetime", ".", "datetime", ".", "now", "(", ")", ")", ":", "# noqa", "return", "membership", "# noqa", "return", "None" ]
Return a current membership for this org, or, None if there is none.
[ "Return", "a", "current", "membership", "for", "this", "org", "or", "None", "if", "there", "is", "none", "." ]
221f5ed8bc7d4424237a4669c5af9edc11819ee9
https://github.com/AASHE/python-membersuite-api-client/blob/221f5ed8bc7d4424237a4669c5af9edc11819ee9/membersuite_api_client/memberships/services.py#L22-L34
train
AASHE/python-membersuite-api-client
membersuite_api_client/memberships/services.py
MembershipService.get_memberships_for_org
def get_memberships_for_org(self, account_num, verbose=False): """ Retrieve all memberships associated with an organization, ordered by expiration date. """ if not self.client.session_id: self.client.request_session() query = "SELECT Objects() FROM Membership " \ "WHERE Owner = '%s' ORDER BY ExpirationDate" % account_num membership_list = self.get_long_query(query, verbose=verbose) return membership_list or []
python
def get_memberships_for_org(self, account_num, verbose=False): """ Retrieve all memberships associated with an organization, ordered by expiration date. """ if not self.client.session_id: self.client.request_session() query = "SELECT Objects() FROM Membership " \ "WHERE Owner = '%s' ORDER BY ExpirationDate" % account_num membership_list = self.get_long_query(query, verbose=verbose) return membership_list or []
[ "def", "get_memberships_for_org", "(", "self", ",", "account_num", ",", "verbose", "=", "False", ")", ":", "if", "not", "self", ".", "client", ".", "session_id", ":", "self", ".", "client", ".", "request_session", "(", ")", "query", "=", "\"SELECT Objects() FROM Membership \"", "\"WHERE Owner = '%s' ORDER BY ExpirationDate\"", "%", "account_num", "membership_list", "=", "self", ".", "get_long_query", "(", "query", ",", "verbose", "=", "verbose", ")", "return", "membership_list", "or", "[", "]" ]
Retrieve all memberships associated with an organization, ordered by expiration date.
[ "Retrieve", "all", "memberships", "associated", "with", "an", "organization", "ordered", "by", "expiration", "date", "." ]
221f5ed8bc7d4424237a4669c5af9edc11819ee9
https://github.com/AASHE/python-membersuite-api-client/blob/221f5ed8bc7d4424237a4669c5af9edc11819ee9/membersuite_api_client/memberships/services.py#L36-L48
train
AASHE/python-membersuite-api-client
membersuite_api_client/memberships/services.py
MembershipService.get_all_memberships
def get_all_memberships( self, limit_to=100, max_calls=None, parameters=None, since_when=None, start_record=0, verbose=False): """ Retrieve all memberships updated since "since_when" Loop over queries of size limit_to until either a non-full queryset is returned, or max_depth is reached (used in tests). Then the recursion collapses to return a single concatenated list. """ if not self.client.session_id: self.client.request_session() query = "SELECT Objects() FROM Membership" # collect all where parameters into a list of # (key, operator, value) tuples where_params = [] if parameters: for k, v in parameters.items(): where_params.append((k, "=", v)) if since_when: d = datetime.date.today() - datetime.timedelta(days=since_when) where_params.append( ('LastModifiedDate', ">", "'%s 00:00:00'" % d)) if where_params: query += " WHERE " query += " AND ".join( ["%s %s %s" % (p[0], p[1], p[2]) for p in where_params]) query += " ORDER BY LocalID" # note, get_long_query is overkill when just looking at # one org, but it still only executes once # `get_long_query` uses `ms_object_to_model` to return Organizations membership_list = self.get_long_query( query, limit_to=limit_to, max_calls=max_calls, start_record=start_record, verbose=verbose) return membership_list or []
python
def get_all_memberships( self, limit_to=100, max_calls=None, parameters=None, since_when=None, start_record=0, verbose=False): """ Retrieve all memberships updated since "since_when" Loop over queries of size limit_to until either a non-full queryset is returned, or max_depth is reached (used in tests). Then the recursion collapses to return a single concatenated list. """ if not self.client.session_id: self.client.request_session() query = "SELECT Objects() FROM Membership" # collect all where parameters into a list of # (key, operator, value) tuples where_params = [] if parameters: for k, v in parameters.items(): where_params.append((k, "=", v)) if since_when: d = datetime.date.today() - datetime.timedelta(days=since_when) where_params.append( ('LastModifiedDate', ">", "'%s 00:00:00'" % d)) if where_params: query += " WHERE " query += " AND ".join( ["%s %s %s" % (p[0], p[1], p[2]) for p in where_params]) query += " ORDER BY LocalID" # note, get_long_query is overkill when just looking at # one org, but it still only executes once # `get_long_query` uses `ms_object_to_model` to return Organizations membership_list = self.get_long_query( query, limit_to=limit_to, max_calls=max_calls, start_record=start_record, verbose=verbose) return membership_list or []
[ "def", "get_all_memberships", "(", "self", ",", "limit_to", "=", "100", ",", "max_calls", "=", "None", ",", "parameters", "=", "None", ",", "since_when", "=", "None", ",", "start_record", "=", "0", ",", "verbose", "=", "False", ")", ":", "if", "not", "self", ".", "client", ".", "session_id", ":", "self", ".", "client", ".", "request_session", "(", ")", "query", "=", "\"SELECT Objects() FROM Membership\"", "# collect all where parameters into a list of", "# (key, operator, value) tuples", "where_params", "=", "[", "]", "if", "parameters", ":", "for", "k", ",", "v", "in", "parameters", ".", "items", "(", ")", ":", "where_params", ".", "append", "(", "(", "k", ",", "\"=\"", ",", "v", ")", ")", "if", "since_when", ":", "d", "=", "datetime", ".", "date", ".", "today", "(", ")", "-", "datetime", ".", "timedelta", "(", "days", "=", "since_when", ")", "where_params", ".", "append", "(", "(", "'LastModifiedDate'", ",", "\">\"", ",", "\"'%s 00:00:00'\"", "%", "d", ")", ")", "if", "where_params", ":", "query", "+=", "\" WHERE \"", "query", "+=", "\" AND \"", ".", "join", "(", "[", "\"%s %s %s\"", "%", "(", "p", "[", "0", "]", ",", "p", "[", "1", "]", ",", "p", "[", "2", "]", ")", "for", "p", "in", "where_params", "]", ")", "query", "+=", "\" ORDER BY LocalID\"", "# note, get_long_query is overkill when just looking at", "# one org, but it still only executes once", "# `get_long_query` uses `ms_object_to_model` to return Organizations", "membership_list", "=", "self", ".", "get_long_query", "(", "query", ",", "limit_to", "=", "limit_to", ",", "max_calls", "=", "max_calls", ",", "start_record", "=", "start_record", ",", "verbose", "=", "verbose", ")", "return", "membership_list", "or", "[", "]" ]
Retrieve all memberships updated since "since_when" Loop over queries of size limit_to until either a non-full queryset is returned, or max_depth is reached (used in tests). Then the recursion collapses to return a single concatenated list.
[ "Retrieve", "all", "memberships", "updated", "since", "since_when" ]
221f5ed8bc7d4424237a4669c5af9edc11819ee9
https://github.com/AASHE/python-membersuite-api-client/blob/221f5ed8bc7d4424237a4669c5af9edc11819ee9/membersuite_api_client/memberships/services.py#L50-L92
train
AASHE/python-membersuite-api-client
membersuite_api_client/memberships/services.py
MembershipProductService.get_all_membership_products
def get_all_membership_products(self, verbose=False): """ Retrieves membership product objects """ if not self.client.session_id: self.client.request_session() query = "SELECT Objects() FROM MembershipDuesProduct" membership_product_list = self.get_long_query(query, verbose=verbose) return membership_product_list or []
python
def get_all_membership_products(self, verbose=False): """ Retrieves membership product objects """ if not self.client.session_id: self.client.request_session() query = "SELECT Objects() FROM MembershipDuesProduct" membership_product_list = self.get_long_query(query, verbose=verbose) return membership_product_list or []
[ "def", "get_all_membership_products", "(", "self", ",", "verbose", "=", "False", ")", ":", "if", "not", "self", ".", "client", ".", "session_id", ":", "self", ".", "client", ".", "request_session", "(", ")", "query", "=", "\"SELECT Objects() FROM MembershipDuesProduct\"", "membership_product_list", "=", "self", ".", "get_long_query", "(", "query", ",", "verbose", "=", "verbose", ")", "return", "membership_product_list", "or", "[", "]" ]
Retrieves membership product objects
[ "Retrieves", "membership", "product", "objects" ]
221f5ed8bc7d4424237a4669c5af9edc11819ee9
https://github.com/AASHE/python-membersuite-api-client/blob/221f5ed8bc7d4424237a4669c5af9edc11819ee9/membersuite_api_client/memberships/services.py#L109-L119
train
mangalam-research/selenic
selenic/builder.py
Builder.get_driver
def get_driver(self, desired_capabilities=None): """ Creates a Selenium driver on the basis of the configuration file upon which this object was created. :param desired_capabilities: Capabilities that the caller desires to override. This have priority over those capabilities that are set by the configuration file passed to the builder. :type desired_capabilities: class:`dict` :returns: A driver. :raises ValueError: When it can't figure out how to create a browser as specified by the BROWSER configuration variable. """ override_caps = desired_capabilities or {} desired_capabilities = \ self.config.make_selenium_desired_capabilities() desired_capabilities.update(override_caps) browser_string = self.config.browser chromedriver_version = None if self.remote: driver = self.remote_service.build_driver(desired_capabilities) # There is no equivalent for BrowserStack. if browser_string == "CHROME" and \ self.remote_service.name == "saucelabs": chromedriver_version = \ desired_capabilities.get("chromedriver-version", None) if chromedriver_version is None: raise ValueError( "when using Chrome, you must set a " "``chromedriver-version`` capability so that Selenic " "can detect which version of Chromedriver will " "be used.") else: if browser_string == "CHROME": chromedriver_path = self.local_conf["CHROMEDRIVER_PATH"] driver = webdriver.Chrome( chromedriver_path, chrome_options=self.local_conf.get("CHROME_OPTIONS"), desired_capabilities=desired_capabilities, service_log_path=self.local_conf["SERVICE_LOG_PATH"], service_args=self.local_conf.get("SERVICE_ARGS")) version_line = subprocess.check_output( [chromedriver_path, "--version"]) version_str = re.match(ur"^ChromeDriver (\d+\.\d+)", version_line).group(1) chromedriver_version = StrictVersion(version_str) elif browser_string == "FIREFOX": profile = self.local_conf.get("FIREFOX_PROFILE") or \ FirefoxProfile() binary = self.local_conf.get("FIREFOX_BINARY") or \ FirefoxBinary() driver = webdriver.Firefox(profile, binary, 
capabilities=desired_capabilities) elif browser_string == "INTERNETEXPLORER": driver = webdriver.Ie() elif browser_string == "OPERA": driver = webdriver.Opera() else: # SAFARI # HTMLUNIT # HTMLUNITWITHJS # IPHONE # IPAD # ANDROID # PHANTOMJS raise ValueError("can't start a local " + browser_string) # Check that what we get is what the config wanted... driver_caps = NormalizedCapabilities(driver.desired_capabilities) browser_version = \ re.sub(r"\..*$", "", driver_caps["browserVersion"]) if driver_caps["platformName"].upper() != self.config.platform: raise ValueError("the platform you want is not the one " "you are running selenic on") if browser_version != self.config.version: raise ValueError("the version installed is not the one " "you wanted") # On BrowserStack we cannot set the version of chromedriver or # query it. So we make the reasonable assuption that the # version of chromedriver is greater than 2.13. (There have # been at least 7 releases after 2.13 at the time of writing.) if (self.remote_service and self.remote_service.name == "browserstack") or \ (chromedriver_version is not None and chromedriver_version > StrictVersion("2.13")): # We patch ActionChains. chromedriver_element_center_patch() # We need to mark the driver as needing the patch. setattr(driver, CHROMEDRIVER_ELEMENT_CENTER_PATCH_FLAG, True) driver = self.patch(driver) return driver
python
def get_driver(self, desired_capabilities=None): """ Creates a Selenium driver on the basis of the configuration file upon which this object was created. :param desired_capabilities: Capabilities that the caller desires to override. This have priority over those capabilities that are set by the configuration file passed to the builder. :type desired_capabilities: class:`dict` :returns: A driver. :raises ValueError: When it can't figure out how to create a browser as specified by the BROWSER configuration variable. """ override_caps = desired_capabilities or {} desired_capabilities = \ self.config.make_selenium_desired_capabilities() desired_capabilities.update(override_caps) browser_string = self.config.browser chromedriver_version = None if self.remote: driver = self.remote_service.build_driver(desired_capabilities) # There is no equivalent for BrowserStack. if browser_string == "CHROME" and \ self.remote_service.name == "saucelabs": chromedriver_version = \ desired_capabilities.get("chromedriver-version", None) if chromedriver_version is None: raise ValueError( "when using Chrome, you must set a " "``chromedriver-version`` capability so that Selenic " "can detect which version of Chromedriver will " "be used.") else: if browser_string == "CHROME": chromedriver_path = self.local_conf["CHROMEDRIVER_PATH"] driver = webdriver.Chrome( chromedriver_path, chrome_options=self.local_conf.get("CHROME_OPTIONS"), desired_capabilities=desired_capabilities, service_log_path=self.local_conf["SERVICE_LOG_PATH"], service_args=self.local_conf.get("SERVICE_ARGS")) version_line = subprocess.check_output( [chromedriver_path, "--version"]) version_str = re.match(ur"^ChromeDriver (\d+\.\d+)", version_line).group(1) chromedriver_version = StrictVersion(version_str) elif browser_string == "FIREFOX": profile = self.local_conf.get("FIREFOX_PROFILE") or \ FirefoxProfile() binary = self.local_conf.get("FIREFOX_BINARY") or \ FirefoxBinary() driver = webdriver.Firefox(profile, binary, 
capabilities=desired_capabilities) elif browser_string == "INTERNETEXPLORER": driver = webdriver.Ie() elif browser_string == "OPERA": driver = webdriver.Opera() else: # SAFARI # HTMLUNIT # HTMLUNITWITHJS # IPHONE # IPAD # ANDROID # PHANTOMJS raise ValueError("can't start a local " + browser_string) # Check that what we get is what the config wanted... driver_caps = NormalizedCapabilities(driver.desired_capabilities) browser_version = \ re.sub(r"\..*$", "", driver_caps["browserVersion"]) if driver_caps["platformName"].upper() != self.config.platform: raise ValueError("the platform you want is not the one " "you are running selenic on") if browser_version != self.config.version: raise ValueError("the version installed is not the one " "you wanted") # On BrowserStack we cannot set the version of chromedriver or # query it. So we make the reasonable assuption that the # version of chromedriver is greater than 2.13. (There have # been at least 7 releases after 2.13 at the time of writing.) if (self.remote_service and self.remote_service.name == "browserstack") or \ (chromedriver_version is not None and chromedriver_version > StrictVersion("2.13")): # We patch ActionChains. chromedriver_element_center_patch() # We need to mark the driver as needing the patch. setattr(driver, CHROMEDRIVER_ELEMENT_CENTER_PATCH_FLAG, True) driver = self.patch(driver) return driver
[ "def", "get_driver", "(", "self", ",", "desired_capabilities", "=", "None", ")", ":", "override_caps", "=", "desired_capabilities", "or", "{", "}", "desired_capabilities", "=", "self", ".", "config", ".", "make_selenium_desired_capabilities", "(", ")", "desired_capabilities", ".", "update", "(", "override_caps", ")", "browser_string", "=", "self", ".", "config", ".", "browser", "chromedriver_version", "=", "None", "if", "self", ".", "remote", ":", "driver", "=", "self", ".", "remote_service", ".", "build_driver", "(", "desired_capabilities", ")", "# There is no equivalent for BrowserStack.", "if", "browser_string", "==", "\"CHROME\"", "and", "self", ".", "remote_service", ".", "name", "==", "\"saucelabs\"", ":", "chromedriver_version", "=", "desired_capabilities", ".", "get", "(", "\"chromedriver-version\"", ",", "None", ")", "if", "chromedriver_version", "is", "None", ":", "raise", "ValueError", "(", "\"when using Chrome, you must set a \"", "\"``chromedriver-version`` capability so that Selenic \"", "\"can detect which version of Chromedriver will \"", "\"be used.\"", ")", "else", ":", "if", "browser_string", "==", "\"CHROME\"", ":", "chromedriver_path", "=", "self", ".", "local_conf", "[", "\"CHROMEDRIVER_PATH\"", "]", "driver", "=", "webdriver", ".", "Chrome", "(", "chromedriver_path", ",", "chrome_options", "=", "self", ".", "local_conf", ".", "get", "(", "\"CHROME_OPTIONS\"", ")", ",", "desired_capabilities", "=", "desired_capabilities", ",", "service_log_path", "=", "self", ".", "local_conf", "[", "\"SERVICE_LOG_PATH\"", "]", ",", "service_args", "=", "self", ".", "local_conf", ".", "get", "(", "\"SERVICE_ARGS\"", ")", ")", "version_line", "=", "subprocess", ".", "check_output", "(", "[", "chromedriver_path", ",", "\"--version\"", "]", ")", "version_str", "=", "re", ".", "match", "(", "ur\"^ChromeDriver (\\d+\\.\\d+)\"", ",", "version_line", ")", ".", "group", "(", "1", ")", "chromedriver_version", "=", "StrictVersion", "(", "version_str", ")", "elif", 
"browser_string", "==", "\"FIREFOX\"", ":", "profile", "=", "self", ".", "local_conf", ".", "get", "(", "\"FIREFOX_PROFILE\"", ")", "or", "FirefoxProfile", "(", ")", "binary", "=", "self", ".", "local_conf", ".", "get", "(", "\"FIREFOX_BINARY\"", ")", "or", "FirefoxBinary", "(", ")", "driver", "=", "webdriver", ".", "Firefox", "(", "profile", ",", "binary", ",", "capabilities", "=", "desired_capabilities", ")", "elif", "browser_string", "==", "\"INTERNETEXPLORER\"", ":", "driver", "=", "webdriver", ".", "Ie", "(", ")", "elif", "browser_string", "==", "\"OPERA\"", ":", "driver", "=", "webdriver", ".", "Opera", "(", ")", "else", ":", "# SAFARI", "# HTMLUNIT", "# HTMLUNITWITHJS", "# IPHONE", "# IPAD", "# ANDROID", "# PHANTOMJS", "raise", "ValueError", "(", "\"can't start a local \"", "+", "browser_string", ")", "# Check that what we get is what the config wanted...", "driver_caps", "=", "NormalizedCapabilities", "(", "driver", ".", "desired_capabilities", ")", "browser_version", "=", "re", ".", "sub", "(", "r\"\\..*$\"", ",", "\"\"", ",", "driver_caps", "[", "\"browserVersion\"", "]", ")", "if", "driver_caps", "[", "\"platformName\"", "]", ".", "upper", "(", ")", "!=", "self", ".", "config", ".", "platform", ":", "raise", "ValueError", "(", "\"the platform you want is not the one \"", "\"you are running selenic on\"", ")", "if", "browser_version", "!=", "self", ".", "config", ".", "version", ":", "raise", "ValueError", "(", "\"the version installed is not the one \"", "\"you wanted\"", ")", "# On BrowserStack we cannot set the version of chromedriver or", "# query it. So we make the reasonable assuption that the", "# version of chromedriver is greater than 2.13. 
(There have", "# been at least 7 releases after 2.13 at the time of writing.)", "if", "(", "self", ".", "remote_service", "and", "self", ".", "remote_service", ".", "name", "==", "\"browserstack\"", ")", "or", "(", "chromedriver_version", "is", "not", "None", "and", "chromedriver_version", ">", "StrictVersion", "(", "\"2.13\"", ")", ")", ":", "# We patch ActionChains.", "chromedriver_element_center_patch", "(", ")", "# We need to mark the driver as needing the patch.", "setattr", "(", "driver", ",", "CHROMEDRIVER_ELEMENT_CENTER_PATCH_FLAG", ",", "True", ")", "driver", "=", "self", ".", "patch", "(", "driver", ")", "return", "driver" ]
Creates a Selenium driver on the basis of the configuration file upon which this object was created. :param desired_capabilities: Capabilities that the caller desires to override. This have priority over those capabilities that are set by the configuration file passed to the builder. :type desired_capabilities: class:`dict` :returns: A driver. :raises ValueError: When it can't figure out how to create a browser as specified by the BROWSER configuration variable.
[ "Creates", "a", "Selenium", "driver", "on", "the", "basis", "of", "the", "configuration", "file", "upon", "which", "this", "object", "was", "created", "." ]
2284c68e15fa3d34b88aa2eec1a2e8ecd37f44ad
https://github.com/mangalam-research/selenic/blob/2284c68e15fa3d34b88aa2eec1a2e8ecd37f44ad/selenic/builder.py#L73-L171
train
mangalam-research/selenic
selenic/builder.py
Builder.update_ff_binary_env
def update_ff_binary_env(self, variable): """ If a ``FIREFOX_BINARY`` was specified, this method updates an environment variable used by the ``FirefoxBinary`` instance to the current value of the variable in the environment. This method is a no-op if ``FIREFOX_BINARY`` has not been specified or if the configured browser is not Firefox. A common use-case for this method is updating ``DISPLAY`` once an Xvfb or Xephyr instance has been launched. Typically, by the time these displays are launched, the configuration file has already been loaded and whatever ``FirefoxBinary`` instance was created for ``FIREFOX_BINARY`` has a stale ``DISPLAY`` value. :param variable: The name of the variable to update. :type variable: :class:`str` """ if self.config.browser != 'FIREFOX': return binary = self.local_conf.get('FIREFOX_BINARY') if binary is None: return # pylint: disable=protected-access binary._firefox_env[variable] = os.environ[variable]
python
def update_ff_binary_env(self, variable): """ If a ``FIREFOX_BINARY`` was specified, this method updates an environment variable used by the ``FirefoxBinary`` instance to the current value of the variable in the environment. This method is a no-op if ``FIREFOX_BINARY`` has not been specified or if the configured browser is not Firefox. A common use-case for this method is updating ``DISPLAY`` once an Xvfb or Xephyr instance has been launched. Typically, by the time these displays are launched, the configuration file has already been loaded and whatever ``FirefoxBinary`` instance was created for ``FIREFOX_BINARY`` has a stale ``DISPLAY`` value. :param variable: The name of the variable to update. :type variable: :class:`str` """ if self.config.browser != 'FIREFOX': return binary = self.local_conf.get('FIREFOX_BINARY') if binary is None: return # pylint: disable=protected-access binary._firefox_env[variable] = os.environ[variable]
[ "def", "update_ff_binary_env", "(", "self", ",", "variable", ")", ":", "if", "self", ".", "config", ".", "browser", "!=", "'FIREFOX'", ":", "return", "binary", "=", "self", ".", "local_conf", ".", "get", "(", "'FIREFOX_BINARY'", ")", "if", "binary", "is", "None", ":", "return", "# pylint: disable=protected-access", "binary", ".", "_firefox_env", "[", "variable", "]", "=", "os", ".", "environ", "[", "variable", "]" ]
If a ``FIREFOX_BINARY`` was specified, this method updates an environment variable used by the ``FirefoxBinary`` instance to the current value of the variable in the environment. This method is a no-op if ``FIREFOX_BINARY`` has not been specified or if the configured browser is not Firefox. A common use-case for this method is updating ``DISPLAY`` once an Xvfb or Xephyr instance has been launched. Typically, by the time these displays are launched, the configuration file has already been loaded and whatever ``FirefoxBinary`` instance was created for ``FIREFOX_BINARY`` has a stale ``DISPLAY`` value. :param variable: The name of the variable to update. :type variable: :class:`str`
[ "If", "a", "FIREFOX_BINARY", "was", "specified", "this", "method", "updates", "an", "environment", "variable", "used", "by", "the", "FirefoxBinary", "instance", "to", "the", "current", "value", "of", "the", "variable", "in", "the", "environment", "." ]
2284c68e15fa3d34b88aa2eec1a2e8ecd37f44ad
https://github.com/mangalam-research/selenic/blob/2284c68e15fa3d34b88aa2eec1a2e8ecd37f44ad/selenic/builder.py#L173-L200
train
projectshift/shift-schema
shiftschema/validators/url.py
Url.regex
def regex(self, protocols, localhost=True): """ URL Validation regex Based on regular expression by Diego Perini (@dperini) and provided under MIT License: https://gist.github.com/dperini/729294 :return: """ p = r"^" # protocol p += r"(?:(?:(?:{}):)?//)".format('|'.join(protocols)) # basic auth (optional) p += r"(?:\S+(?::\S*)?@)?" p += r"(?:" # ip exclusion: private and local networks p += r"(?!(?:10|127)(?:\.\d{1,3}){3})" p += r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})" p += r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})" # ip excluding loopback (0.0.0.0), reserved space (244.0.0.0) # and network/broadcast addresses p += r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])" p += r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}" p += r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))" p += r"|" # hostname p += r"(?:" p += r"(?:" p += r"[a-z0-9\u00a1-\uffff][a-z0-9\u00a1-\uffff_-]{0,62})?" p += r"[a-z0-9\u00a1-\uffff]" p += r"\." if not localhost else r"[\.]?|localhost" p += r")+" # tld p += r"(?:[a-z\u00a1-\uffff]{2,}\.?)" p += r")" # port (optional) p += r"(?::\d{2,5})?" # path (optional) p += r"(?:[/?#]\S*)?" p += r"$" return p
python
def regex(self, protocols, localhost=True): """ URL Validation regex Based on regular expression by Diego Perini (@dperini) and provided under MIT License: https://gist.github.com/dperini/729294 :return: """ p = r"^" # protocol p += r"(?:(?:(?:{}):)?//)".format('|'.join(protocols)) # basic auth (optional) p += r"(?:\S+(?::\S*)?@)?" p += r"(?:" # ip exclusion: private and local networks p += r"(?!(?:10|127)(?:\.\d{1,3}){3})" p += r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})" p += r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})" # ip excluding loopback (0.0.0.0), reserved space (244.0.0.0) # and network/broadcast addresses p += r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])" p += r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}" p += r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))" p += r"|" # hostname p += r"(?:" p += r"(?:" p += r"[a-z0-9\u00a1-\uffff][a-z0-9\u00a1-\uffff_-]{0,62})?" p += r"[a-z0-9\u00a1-\uffff]" p += r"\." if not localhost else r"[\.]?|localhost" p += r")+" # tld p += r"(?:[a-z\u00a1-\uffff]{2,}\.?)" p += r")" # port (optional) p += r"(?::\d{2,5})?" # path (optional) p += r"(?:[/?#]\S*)?" p += r"$" return p
[ "def", "regex", "(", "self", ",", "protocols", ",", "localhost", "=", "True", ")", ":", "p", "=", "r\"^\"", "# protocol", "p", "+=", "r\"(?:(?:(?:{}):)?//)\"", ".", "format", "(", "'|'", ".", "join", "(", "protocols", ")", ")", "# basic auth (optional)", "p", "+=", "r\"(?:\\S+(?::\\S*)?@)?\"", "p", "+=", "r\"(?:\"", "# ip exclusion: private and local networks", "p", "+=", "r\"(?!(?:10|127)(?:\\.\\d{1,3}){3})\"", "p", "+=", "r\"(?!(?:169\\.254|192\\.168)(?:\\.\\d{1,3}){2})\"", "p", "+=", "r\"(?!172\\.(?:1[6-9]|2\\d|3[0-1])(?:\\.\\d{1,3}){2})\"", "# ip excluding loopback (0.0.0.0), reserved space (244.0.0.0)", "# and network/broadcast addresses", "p", "+=", "r\"(?:[1-9]\\d?|1\\d\\d|2[01]\\d|22[0-3])\"", "p", "+=", "r\"(?:\\.(?:1?\\d{1,2}|2[0-4]\\d|25[0-5])){2}\"", "p", "+=", "r\"(?:\\.(?:[1-9]\\d?|1\\d\\d|2[0-4]\\d|25[0-4]))\"", "p", "+=", "r\"|\"", "# hostname", "p", "+=", "r\"(?:\"", "p", "+=", "r\"(?:\"", "p", "+=", "r\"[a-z0-9\\u00a1-\\uffff][a-z0-9\\u00a1-\\uffff_-]{0,62})?\"", "p", "+=", "r\"[a-z0-9\\u00a1-\\uffff]\"", "p", "+=", "r\"\\.\"", "if", "not", "localhost", "else", "r\"[\\.]?|localhost\"", "p", "+=", "r\")+\"", "# tld", "p", "+=", "r\"(?:[a-z\\u00a1-\\uffff]{2,}\\.?)\"", "p", "+=", "r\")\"", "# port (optional)", "p", "+=", "r\"(?::\\d{2,5})?\"", "# path (optional)", "p", "+=", "r\"(?:[/?#]\\S*)?\"", "p", "+=", "r\"$\"", "return", "p" ]
URL Validation regex Based on regular expression by Diego Perini (@dperini) and provided under MIT License: https://gist.github.com/dperini/729294 :return:
[ "URL", "Validation", "regex", "Based", "on", "regular", "expression", "by", "Diego", "Perini", "(" ]
07787b540d3369bb37217ffbfbe629118edaf0eb
https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/validators/url.py#L68-L118
train
a1ezzz/wasp-general
wasp_general/network/messenger/onion.py
WMessengerOnion.add_layers
def add_layers(self, *layers): """ Append given layers to this onion :param layers: layer to add :return: None """ for layer in layers: if layer.name() in self.__layers.keys(): raise ValueError('Layer "%s" already exists' % layer.name()) self.__layers[layer.name()] = layer
python
def add_layers(self, *layers): """ Append given layers to this onion :param layers: layer to add :return: None """ for layer in layers: if layer.name() in self.__layers.keys(): raise ValueError('Layer "%s" already exists' % layer.name()) self.__layers[layer.name()] = layer
[ "def", "add_layers", "(", "self", ",", "*", "layers", ")", ":", "for", "layer", "in", "layers", ":", "if", "layer", ".", "name", "(", ")", "in", "self", ".", "__layers", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "'Layer \"%s\" already exists'", "%", "layer", ".", "name", "(", ")", ")", "self", ".", "__layers", "[", "layer", ".", "name", "(", ")", "]", "=", "layer" ]
Append given layers to this onion :param layers: layer to add :return: None
[ "Append", "given", "layers", "to", "this", "onion" ]
1029839d33eb663f8dec76c1c46754d53c1de4a9
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/messenger/onion.py#L86-L95
train
numberoverzero/declare
declare.py
index
def index(objects, attr): """ Generate a mapping of a list of objects indexed by the given attr. Parameters ---------- objects : :class:`list`, iterable attr : string The attribute to index the list of objects by Returns ------- dictionary : dict keys are the value of each object's attr, and values are from objects Example ------- class Person(object): def __init__(self, name, email, age): self.name = name self.email = email self.age = age people = [ Person('one', '[email protected]', 1), Person('two', '[email protected]', 2), Person('three', '[email protected]', 3) ] by_email = index(people, 'email') by_name = index(people, 'name') assert by_name['one'] is people[0] assert by_email['[email protected]'] is people[1] """ with warnings.catch_warnings(): warnings.simplefilter("ignore") return {getattr(obj, attr): obj for obj in objects}
python
def index(objects, attr): """ Generate a mapping of a list of objects indexed by the given attr. Parameters ---------- objects : :class:`list`, iterable attr : string The attribute to index the list of objects by Returns ------- dictionary : dict keys are the value of each object's attr, and values are from objects Example ------- class Person(object): def __init__(self, name, email, age): self.name = name self.email = email self.age = age people = [ Person('one', '[email protected]', 1), Person('two', '[email protected]', 2), Person('three', '[email protected]', 3) ] by_email = index(people, 'email') by_name = index(people, 'name') assert by_name['one'] is people[0] assert by_email['[email protected]'] is people[1] """ with warnings.catch_warnings(): warnings.simplefilter("ignore") return {getattr(obj, attr): obj for obj in objects}
[ "def", "index", "(", "objects", ",", "attr", ")", ":", "with", "warnings", ".", "catch_warnings", "(", ")", ":", "warnings", ".", "simplefilter", "(", "\"ignore\"", ")", "return", "{", "getattr", "(", "obj", ",", "attr", ")", ":", "obj", "for", "obj", "in", "objects", "}" ]
Generate a mapping of a list of objects indexed by the given attr. Parameters ---------- objects : :class:`list`, iterable attr : string The attribute to index the list of objects by Returns ------- dictionary : dict keys are the value of each object's attr, and values are from objects Example ------- class Person(object): def __init__(self, name, email, age): self.name = name self.email = email self.age = age people = [ Person('one', '[email protected]', 1), Person('two', '[email protected]', 2), Person('three', '[email protected]', 3) ] by_email = index(people, 'email') by_name = index(people, 'name') assert by_name['one'] is people[0] assert by_email['[email protected]'] is people[1]
[ "Generate", "a", "mapping", "of", "a", "list", "of", "objects", "indexed", "by", "the", "given", "attr", "." ]
1b05ceca91fbdc3e8e770a376c2f070365c425ff
https://github.com/numberoverzero/declare/blob/1b05ceca91fbdc3e8e770a376c2f070365c425ff/declare.py#L428-L467
train
numberoverzero/declare
declare.py
TypeEngine.register
def register(self, typedef): """ Add the typedef to this engine if it is compatible. After registering a :class:`~TypeDefinition`, it will not be bound until :meth:`~TypeEngine.bind` is next called. Nothing will happen when register is called with a typedef that is pending binding or already bound. Otherwise, the engine will ensure it is compatible with the type using :meth:`~TypeEngine.is_compatible` before adding it to the set of unbound types. Parameters ---------- typedef : :class:`~TypeDefinition` The typedef to register with this engine Raises ------ exc : :class:`ValueError` If :meth:`~TypeEngine.is_compatible` is falsey """ if typedef in self.bound_types: return if not self.is_compatible(typedef): raise ValueError("Incompatible type {} for engine {}".format( typedef, self)) if typedef not in self.unbound_types: self.unbound_types.add(typedef) typedef._register(self)
python
def register(self, typedef): """ Add the typedef to this engine if it is compatible. After registering a :class:`~TypeDefinition`, it will not be bound until :meth:`~TypeEngine.bind` is next called. Nothing will happen when register is called with a typedef that is pending binding or already bound. Otherwise, the engine will ensure it is compatible with the type using :meth:`~TypeEngine.is_compatible` before adding it to the set of unbound types. Parameters ---------- typedef : :class:`~TypeDefinition` The typedef to register with this engine Raises ------ exc : :class:`ValueError` If :meth:`~TypeEngine.is_compatible` is falsey """ if typedef in self.bound_types: return if not self.is_compatible(typedef): raise ValueError("Incompatible type {} for engine {}".format( typedef, self)) if typedef not in self.unbound_types: self.unbound_types.add(typedef) typedef._register(self)
[ "def", "register", "(", "self", ",", "typedef", ")", ":", "if", "typedef", "in", "self", ".", "bound_types", ":", "return", "if", "not", "self", ".", "is_compatible", "(", "typedef", ")", ":", "raise", "ValueError", "(", "\"Incompatible type {} for engine {}\"", ".", "format", "(", "typedef", ",", "self", ")", ")", "if", "typedef", "not", "in", "self", ".", "unbound_types", ":", "self", ".", "unbound_types", ".", "add", "(", "typedef", ")", "typedef", ".", "_register", "(", "self", ")" ]
Add the typedef to this engine if it is compatible. After registering a :class:`~TypeDefinition`, it will not be bound until :meth:`~TypeEngine.bind` is next called. Nothing will happen when register is called with a typedef that is pending binding or already bound. Otherwise, the engine will ensure it is compatible with the type using :meth:`~TypeEngine.is_compatible` before adding it to the set of unbound types. Parameters ---------- typedef : :class:`~TypeDefinition` The typedef to register with this engine Raises ------ exc : :class:`ValueError` If :meth:`~TypeEngine.is_compatible` is falsey
[ "Add", "the", "typedef", "to", "this", "engine", "if", "it", "is", "compatible", "." ]
1b05ceca91fbdc3e8e770a376c2f070365c425ff
https://github.com/numberoverzero/declare/blob/1b05ceca91fbdc3e8e770a376c2f070365c425ff/declare.py#L73-L103
train
numberoverzero/declare
declare.py
TypeEngine.bind
def bind(self, **config): """ Bind all unbound types to the engine. Bind each unbound typedef to the engine, passing in the engine and :attr:`config`. The resulting ``load`` and ``dump`` functions can be found under ``self.bound_types[typedef]["load"]`` and ``self.bound_types[typedef]["dump"], respectively. Parameters ---------- config : dict, optional Engine-binding configuration to pass to each typedef that will be bound. Examples include floating-point precision values, maximum lengths for strings, or any other translation constraints/settings that a typedef needs to construct a load/dump function pair. """ while self.unbound_types: typedef = self.unbound_types.pop() try: load, dump = typedef.bind(self, **config) self.bound_types[typedef] = { "load": load, "dump": dump } except Exception: self.unbound_types.add(typedef) raise
python
def bind(self, **config): """ Bind all unbound types to the engine. Bind each unbound typedef to the engine, passing in the engine and :attr:`config`. The resulting ``load`` and ``dump`` functions can be found under ``self.bound_types[typedef]["load"]`` and ``self.bound_types[typedef]["dump"], respectively. Parameters ---------- config : dict, optional Engine-binding configuration to pass to each typedef that will be bound. Examples include floating-point precision values, maximum lengths for strings, or any other translation constraints/settings that a typedef needs to construct a load/dump function pair. """ while self.unbound_types: typedef = self.unbound_types.pop() try: load, dump = typedef.bind(self, **config) self.bound_types[typedef] = { "load": load, "dump": dump } except Exception: self.unbound_types.add(typedef) raise
[ "def", "bind", "(", "self", ",", "*", "*", "config", ")", ":", "while", "self", ".", "unbound_types", ":", "typedef", "=", "self", ".", "unbound_types", ".", "pop", "(", ")", "try", ":", "load", ",", "dump", "=", "typedef", ".", "bind", "(", "self", ",", "*", "*", "config", ")", "self", ".", "bound_types", "[", "typedef", "]", "=", "{", "\"load\"", ":", "load", ",", "\"dump\"", ":", "dump", "}", "except", "Exception", ":", "self", ".", "unbound_types", ".", "add", "(", "typedef", ")", "raise" ]
Bind all unbound types to the engine. Bind each unbound typedef to the engine, passing in the engine and :attr:`config`. The resulting ``load`` and ``dump`` functions can be found under ``self.bound_types[typedef]["load"]`` and ``self.bound_types[typedef]["dump"], respectively. Parameters ---------- config : dict, optional Engine-binding configuration to pass to each typedef that will be bound. Examples include floating-point precision values, maximum lengths for strings, or any other translation constraints/settings that a typedef needs to construct a load/dump function pair.
[ "Bind", "all", "unbound", "types", "to", "the", "engine", "." ]
1b05ceca91fbdc3e8e770a376c2f070365c425ff
https://github.com/numberoverzero/declare/blob/1b05ceca91fbdc3e8e770a376c2f070365c425ff/declare.py#L105-L132
train
numberoverzero/declare
declare.py
TypeEngine.load
def load(self, typedef, value, **kwargs): """ Return the result of the bound load method for a typedef Looks up the load function that was bound to the engine for a typedef, and return the result of passing the given `value` and any `context` to that function. Parameters ---------- typedef : :class:`~TypeDefinition` The typedef whose bound load method should be used value : object The value to be passed into the bound load method **kwargs : kwargs Context for the value being loaded Returns ------- loaded_value : object The return value of the load function for the input value Raises ------ exc : :class:`KeyError` If the input typedef is not bound to this engine Example ------- .. code-block:: python class Account(TypeDefinition): prefix = "::account" def load(self, value, **context): return value + Account.prefix def dump(self, value, **context): return value[:-len(Account.prefix)] typedef = Account() engine = TypeEngine("accounts") engine.register(typedef) engine.bind() assert engine.dump(typedef, "Jill::account") == "Jill" """ try: bound_type = self.bound_types[typedef] except KeyError: raise DeclareException( "Can't load unknown type {}".format(typedef)) else: # Don't need to try/catch since load/dump are bound together return bound_type["load"](value, **kwargs)
python
def load(self, typedef, value, **kwargs): """ Return the result of the bound load method for a typedef Looks up the load function that was bound to the engine for a typedef, and return the result of passing the given `value` and any `context` to that function. Parameters ---------- typedef : :class:`~TypeDefinition` The typedef whose bound load method should be used value : object The value to be passed into the bound load method **kwargs : kwargs Context for the value being loaded Returns ------- loaded_value : object The return value of the load function for the input value Raises ------ exc : :class:`KeyError` If the input typedef is not bound to this engine Example ------- .. code-block:: python class Account(TypeDefinition): prefix = "::account" def load(self, value, **context): return value + Account.prefix def dump(self, value, **context): return value[:-len(Account.prefix)] typedef = Account() engine = TypeEngine("accounts") engine.register(typedef) engine.bind() assert engine.dump(typedef, "Jill::account") == "Jill" """ try: bound_type = self.bound_types[typedef] except KeyError: raise DeclareException( "Can't load unknown type {}".format(typedef)) else: # Don't need to try/catch since load/dump are bound together return bound_type["load"](value, **kwargs)
[ "def", "load", "(", "self", ",", "typedef", ",", "value", ",", "*", "*", "kwargs", ")", ":", "try", ":", "bound_type", "=", "self", ".", "bound_types", "[", "typedef", "]", "except", "KeyError", ":", "raise", "DeclareException", "(", "\"Can't load unknown type {}\"", ".", "format", "(", "typedef", ")", ")", "else", ":", "# Don't need to try/catch since load/dump are bound together", "return", "bound_type", "[", "\"load\"", "]", "(", "value", ",", "*", "*", "kwargs", ")" ]
Return the result of the bound load method for a typedef Looks up the load function that was bound to the engine for a typedef, and return the result of passing the given `value` and any `context` to that function. Parameters ---------- typedef : :class:`~TypeDefinition` The typedef whose bound load method should be used value : object The value to be passed into the bound load method **kwargs : kwargs Context for the value being loaded Returns ------- loaded_value : object The return value of the load function for the input value Raises ------ exc : :class:`KeyError` If the input typedef is not bound to this engine Example ------- .. code-block:: python class Account(TypeDefinition): prefix = "::account" def load(self, value, **context): return value + Account.prefix def dump(self, value, **context): return value[:-len(Account.prefix)] typedef = Account() engine = TypeEngine("accounts") engine.register(typedef) engine.bind() assert engine.dump(typedef, "Jill::account") == "Jill"
[ "Return", "the", "result", "of", "the", "bound", "load", "method", "for", "a", "typedef" ]
1b05ceca91fbdc3e8e770a376c2f070365c425ff
https://github.com/numberoverzero/declare/blob/1b05ceca91fbdc3e8e770a376c2f070365c425ff/declare.py#L134-L188
train
Chilipp/model-organization
model_organization/config.py
get_configdir
def get_configdir(name): """ Return the string representing the configuration directory. The directory is chosen as follows: 1. If the ``name.upper() + CONFIGDIR`` environment variable is supplied, choose that. 2a. On Linux, choose `$HOME/.config`. 2b. On other platforms, choose `$HOME/.matplotlib`. 3. If the chosen directory exists, use that as the configuration directory. 4. A directory: return None. Notes ----- This function is taken from the matplotlib [1] module References ---------- [1]: http://matplotlib.org/api/""" configdir = os.environ.get('%sCONFIGDIR' % name.upper()) if configdir is not None: return os.path.abspath(configdir) p = None h = _get_home() if ((sys.platform.startswith('linux') or sys.platform.startswith('darwin')) and h is not None): p = os.path.join(h, '.config/' + name) elif h is not None: p = os.path.join(h, '.' + name) if not os.path.exists(p): os.makedirs(p) return p
python
def get_configdir(name): """ Return the string representing the configuration directory. The directory is chosen as follows: 1. If the ``name.upper() + CONFIGDIR`` environment variable is supplied, choose that. 2a. On Linux, choose `$HOME/.config`. 2b. On other platforms, choose `$HOME/.matplotlib`. 3. If the chosen directory exists, use that as the configuration directory. 4. A directory: return None. Notes ----- This function is taken from the matplotlib [1] module References ---------- [1]: http://matplotlib.org/api/""" configdir = os.environ.get('%sCONFIGDIR' % name.upper()) if configdir is not None: return os.path.abspath(configdir) p = None h = _get_home() if ((sys.platform.startswith('linux') or sys.platform.startswith('darwin')) and h is not None): p = os.path.join(h, '.config/' + name) elif h is not None: p = os.path.join(h, '.' + name) if not os.path.exists(p): os.makedirs(p) return p
[ "def", "get_configdir", "(", "name", ")", ":", "configdir", "=", "os", ".", "environ", ".", "get", "(", "'%sCONFIGDIR'", "%", "name", ".", "upper", "(", ")", ")", "if", "configdir", "is", "not", "None", ":", "return", "os", ".", "path", ".", "abspath", "(", "configdir", ")", "p", "=", "None", "h", "=", "_get_home", "(", ")", "if", "(", "(", "sys", ".", "platform", ".", "startswith", "(", "'linux'", ")", "or", "sys", ".", "platform", ".", "startswith", "(", "'darwin'", ")", ")", "and", "h", "is", "not", "None", ")", ":", "p", "=", "os", ".", "path", ".", "join", "(", "h", ",", "'.config/'", "+", "name", ")", "elif", "h", "is", "not", "None", ":", "p", "=", "os", ".", "path", ".", "join", "(", "h", ",", "'.'", "+", "name", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "p", ")", ":", "os", ".", "makedirs", "(", "p", ")", "return", "p" ]
Return the string representing the configuration directory. The directory is chosen as follows: 1. If the ``name.upper() + CONFIGDIR`` environment variable is supplied, choose that. 2a. On Linux, choose `$HOME/.config`. 2b. On other platforms, choose `$HOME/.matplotlib`. 3. If the chosen directory exists, use that as the configuration directory. 4. A directory: return None. Notes ----- This function is taken from the matplotlib [1] module References ---------- [1]: http://matplotlib.org/api/
[ "Return", "the", "string", "representing", "the", "configuration", "directory", "." ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L45-L83
train
Chilipp/model-organization
model_organization/config.py
ordered_yaml_dump
def ordered_yaml_dump(data, stream=None, Dumper=None, **kwds): """Dumps the stream from an OrderedDict. Taken from http://stackoverflow.com/questions/5121931/in-python-how-can-you-load-yaml- mappings-as-ordereddicts""" Dumper = Dumper or yaml.Dumper class OrderedDumper(Dumper): pass def _dict_representer(dumper, data): return dumper.represent_mapping( yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, data.items()) OrderedDumper.add_representer(OrderedDict, _dict_representer) return yaml.dump(data, stream, OrderedDumper, **kwds)
python
def ordered_yaml_dump(data, stream=None, Dumper=None, **kwds): """Dumps the stream from an OrderedDict. Taken from http://stackoverflow.com/questions/5121931/in-python-how-can-you-load-yaml- mappings-as-ordereddicts""" Dumper = Dumper or yaml.Dumper class OrderedDumper(Dumper): pass def _dict_representer(dumper, data): return dumper.represent_mapping( yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, data.items()) OrderedDumper.add_representer(OrderedDict, _dict_representer) return yaml.dump(data, stream, OrderedDumper, **kwds)
[ "def", "ordered_yaml_dump", "(", "data", ",", "stream", "=", "None", ",", "Dumper", "=", "None", ",", "*", "*", "kwds", ")", ":", "Dumper", "=", "Dumper", "or", "yaml", ".", "Dumper", "class", "OrderedDumper", "(", "Dumper", ")", ":", "pass", "def", "_dict_representer", "(", "dumper", ",", "data", ")", ":", "return", "dumper", ".", "represent_mapping", "(", "yaml", ".", "resolver", ".", "BaseResolver", ".", "DEFAULT_MAPPING_TAG", ",", "data", ".", "items", "(", ")", ")", "OrderedDumper", ".", "add_representer", "(", "OrderedDict", ",", "_dict_representer", ")", "return", "yaml", ".", "dump", "(", "data", ",", "stream", ",", "OrderedDumper", ",", "*", "*", "kwds", ")" ]
Dumps the stream from an OrderedDict. Taken from http://stackoverflow.com/questions/5121931/in-python-how-can-you-load-yaml- mappings-as-ordereddicts
[ "Dumps", "the", "stream", "from", "an", "OrderedDict", ".", "Taken", "from" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L152-L168
train
Chilipp/model-organization
model_organization/config.py
safe_load
def safe_load(fname): """ Load the file fname and make sure it can be done in parallel Parameters ---------- fname: str The path name """ lock = fasteners.InterProcessLock(fname + '.lck') lock.acquire() try: with open(fname) as f: return ordered_yaml_load(f) except: raise finally: lock.release()
python
def safe_load(fname): """ Load the file fname and make sure it can be done in parallel Parameters ---------- fname: str The path name """ lock = fasteners.InterProcessLock(fname + '.lck') lock.acquire() try: with open(fname) as f: return ordered_yaml_load(f) except: raise finally: lock.release()
[ "def", "safe_load", "(", "fname", ")", ":", "lock", "=", "fasteners", ".", "InterProcessLock", "(", "fname", "+", "'.lck'", ")", "lock", ".", "acquire", "(", ")", "try", ":", "with", "open", "(", "fname", ")", "as", "f", ":", "return", "ordered_yaml_load", "(", "f", ")", "except", ":", "raise", "finally", ":", "lock", ".", "release", "(", ")" ]
Load the file fname and make sure it can be done in parallel Parameters ---------- fname: str The path name
[ "Load", "the", "file", "fname", "and", "make", "sure", "it", "can", "be", "done", "in", "parallel" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L171-L188
train
Chilipp/model-organization
model_organization/config.py
safe_dump
def safe_dump(d, fname, *args, **kwargs): """ Savely dump `d` to `fname` using yaml This method creates a copy of `fname` called ``fname + '~'`` before saving `d` to `fname` using :func:`ordered_yaml_dump` Parameters ---------- d: object The object to dump fname: str The path where to dump `d` Other Parameters ---------------- ``*args, **kwargs`` Will be forwarded to the :func:`ordered_yaml_dump` function """ if osp.exists(fname): os.rename(fname, fname + '~') lock = fasteners.InterProcessLock(fname + '.lck') lock.acquire() try: with open(fname, 'w') as f: ordered_yaml_dump(d, f, *args, **kwargs) except: raise finally: lock.release()
python
def safe_dump(d, fname, *args, **kwargs): """ Savely dump `d` to `fname` using yaml This method creates a copy of `fname` called ``fname + '~'`` before saving `d` to `fname` using :func:`ordered_yaml_dump` Parameters ---------- d: object The object to dump fname: str The path where to dump `d` Other Parameters ---------------- ``*args, **kwargs`` Will be forwarded to the :func:`ordered_yaml_dump` function """ if osp.exists(fname): os.rename(fname, fname + '~') lock = fasteners.InterProcessLock(fname + '.lck') lock.acquire() try: with open(fname, 'w') as f: ordered_yaml_dump(d, f, *args, **kwargs) except: raise finally: lock.release()
[ "def", "safe_dump", "(", "d", ",", "fname", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "osp", ".", "exists", "(", "fname", ")", ":", "os", ".", "rename", "(", "fname", ",", "fname", "+", "'~'", ")", "lock", "=", "fasteners", ".", "InterProcessLock", "(", "fname", "+", "'.lck'", ")", "lock", ".", "acquire", "(", ")", "try", ":", "with", "open", "(", "fname", ",", "'w'", ")", "as", "f", ":", "ordered_yaml_dump", "(", "d", ",", "f", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", ":", "raise", "finally", ":", "lock", ".", "release", "(", ")" ]
Savely dump `d` to `fname` using yaml This method creates a copy of `fname` called ``fname + '~'`` before saving `d` to `fname` using :func:`ordered_yaml_dump` Parameters ---------- d: object The object to dump fname: str The path where to dump `d` Other Parameters ---------------- ``*args, **kwargs`` Will be forwarded to the :func:`ordered_yaml_dump` function
[ "Savely", "dump", "d", "to", "fname", "using", "yaml" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L191-L220
train
Chilipp/model-organization
model_organization/config.py
ExperimentsConfig.project_map
def project_map(self): """A mapping from project name to experiments""" # first update with the experiments in the memory (the others should # already be loaded within the :attr:`exp_files` attribute) for key, val in self.items(): if isinstance(val, dict): l = self._project_map[val['project']] elif isinstance(val, Archive): l = self._project_map[val.project] else: continue if key not in l: l.append(key) return self._project_map
python
def project_map(self): """A mapping from project name to experiments""" # first update with the experiments in the memory (the others should # already be loaded within the :attr:`exp_files` attribute) for key, val in self.items(): if isinstance(val, dict): l = self._project_map[val['project']] elif isinstance(val, Archive): l = self._project_map[val.project] else: continue if key not in l: l.append(key) return self._project_map
[ "def", "project_map", "(", "self", ")", ":", "# first update with the experiments in the memory (the others should", "# already be loaded within the :attr:`exp_files` attribute)", "for", "key", ",", "val", "in", "self", ".", "items", "(", ")", ":", "if", "isinstance", "(", "val", ",", "dict", ")", ":", "l", "=", "self", ".", "_project_map", "[", "val", "[", "'project'", "]", "]", "elif", "isinstance", "(", "val", ",", "Archive", ")", ":", "l", "=", "self", ".", "_project_map", "[", "val", ".", "project", "]", "else", ":", "continue", "if", "key", "not", "in", "l", ":", "l", ".", "append", "(", "key", ")", "return", "self", ".", "_project_map" ]
A mapping from project name to experiments
[ "A", "mapping", "from", "project", "name", "to", "experiments" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L260-L273
train
Chilipp/model-organization
model_organization/config.py
ExperimentsConfig.exp_files
def exp_files(self): """A mapping from experiment to experiment configuration file Note that this attribute only contains experiments whose configuration has already dumped to the file! """ ret = OrderedDict() # restore the order of the experiments exp_file = self.exp_file if osp.exists(exp_file): for key, val in safe_load(exp_file).items(): ret[key] = val for project, d in self.projects.items(): project_path = d['root'] config_path = osp.join(project_path, '.project') if not osp.exists(config_path): continue for fname in glob.glob(osp.join(config_path, '*.yml')): if fname == '.project.yml': continue exp = osp.splitext(osp.basename(fname))[0] if not isinstance(ret.get(exp), Archive): ret[exp] = osp.join(config_path, exp + '.yml') if exp not in self._project_map[project]: self._project_map[project].append(exp) return ret
python
def exp_files(self): """A mapping from experiment to experiment configuration file Note that this attribute only contains experiments whose configuration has already dumped to the file! """ ret = OrderedDict() # restore the order of the experiments exp_file = self.exp_file if osp.exists(exp_file): for key, val in safe_load(exp_file).items(): ret[key] = val for project, d in self.projects.items(): project_path = d['root'] config_path = osp.join(project_path, '.project') if not osp.exists(config_path): continue for fname in glob.glob(osp.join(config_path, '*.yml')): if fname == '.project.yml': continue exp = osp.splitext(osp.basename(fname))[0] if not isinstance(ret.get(exp), Archive): ret[exp] = osp.join(config_path, exp + '.yml') if exp not in self._project_map[project]: self._project_map[project].append(exp) return ret
[ "def", "exp_files", "(", "self", ")", ":", "ret", "=", "OrderedDict", "(", ")", "# restore the order of the experiments", "exp_file", "=", "self", ".", "exp_file", "if", "osp", ".", "exists", "(", "exp_file", ")", ":", "for", "key", ",", "val", "in", "safe_load", "(", "exp_file", ")", ".", "items", "(", ")", ":", "ret", "[", "key", "]", "=", "val", "for", "project", ",", "d", "in", "self", ".", "projects", ".", "items", "(", ")", ":", "project_path", "=", "d", "[", "'root'", "]", "config_path", "=", "osp", ".", "join", "(", "project_path", ",", "'.project'", ")", "if", "not", "osp", ".", "exists", "(", "config_path", ")", ":", "continue", "for", "fname", "in", "glob", ".", "glob", "(", "osp", ".", "join", "(", "config_path", ",", "'*.yml'", ")", ")", ":", "if", "fname", "==", "'.project.yml'", ":", "continue", "exp", "=", "osp", ".", "splitext", "(", "osp", ".", "basename", "(", "fname", ")", ")", "[", "0", "]", "if", "not", "isinstance", "(", "ret", ".", "get", "(", "exp", ")", ",", "Archive", ")", ":", "ret", "[", "exp", "]", "=", "osp", ".", "join", "(", "config_path", ",", "exp", "+", "'.yml'", ")", "if", "exp", "not", "in", "self", ".", "_project_map", "[", "project", "]", ":", "self", ".", "_project_map", "[", "project", "]", ".", "append", "(", "exp", ")", "return", "ret" ]
A mapping from experiment to experiment configuration file Note that this attribute only contains experiments whose configuration has already dumped to the file!
[ "A", "mapping", "from", "experiment", "to", "experiment", "configuration", "file" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L276-L301
train
Chilipp/model-organization
model_organization/config.py
ExperimentsConfig.save
def save(self): """Save the experiment configuration This method stores the configuration of each of the experiments in a file ``'<project-dir>/.project/<experiment>.yml'``, where ``'<project-dir>'`` corresponds to the project directory of the specific ``'<experiment>'``. Furthermore it dumps all experiments to the :attr:`exp_file` configuration file. """ for exp, d in dict(self).items(): if isinstance(d, dict): project_path = self.projects[d['project']]['root'] d = self.rel_paths(copy.deepcopy(d)) fname = osp.join(project_path, '.project', exp + '.yml') if not osp.exists(osp.dirname(fname)): os.makedirs(osp.dirname(fname)) safe_dump(d, fname, default_flow_style=False) exp_file = self.exp_file # to be 100% sure we do not write to the file from multiple processes lock = fasteners.InterProcessLock(exp_file + '.lck') lock.acquire() safe_dump(OrderedDict((exp, val if isinstance(val, Archive) else None) for exp, val in self.items()), exp_file, default_flow_style=False) lock.release()
python
def save(self): """Save the experiment configuration This method stores the configuration of each of the experiments in a file ``'<project-dir>/.project/<experiment>.yml'``, where ``'<project-dir>'`` corresponds to the project directory of the specific ``'<experiment>'``. Furthermore it dumps all experiments to the :attr:`exp_file` configuration file. """ for exp, d in dict(self).items(): if isinstance(d, dict): project_path = self.projects[d['project']]['root'] d = self.rel_paths(copy.deepcopy(d)) fname = osp.join(project_path, '.project', exp + '.yml') if not osp.exists(osp.dirname(fname)): os.makedirs(osp.dirname(fname)) safe_dump(d, fname, default_flow_style=False) exp_file = self.exp_file # to be 100% sure we do not write to the file from multiple processes lock = fasteners.InterProcessLock(exp_file + '.lck') lock.acquire() safe_dump(OrderedDict((exp, val if isinstance(val, Archive) else None) for exp, val in self.items()), exp_file, default_flow_style=False) lock.release()
[ "def", "save", "(", "self", ")", ":", "for", "exp", ",", "d", "in", "dict", "(", "self", ")", ".", "items", "(", ")", ":", "if", "isinstance", "(", "d", ",", "dict", ")", ":", "project_path", "=", "self", ".", "projects", "[", "d", "[", "'project'", "]", "]", "[", "'root'", "]", "d", "=", "self", ".", "rel_paths", "(", "copy", ".", "deepcopy", "(", "d", ")", ")", "fname", "=", "osp", ".", "join", "(", "project_path", ",", "'.project'", ",", "exp", "+", "'.yml'", ")", "if", "not", "osp", ".", "exists", "(", "osp", ".", "dirname", "(", "fname", ")", ")", ":", "os", ".", "makedirs", "(", "osp", ".", "dirname", "(", "fname", ")", ")", "safe_dump", "(", "d", ",", "fname", ",", "default_flow_style", "=", "False", ")", "exp_file", "=", "self", ".", "exp_file", "# to be 100% sure we do not write to the file from multiple processes", "lock", "=", "fasteners", ".", "InterProcessLock", "(", "exp_file", "+", "'.lck'", ")", "lock", ".", "acquire", "(", ")", "safe_dump", "(", "OrderedDict", "(", "(", "exp", ",", "val", "if", "isinstance", "(", "val", ",", "Archive", ")", "else", "None", ")", "for", "exp", ",", "val", "in", "self", ".", "items", "(", ")", ")", ",", "exp_file", ",", "default_flow_style", "=", "False", ")", "lock", ".", "release", "(", ")" ]
Save the experiment configuration This method stores the configuration of each of the experiments in a file ``'<project-dir>/.project/<experiment>.yml'``, where ``'<project-dir>'`` corresponds to the project directory of the specific ``'<experiment>'``. Furthermore it dumps all experiments to the :attr:`exp_file` configuration file.
[ "Save", "the", "experiment", "configuration" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L447-L471
train
Chilipp/model-organization
model_organization/config.py
ExperimentsConfig.as_ordereddict
def as_ordereddict(self): """Convenience method to convert this object into an OrderedDict""" if six.PY2: d = OrderedDict() copied = dict(self) for key in self: d[key] = copied[key] else: d = OrderedDict(self) return d
python
def as_ordereddict(self): """Convenience method to convert this object into an OrderedDict""" if six.PY2: d = OrderedDict() copied = dict(self) for key in self: d[key] = copied[key] else: d = OrderedDict(self) return d
[ "def", "as_ordereddict", "(", "self", ")", ":", "if", "six", ".", "PY2", ":", "d", "=", "OrderedDict", "(", ")", "copied", "=", "dict", "(", "self", ")", "for", "key", "in", "self", ":", "d", "[", "key", "]", "=", "copied", "[", "key", "]", "else", ":", "d", "=", "OrderedDict", "(", "self", ")", "return", "d" ]
Convenience method to convert this object into an OrderedDict
[ "Convenience", "method", "to", "convert", "this", "object", "into", "an", "OrderedDict" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L480-L489
train
Chilipp/model-organization
model_organization/config.py
ExperimentsConfig.remove
def remove(self, experiment): """Remove the configuration of an experiment""" try: project_path = self.projects[self[experiment]['project']]['root'] except KeyError: return config_path = osp.join(project_path, '.project', experiment + '.yml') for f in [config_path, config_path + '~', config_path + '.lck']: if os.path.exists(f): os.remove(f) del self[experiment]
python
def remove(self, experiment): """Remove the configuration of an experiment""" try: project_path = self.projects[self[experiment]['project']]['root'] except KeyError: return config_path = osp.join(project_path, '.project', experiment + '.yml') for f in [config_path, config_path + '~', config_path + '.lck']: if os.path.exists(f): os.remove(f) del self[experiment]
[ "def", "remove", "(", "self", ",", "experiment", ")", ":", "try", ":", "project_path", "=", "self", ".", "projects", "[", "self", "[", "experiment", "]", "[", "'project'", "]", "]", "[", "'root'", "]", "except", "KeyError", ":", "return", "config_path", "=", "osp", ".", "join", "(", "project_path", ",", "'.project'", ",", "experiment", "+", "'.yml'", ")", "for", "f", "in", "[", "config_path", ",", "config_path", "+", "'~'", ",", "config_path", "+", "'.lck'", "]", ":", "if", "os", ".", "path", ".", "exists", "(", "f", ")", ":", "os", ".", "remove", "(", "f", ")", "del", "self", "[", "experiment", "]" ]
Remove the configuration of an experiment
[ "Remove", "the", "configuration", "of", "an", "experiment" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L519-L529
train
Chilipp/model-organization
model_organization/config.py
ProjectsConfig.save
def save(self): """ Save the project configuration This method dumps the configuration for each project and the project paths (see the :attr:`all_projects` attribute) to the hard drive """ project_paths = OrderedDict() for project, d in OrderedDict(self).items(): if isinstance(d, dict): project_path = d['root'] fname = osp.join(project_path, '.project', '.project.yml') if not osp.exists(osp.dirname(fname)): os.makedirs(osp.dirname(fname)) if osp.exists(fname): os.rename(fname, fname + '~') d = self.rel_paths(copy.deepcopy(d)) safe_dump(d, fname, default_flow_style=False) project_paths[project] = project_path else: project_paths = self.project_paths[project] self.project_paths = project_paths safe_dump(project_paths, self.all_projects, default_flow_style=False)
python
def save(self): """ Save the project configuration This method dumps the configuration for each project and the project paths (see the :attr:`all_projects` attribute) to the hard drive """ project_paths = OrderedDict() for project, d in OrderedDict(self).items(): if isinstance(d, dict): project_path = d['root'] fname = osp.join(project_path, '.project', '.project.yml') if not osp.exists(osp.dirname(fname)): os.makedirs(osp.dirname(fname)) if osp.exists(fname): os.rename(fname, fname + '~') d = self.rel_paths(copy.deepcopy(d)) safe_dump(d, fname, default_flow_style=False) project_paths[project] = project_path else: project_paths = self.project_paths[project] self.project_paths = project_paths safe_dump(project_paths, self.all_projects, default_flow_style=False)
[ "def", "save", "(", "self", ")", ":", "project_paths", "=", "OrderedDict", "(", ")", "for", "project", ",", "d", "in", "OrderedDict", "(", "self", ")", ".", "items", "(", ")", ":", "if", "isinstance", "(", "d", ",", "dict", ")", ":", "project_path", "=", "d", "[", "'root'", "]", "fname", "=", "osp", ".", "join", "(", "project_path", ",", "'.project'", ",", "'.project.yml'", ")", "if", "not", "osp", ".", "exists", "(", "osp", ".", "dirname", "(", "fname", ")", ")", ":", "os", ".", "makedirs", "(", "osp", ".", "dirname", "(", "fname", ")", ")", "if", "osp", ".", "exists", "(", "fname", ")", ":", "os", ".", "rename", "(", "fname", ",", "fname", "+", "'~'", ")", "d", "=", "self", ".", "rel_paths", "(", "copy", ".", "deepcopy", "(", "d", ")", ")", "safe_dump", "(", "d", ",", "fname", ",", "default_flow_style", "=", "False", ")", "project_paths", "[", "project", "]", "=", "project_path", "else", ":", "project_paths", "=", "self", ".", "project_paths", "[", "project", "]", "self", ".", "project_paths", "=", "project_paths", "safe_dump", "(", "project_paths", ",", "self", ".", "all_projects", ",", "default_flow_style", "=", "False", ")" ]
Save the project configuration This method dumps the configuration for each project and the project paths (see the :attr:`all_projects` attribute) to the hard drive
[ "Save", "the", "project", "configuration" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L691-L713
train
Chilipp/model-organization
model_organization/config.py
Config.save
def save(self): """ Save the entire configuration files """ self.projects.save() self.experiments.save() safe_dump(self.global_config, self._globals_file, default_flow_style=False)
python
def save(self): """ Save the entire configuration files """ self.projects.save() self.experiments.save() safe_dump(self.global_config, self._globals_file, default_flow_style=False)
[ "def", "save", "(", "self", ")", ":", "self", ".", "projects", ".", "save", "(", ")", "self", ".", "experiments", ".", "save", "(", ")", "safe_dump", "(", "self", ".", "global_config", ",", "self", ".", "_globals_file", ",", "default_flow_style", "=", "False", ")" ]
Save the entire configuration files
[ "Save", "the", "entire", "configuration", "files" ]
694d1219c7ed7e1b2b17153afa11bdc21169bca2
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L748-L755
train
olitheolix/qtmacs
qtmacs/extensions/qtmacsscintilla_widget.py
UndoSetText.reverseCommit
def reverseCommit(self): """ Replace the current widget content with the original text. Note that the original text has styling information available, whereas the new text does not. """ self.baseClass.setText(self.oldText) self.qteWidget.SCISetStylingEx(0, 0, self.style)
python
def reverseCommit(self): """ Replace the current widget content with the original text. Note that the original text has styling information available, whereas the new text does not. """ self.baseClass.setText(self.oldText) self.qteWidget.SCISetStylingEx(0, 0, self.style)
[ "def", "reverseCommit", "(", "self", ")", ":", "self", ".", "baseClass", ".", "setText", "(", "self", ".", "oldText", ")", "self", ".", "qteWidget", ".", "SCISetStylingEx", "(", "0", ",", "0", ",", "self", ".", "style", ")" ]
Replace the current widget content with the original text. Note that the original text has styling information available, whereas the new text does not.
[ "Replace", "the", "current", "widget", "content", "with", "the", "original", "text", ".", "Note", "that", "the", "original", "text", "has", "styling", "information", "available", "whereas", "the", "new", "text", "does", "not", "." ]
36253b082b82590f183fe154b053eb3a1e741be2
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L390-L397
train
olitheolix/qtmacs
qtmacs/extensions/qtmacsscintilla_widget.py
UndoGenericQtmacsScintilla.placeCursor
def placeCursor(self, line, col): """ Try to place the cursor in ``line`` at ``col`` if possible, otherwise place it at the end. """ num_lines, num_col = self.qteWidget.getNumLinesAndColumns() # Place the cursor at the specified position if possible. if line >= num_lines: line, col = num_lines, num_col else: text = self.qteWidget.text(line) if col >= len(text): col = len(text) - 1 self.qteWidget.setCursorPosition(line, col)
python
def placeCursor(self, line, col): """ Try to place the cursor in ``line`` at ``col`` if possible, otherwise place it at the end. """ num_lines, num_col = self.qteWidget.getNumLinesAndColumns() # Place the cursor at the specified position if possible. if line >= num_lines: line, col = num_lines, num_col else: text = self.qteWidget.text(line) if col >= len(text): col = len(text) - 1 self.qteWidget.setCursorPosition(line, col)
[ "def", "placeCursor", "(", "self", ",", "line", ",", "col", ")", ":", "num_lines", ",", "num_col", "=", "self", ".", "qteWidget", ".", "getNumLinesAndColumns", "(", ")", "# Place the cursor at the specified position if possible.", "if", "line", ">=", "num_lines", ":", "line", ",", "col", "=", "num_lines", ",", "num_col", "else", ":", "text", "=", "self", ".", "qteWidget", ".", "text", "(", "line", ")", "if", "col", ">=", "len", "(", "text", ")", ":", "col", "=", "len", "(", "text", ")", "-", "1", "self", ".", "qteWidget", ".", "setCursorPosition", "(", "line", ",", "col", ")" ]
Try to place the cursor in ``line`` at ``col`` if possible, otherwise place it at the end.
[ "Try", "to", "place", "the", "cursor", "in", "line", "at", "col", "if", "possible", "otherwise", "place", "it", "at", "the", "end", "." ]
36253b082b82590f183fe154b053eb3a1e741be2
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L441-L456
train
olitheolix/qtmacs
qtmacs/extensions/qtmacsscintilla_widget.py
UndoGenericQtmacsScintilla.reverseCommit
def reverseCommit(self): """ Put the document into the 'before' state. """ # Put the document into the 'before' state. self.baseClass.setText(self.textBefore) self.qteWidget.SCISetStylingEx(0, 0, self.styleBefore)
python
def reverseCommit(self): """ Put the document into the 'before' state. """ # Put the document into the 'before' state. self.baseClass.setText(self.textBefore) self.qteWidget.SCISetStylingEx(0, 0, self.styleBefore)
[ "def", "reverseCommit", "(", "self", ")", ":", "# Put the document into the 'before' state.", "self", ".", "baseClass", ".", "setText", "(", "self", ".", "textBefore", ")", "self", ".", "qteWidget", ".", "SCISetStylingEx", "(", "0", ",", "0", ",", "self", ".", "styleBefore", ")" ]
Put the document into the 'before' state.
[ "Put", "the", "document", "into", "the", "before", "state", "." ]
36253b082b82590f183fe154b053eb3a1e741be2
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L476-L482
train
olitheolix/qtmacs
qtmacs/extensions/qtmacsscintilla_widget.py
QtmacsScintilla.fromMimeData
def fromMimeData(self, data): """ Paste the clipboard data at the current cursor position. This method also adds another undo-object to the undo-stack. ..note: This method forcefully interrupts the ``QsciInternal`` pasting mechnism by returning an empty MIME data element. This is not an elegant implementation, but the best I could come up with at the moment. """ # Only insert the element if it is available in plain text. if data.hasText(): self.insert(data.text()) # Tell the underlying QsciScintilla object that the MIME data # object was indeed empty. return (QtCore.QByteArray(), False)
python
def fromMimeData(self, data): """ Paste the clipboard data at the current cursor position. This method also adds another undo-object to the undo-stack. ..note: This method forcefully interrupts the ``QsciInternal`` pasting mechnism by returning an empty MIME data element. This is not an elegant implementation, but the best I could come up with at the moment. """ # Only insert the element if it is available in plain text. if data.hasText(): self.insert(data.text()) # Tell the underlying QsciScintilla object that the MIME data # object was indeed empty. return (QtCore.QByteArray(), False)
[ "def", "fromMimeData", "(", "self", ",", "data", ")", ":", "# Only insert the element if it is available in plain text.", "if", "data", ".", "hasText", "(", ")", ":", "self", ".", "insert", "(", "data", ".", "text", "(", ")", ")", "# Tell the underlying QsciScintilla object that the MIME data", "# object was indeed empty.", "return", "(", "QtCore", ".", "QByteArray", "(", ")", ",", "False", ")" ]
Paste the clipboard data at the current cursor position. This method also adds another undo-object to the undo-stack. ..note: This method forcefully interrupts the ``QsciInternal`` pasting mechnism by returning an empty MIME data element. This is not an elegant implementation, but the best I could come up with at the moment.
[ "Paste", "the", "clipboard", "data", "at", "the", "current", "cursor", "position", "." ]
36253b082b82590f183fe154b053eb3a1e741be2
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L582-L600
train
olitheolix/qtmacs
qtmacs/extensions/qtmacsscintilla_widget.py
QtmacsScintilla.keyPressEvent
def keyPressEvent(self, keyEvent: QtGui.QKeyEvent): """ Undo safe wrapper for the native ``keyPressEvent`` method. |Args| * ``keyEvent`` (**QKeyEvent**): the key event to process. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ undoObj = UndoInsert(self, keyEvent.text()) self.qteUndoStack.push(undoObj)
python
def keyPressEvent(self, keyEvent: QtGui.QKeyEvent): """ Undo safe wrapper for the native ``keyPressEvent`` method. |Args| * ``keyEvent`` (**QKeyEvent**): the key event to process. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ undoObj = UndoInsert(self, keyEvent.text()) self.qteUndoStack.push(undoObj)
[ "def", "keyPressEvent", "(", "self", ",", "keyEvent", ":", "QtGui", ".", "QKeyEvent", ")", ":", "undoObj", "=", "UndoInsert", "(", "self", ",", "keyEvent", ".", "text", "(", ")", ")", "self", ".", "qteUndoStack", ".", "push", "(", "undoObj", ")" ]
Undo safe wrapper for the native ``keyPressEvent`` method. |Args| * ``keyEvent`` (**QKeyEvent**): the key event to process. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type.
[ "Undo", "safe", "wrapper", "for", "the", "native", "keyPressEvent", "method", "." ]
36253b082b82590f183fe154b053eb3a1e741be2
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L695-L712
train
olitheolix/qtmacs
qtmacs/extensions/qtmacsscintilla_widget.py
QtmacsScintilla.replaceSelectedText
def replaceSelectedText(self, text: str): """ Undo safe wrapper for the native ``replaceSelectedText`` method. |Args| * ``text`` (**str**): text to replace the current selection. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ undoObj = UndoReplaceSelectedText(self, text) self.qteUndoStack.push(undoObj)
python
def replaceSelectedText(self, text: str): """ Undo safe wrapper for the native ``replaceSelectedText`` method. |Args| * ``text`` (**str**): text to replace the current selection. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ undoObj = UndoReplaceSelectedText(self, text) self.qteUndoStack.push(undoObj)
[ "def", "replaceSelectedText", "(", "self", ",", "text", ":", "str", ")", ":", "undoObj", "=", "UndoReplaceSelectedText", "(", "self", ",", "text", ")", "self", ".", "qteUndoStack", ".", "push", "(", "undoObj", ")" ]
Undo safe wrapper for the native ``replaceSelectedText`` method. |Args| * ``text`` (**str**): text to replace the current selection. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type.
[ "Undo", "safe", "wrapper", "for", "the", "native", "replaceSelectedText", "method", "." ]
36253b082b82590f183fe154b053eb3a1e741be2
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L735-L752
train
olitheolix/qtmacs
qtmacs/extensions/qtmacsscintilla_widget.py
QtmacsScintilla.insert
def insert(self, text: str): """ Undo safe wrapper for the native ``insert`` method. |Args| * ``text`` (**str**): text to insert at the current position. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ undoObj = UndoInsert(self, text) self.qteUndoStack.push(undoObj)
python
def insert(self, text: str): """ Undo safe wrapper for the native ``insert`` method. |Args| * ``text`` (**str**): text to insert at the current position. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ undoObj = UndoInsert(self, text) self.qteUndoStack.push(undoObj)
[ "def", "insert", "(", "self", ",", "text", ":", "str", ")", ":", "undoObj", "=", "UndoInsert", "(", "self", ",", "text", ")", "self", ".", "qteUndoStack", ".", "push", "(", "undoObj", ")" ]
Undo safe wrapper for the native ``insert`` method. |Args| * ``text`` (**str**): text to insert at the current position. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type.
[ "Undo", "safe", "wrapper", "for", "the", "native", "insert", "method", "." ]
36253b082b82590f183fe154b053eb3a1e741be2
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L755-L772
train
olitheolix/qtmacs
qtmacs/extensions/qtmacsscintilla_widget.py
QtmacsScintilla.insertAt
def insertAt(self, text: str, line: int, col: int): """ Undo safe wrapper for the native ``insertAt`` method. |Args| * ``text`` (**str**): text to insert at the specified position. * ``line`` (**int**): line number. * ``col`` (**int**): column number. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ undoObj = UndoInsertAt(self, text, line, col) self.qteUndoStack.push(undoObj)
python
def insertAt(self, text: str, line: int, col: int): """ Undo safe wrapper for the native ``insertAt`` method. |Args| * ``text`` (**str**): text to insert at the specified position. * ``line`` (**int**): line number. * ``col`` (**int**): column number. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ undoObj = UndoInsertAt(self, text, line, col) self.qteUndoStack.push(undoObj)
[ "def", "insertAt", "(", "self", ",", "text", ":", "str", ",", "line", ":", "int", ",", "col", ":", "int", ")", ":", "undoObj", "=", "UndoInsertAt", "(", "self", ",", "text", ",", "line", ",", "col", ")", "self", ".", "qteUndoStack", ".", "push", "(", "undoObj", ")" ]
Undo safe wrapper for the native ``insertAt`` method. |Args| * ``text`` (**str**): text to insert at the specified position. * ``line`` (**int**): line number. * ``col`` (**int**): column number. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type.
[ "Undo", "safe", "wrapper", "for", "the", "native", "insertAt", "method", "." ]
36253b082b82590f183fe154b053eb3a1e741be2
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L775-L794
train
olitheolix/qtmacs
qtmacs/extensions/qtmacsscintilla_widget.py
QtmacsScintilla.append
def append(self, text: str): """ Undo safe wrapper for the native ``append`` method. |Args| * ``text`` (**str**): text to insert at the specified position. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ pos = self.getCursorPosition() line, col = self.getNumLinesAndColumns() undoObj = UndoInsertAt(self, text, line, col) self.qteUndoStack.push(undoObj) self.setCursorPosition(*pos)
python
def append(self, text: str): """ Undo safe wrapper for the native ``append`` method. |Args| * ``text`` (**str**): text to insert at the specified position. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ pos = self.getCursorPosition() line, col = self.getNumLinesAndColumns() undoObj = UndoInsertAt(self, text, line, col) self.qteUndoStack.push(undoObj) self.setCursorPosition(*pos)
[ "def", "append", "(", "self", ",", "text", ":", "str", ")", ":", "pos", "=", "self", ".", "getCursorPosition", "(", ")", "line", ",", "col", "=", "self", ".", "getNumLinesAndColumns", "(", ")", "undoObj", "=", "UndoInsertAt", "(", "self", ",", "text", ",", "line", ",", "col", ")", "self", ".", "qteUndoStack", ".", "push", "(", "undoObj", ")", "self", ".", "setCursorPosition", "(", "*", "pos", ")" ]
Undo safe wrapper for the native ``append`` method. |Args| * ``text`` (**str**): text to insert at the specified position. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type.
[ "Undo", "safe", "wrapper", "for", "the", "native", "append", "method", "." ]
36253b082b82590f183fe154b053eb3a1e741be2
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L797-L817
train
olitheolix/qtmacs
qtmacs/extensions/qtmacsscintilla_widget.py
QtmacsScintilla.setText
def setText(self, text: str): """ Undo safe wrapper for the native ``setText`` method. |Args| * ``text`` (**str**): text to insert at the specified position. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ undoObj = UndoSetText(self, text) self.qteUndoStack.push(undoObj)
python
def setText(self, text: str): """ Undo safe wrapper for the native ``setText`` method. |Args| * ``text`` (**str**): text to insert at the specified position. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ undoObj = UndoSetText(self, text) self.qteUndoStack.push(undoObj)
[ "def", "setText", "(", "self", ",", "text", ":", "str", ")", ":", "undoObj", "=", "UndoSetText", "(", "self", ",", "text", ")", "self", ".", "qteUndoStack", ".", "push", "(", "undoObj", ")" ]
Undo safe wrapper for the native ``setText`` method. |Args| * ``text`` (**str**): text to insert at the specified position. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type.
[ "Undo", "safe", "wrapper", "for", "the", "native", "setText", "method", "." ]
36253b082b82590f183fe154b053eb3a1e741be2
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L820-L837
train
olitheolix/qtmacs
qtmacs/extensions/qtmacsscintilla_widget.py
QtmacsScintilla.SCIGetStyledText
def SCIGetStyledText(self, selectionPos: tuple): """ Pythonic wrapper for the SCI_GETSTYLEDTEXT command. For example, to get the raw text and styling bits for the first five characters in the widget use:: text, style = SCIGetStyledText((0, 0, 0, 5)) print(text.decode('utf-8')) |Args| * ``selectionPos`` (**tuple**): selection position in the form of (start_line, start_col, end_line, end_col). |Returns| **tuple** of two ``bytearrays``. The first contains the the character bytes and the second the Scintilla styling information. |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ # Sanity check. if not self.isSelectionPositionValid(selectionPos): return None # Convert the start- and end point of the selection into # stream offsets. Ensure that start comes before end. start = self.positionFromLineIndex(*selectionPos[:2]) end = self.positionFromLineIndex(*selectionPos[2:]) if start > end: start, end = end, start # Allocate a large enough buffer. bufSize = 2 * (end - start) + 2 buf = bytearray(bufSize) # Fetch the text- and styling information. numRet = self.SendScintilla(self.SCI_GETSTYLEDTEXT, start, end, buf) # The last two bytes are always Zero according to the # Scintilla documentation, so remove them. buf = buf[:-2] # Double check that we did not receive more bytes than the buffer # was long. if numRet > bufSize: qteMain.qteLogger.error('SCI_GETSTYLEDTEX function returned more' ' bytes than expected.') text = buf[0::2] style = buf[1::2] return (text, style)
python
def SCIGetStyledText(self, selectionPos: tuple): """ Pythonic wrapper for the SCI_GETSTYLEDTEXT command. For example, to get the raw text and styling bits for the first five characters in the widget use:: text, style = SCIGetStyledText((0, 0, 0, 5)) print(text.decode('utf-8')) |Args| * ``selectionPos`` (**tuple**): selection position in the form of (start_line, start_col, end_line, end_col). |Returns| **tuple** of two ``bytearrays``. The first contains the the character bytes and the second the Scintilla styling information. |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ # Sanity check. if not self.isSelectionPositionValid(selectionPos): return None # Convert the start- and end point of the selection into # stream offsets. Ensure that start comes before end. start = self.positionFromLineIndex(*selectionPos[:2]) end = self.positionFromLineIndex(*selectionPos[2:]) if start > end: start, end = end, start # Allocate a large enough buffer. bufSize = 2 * (end - start) + 2 buf = bytearray(bufSize) # Fetch the text- and styling information. numRet = self.SendScintilla(self.SCI_GETSTYLEDTEXT, start, end, buf) # The last two bytes are always Zero according to the # Scintilla documentation, so remove them. buf = buf[:-2] # Double check that we did not receive more bytes than the buffer # was long. if numRet > bufSize: qteMain.qteLogger.error('SCI_GETSTYLEDTEX function returned more' ' bytes than expected.') text = buf[0::2] style = buf[1::2] return (text, style)
[ "def", "SCIGetStyledText", "(", "self", ",", "selectionPos", ":", "tuple", ")", ":", "# Sanity check.", "if", "not", "self", ".", "isSelectionPositionValid", "(", "selectionPos", ")", ":", "return", "None", "# Convert the start- and end point of the selection into", "# stream offsets. Ensure that start comes before end.", "start", "=", "self", ".", "positionFromLineIndex", "(", "*", "selectionPos", "[", ":", "2", "]", ")", "end", "=", "self", ".", "positionFromLineIndex", "(", "*", "selectionPos", "[", "2", ":", "]", ")", "if", "start", ">", "end", ":", "start", ",", "end", "=", "end", ",", "start", "# Allocate a large enough buffer.", "bufSize", "=", "2", "*", "(", "end", "-", "start", ")", "+", "2", "buf", "=", "bytearray", "(", "bufSize", ")", "# Fetch the text- and styling information.", "numRet", "=", "self", ".", "SendScintilla", "(", "self", ".", "SCI_GETSTYLEDTEXT", ",", "start", ",", "end", ",", "buf", ")", "# The last two bytes are always Zero according to the", "# Scintilla documentation, so remove them.", "buf", "=", "buf", "[", ":", "-", "2", "]", "# Double check that we did not receive more bytes than the buffer", "# was long.", "if", "numRet", ">", "bufSize", ":", "qteMain", ".", "qteLogger", ".", "error", "(", "'SCI_GETSTYLEDTEX function returned more'", "' bytes than expected.'", ")", "text", "=", "buf", "[", "0", ":", ":", "2", "]", "style", "=", "buf", "[", "1", ":", ":", "2", "]", "return", "(", "text", ",", "style", ")" ]
Pythonic wrapper for the SCI_GETSTYLEDTEXT command. For example, to get the raw text and styling bits for the first five characters in the widget use:: text, style = SCIGetStyledText((0, 0, 0, 5)) print(text.decode('utf-8')) |Args| * ``selectionPos`` (**tuple**): selection position in the form of (start_line, start_col, end_line, end_col). |Returns| **tuple** of two ``bytearrays``. The first contains the the character bytes and the second the Scintilla styling information. |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type.
[ "Pythonic", "wrapper", "for", "the", "SCI_GETSTYLEDTEXT", "command", "." ]
36253b082b82590f183fe154b053eb3a1e741be2
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L840-L895
train
olitheolix/qtmacs
qtmacs/extensions/qtmacsscintilla_widget.py
QtmacsScintilla.SCISetStyling
def SCISetStyling(self, line: int, col: int, numChar: int, style: bytearray): """ Pythonic wrapper for the SCI_SETSTYLING command. For example, the following code applies style #3 to the first five characters in the second line of the widget: SCISetStyling((0, 1), 5, 3) |Args| * ``line`` (**int**): line number where to start styling. * ``col`` (**int**): column number where to start styling. * ``numChar`` (**int**): number of characters to style. * ``style`` (**int**): Scintilla style number. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ if not self.isPositionValid(line, col): return pos = self.positionFromLineIndex(line, col) self.SendScintilla(self.SCI_STARTSTYLING, pos, 0xFF) self.SendScintilla(self.SCI_SETSTYLING, numChar, style)
python
def SCISetStyling(self, line: int, col: int, numChar: int, style: bytearray): """ Pythonic wrapper for the SCI_SETSTYLING command. For example, the following code applies style #3 to the first five characters in the second line of the widget: SCISetStyling((0, 1), 5, 3) |Args| * ``line`` (**int**): line number where to start styling. * ``col`` (**int**): column number where to start styling. * ``numChar`` (**int**): number of characters to style. * ``style`` (**int**): Scintilla style number. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ if not self.isPositionValid(line, col): return pos = self.positionFromLineIndex(line, col) self.SendScintilla(self.SCI_STARTSTYLING, pos, 0xFF) self.SendScintilla(self.SCI_SETSTYLING, numChar, style)
[ "def", "SCISetStyling", "(", "self", ",", "line", ":", "int", ",", "col", ":", "int", ",", "numChar", ":", "int", ",", "style", ":", "bytearray", ")", ":", "if", "not", "self", ".", "isPositionValid", "(", "line", ",", "col", ")", ":", "return", "pos", "=", "self", ".", "positionFromLineIndex", "(", "line", ",", "col", ")", "self", ".", "SendScintilla", "(", "self", ".", "SCI_STARTSTYLING", ",", "pos", ",", "0xFF", ")", "self", ".", "SendScintilla", "(", "self", ".", "SCI_SETSTYLING", ",", "numChar", ",", "style", ")" ]
Pythonic wrapper for the SCI_SETSTYLING command. For example, the following code applies style #3 to the first five characters in the second line of the widget: SCISetStyling((0, 1), 5, 3) |Args| * ``line`` (**int**): line number where to start styling. * ``col`` (**int**): column number where to start styling. * ``numChar`` (**int**): number of characters to style. * ``style`` (**int**): Scintilla style number. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type.
[ "Pythonic", "wrapper", "for", "the", "SCI_SETSTYLING", "command", "." ]
36253b082b82590f183fe154b053eb3a1e741be2
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L898-L929
train
olitheolix/qtmacs
qtmacs/extensions/qtmacsscintilla_widget.py
QtmacsScintilla.SCISetStylingEx
def SCISetStylingEx(self, line: int, col: int, style: bytearray): """ Pythonic wrapper for the SCI_SETSTYLINGEX command. For example, the following code will fetch the styling for the first five characters applies it verbatim to the next five characters. text, style = SCIGetStyledText((0, 0, 0, 5)) SCISetStylingEx((0, 5), style) |Args| * ``line`` (**int**): line number where to start styling. * ``col`` (**int**): column number where to start styling. * ``style`` (**bytearray**): Scintilla style bits. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ if not self.isPositionValid(line, col): return pos = self.positionFromLineIndex(line, col) self.SendScintilla(self.SCI_STARTSTYLING, pos, 0xFF) self.SendScintilla(self.SCI_SETSTYLINGEX, len(style), style)
python
def SCISetStylingEx(self, line: int, col: int, style: bytearray): """ Pythonic wrapper for the SCI_SETSTYLINGEX command. For example, the following code will fetch the styling for the first five characters applies it verbatim to the next five characters. text, style = SCIGetStyledText((0, 0, 0, 5)) SCISetStylingEx((0, 5), style) |Args| * ``line`` (**int**): line number where to start styling. * ``col`` (**int**): column number where to start styling. * ``style`` (**bytearray**): Scintilla style bits. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ if not self.isPositionValid(line, col): return pos = self.positionFromLineIndex(line, col) self.SendScintilla(self.SCI_STARTSTYLING, pos, 0xFF) self.SendScintilla(self.SCI_SETSTYLINGEX, len(style), style)
[ "def", "SCISetStylingEx", "(", "self", ",", "line", ":", "int", ",", "col", ":", "int", ",", "style", ":", "bytearray", ")", ":", "if", "not", "self", ".", "isPositionValid", "(", "line", ",", "col", ")", ":", "return", "pos", "=", "self", ".", "positionFromLineIndex", "(", "line", ",", "col", ")", "self", ".", "SendScintilla", "(", "self", ".", "SCI_STARTSTYLING", ",", "pos", ",", "0xFF", ")", "self", ".", "SendScintilla", "(", "self", ".", "SCI_SETSTYLINGEX", ",", "len", "(", "style", ")", ",", "style", ")" ]
Pythonic wrapper for the SCI_SETSTYLINGEX command. For example, the following code will fetch the styling for the first five characters applies it verbatim to the next five characters. text, style = SCIGetStyledText((0, 0, 0, 5)) SCISetStylingEx((0, 5), style) |Args| * ``line`` (**int**): line number where to start styling. * ``col`` (**int**): column number where to start styling. * ``style`` (**bytearray**): Scintilla style bits. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type.
[ "Pythonic", "wrapper", "for", "the", "SCI_SETSTYLINGEX", "command", "." ]
36253b082b82590f183fe154b053eb3a1e741be2
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L932-L961
train
olitheolix/qtmacs
qtmacs/extensions/qtmacsscintilla_widget.py
QtmacsScintilla.qteSetLexer
def qteSetLexer(self, lexer): """ Specify the lexer to use. The only difference between this method and the native ``setLexer`` method is that expects ``lexer`` to be class, not an instance. Another feature is that this method knows which ``lexer`` class was installed last, and this information can be retrieved with ``qteLexer`` again. |Args| * ``lexer`` (**QsciLexer**): lexer class (*not* instance). |Returns| **None** |Raises| * **QtmacsOtherError** if lexer is not a class. """ if (lexer is not None) and (not issubclass(lexer, Qsci.QsciLexer)): QtmacsOtherError('lexer must be a class object and derived from' ' <b>QsciLexer</b>') return # Install and backup the lexer class. self.qteLastLexer = lexer if lexer is None: self.setLexer(None) else: self.setLexer(lexer()) # Make all fonts in the style mono space. self.setMonospace()
python
def qteSetLexer(self, lexer): """ Specify the lexer to use. The only difference between this method and the native ``setLexer`` method is that expects ``lexer`` to be class, not an instance. Another feature is that this method knows which ``lexer`` class was installed last, and this information can be retrieved with ``qteLexer`` again. |Args| * ``lexer`` (**QsciLexer**): lexer class (*not* instance). |Returns| **None** |Raises| * **QtmacsOtherError** if lexer is not a class. """ if (lexer is not None) and (not issubclass(lexer, Qsci.QsciLexer)): QtmacsOtherError('lexer must be a class object and derived from' ' <b>QsciLexer</b>') return # Install and backup the lexer class. self.qteLastLexer = lexer if lexer is None: self.setLexer(None) else: self.setLexer(lexer()) # Make all fonts in the style mono space. self.setMonospace()
[ "def", "qteSetLexer", "(", "self", ",", "lexer", ")", ":", "if", "(", "lexer", "is", "not", "None", ")", "and", "(", "not", "issubclass", "(", "lexer", ",", "Qsci", ".", "QsciLexer", ")", ")", ":", "QtmacsOtherError", "(", "'lexer must be a class object and derived from'", "' <b>QsciLexer</b>'", ")", "return", "# Install and backup the lexer class.", "self", ".", "qteLastLexer", "=", "lexer", "if", "lexer", "is", "None", ":", "self", ".", "setLexer", "(", "None", ")", "else", ":", "self", ".", "setLexer", "(", "lexer", "(", ")", ")", "# Make all fonts in the style mono space.", "self", ".", "setMonospace", "(", ")" ]
Specify the lexer to use. The only difference between this method and the native ``setLexer`` method is that expects ``lexer`` to be class, not an instance. Another feature is that this method knows which ``lexer`` class was installed last, and this information can be retrieved with ``qteLexer`` again. |Args| * ``lexer`` (**QsciLexer**): lexer class (*not* instance). |Returns| **None** |Raises| * **QtmacsOtherError** if lexer is not a class.
[ "Specify", "the", "lexer", "to", "use", "." ]
36253b082b82590f183fe154b053eb3a1e741be2
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L963-L998
train
olitheolix/qtmacs
qtmacs/extensions/qtmacsscintilla_widget.py
QtmacsScintilla.setMonospace
def setMonospace(self): """ Fix the fonts of the first 32 styles to a mono space one. |Args| * **None** |Returns| **None** |Raises| * **None** """ font = bytes('courier new', 'utf-8') for ii in range(32): self.SendScintilla(self.SCI_STYLESETFONT, ii, font)
python
def setMonospace(self): """ Fix the fonts of the first 32 styles to a mono space one. |Args| * **None** |Returns| **None** |Raises| * **None** """ font = bytes('courier new', 'utf-8') for ii in range(32): self.SendScintilla(self.SCI_STYLESETFONT, ii, font)
[ "def", "setMonospace", "(", "self", ")", ":", "font", "=", "bytes", "(", "'courier new'", ",", "'utf-8'", ")", "for", "ii", "in", "range", "(", "32", ")", ":", "self", ".", "SendScintilla", "(", "self", ".", "SCI_STYLESETFONT", ",", "ii", ",", "font", ")" ]
Fix the fonts of the first 32 styles to a mono space one. |Args| * **None** |Returns| **None** |Raises| * **None**
[ "Fix", "the", "fonts", "of", "the", "first", "32", "styles", "to", "a", "mono", "space", "one", "." ]
36253b082b82590f183fe154b053eb3a1e741be2
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L1022-L1040
train
olitheolix/qtmacs
qtmacs/extensions/qtmacsscintilla_widget.py
QtmacsScintilla.setModified
def setModified(self, isModified: bool): """ Set the modified state to ``isModified``. From a programmer's perspective this method does the same as the native ``QsciScintilla`` method but also ensures that the undo framework knows when the document state was changed. |Args| * ``isModified`` (**bool**): whether or not the document is considered unmodified. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ if not isModified: self.qteUndoStack.saveState() super().setModified(isModified)
python
def setModified(self, isModified: bool): """ Set the modified state to ``isModified``. From a programmer's perspective this method does the same as the native ``QsciScintilla`` method but also ensures that the undo framework knows when the document state was changed. |Args| * ``isModified`` (**bool**): whether or not the document is considered unmodified. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type. """ if not isModified: self.qteUndoStack.saveState() super().setModified(isModified)
[ "def", "setModified", "(", "self", ",", "isModified", ":", "bool", ")", ":", "if", "not", "isModified", ":", "self", ".", "qteUndoStack", ".", "saveState", "(", ")", "super", "(", ")", ".", "setModified", "(", "isModified", ")" ]
Set the modified state to ``isModified``. From a programmer's perspective this method does the same as the native ``QsciScintilla`` method but also ensures that the undo framework knows when the document state was changed. |Args| * ``isModified`` (**bool**): whether or not the document is considered unmodified. |Returns| **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type.
[ "Set", "the", "modified", "state", "to", "isModified", "." ]
36253b082b82590f183fe154b053eb3a1e741be2
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L1043-L1066
train
vecnet/vecnet.openmalaria
vecnet/openmalaria/scenario/core.py
attribute
def attribute(func): """ Decorator used to declare that property is a tag attribute """ def inner(self): name, attribute_type = func(self) if not name: name = func.__name__ try: return attribute_type(self.et.attrib[name]) except KeyError: raise AttributeError return inner
python
def attribute(func): """ Decorator used to declare that property is a tag attribute """ def inner(self): name, attribute_type = func(self) if not name: name = func.__name__ try: return attribute_type(self.et.attrib[name]) except KeyError: raise AttributeError return inner
[ "def", "attribute", "(", "func", ")", ":", "def", "inner", "(", "self", ")", ":", "name", ",", "attribute_type", "=", "func", "(", "self", ")", "if", "not", "name", ":", "name", "=", "func", ".", "__name__", "try", ":", "return", "attribute_type", "(", "self", ".", "et", ".", "attrib", "[", "name", "]", ")", "except", "KeyError", ":", "raise", "AttributeError", "return", "inner" ]
Decorator used to declare that property is a tag attribute
[ "Decorator", "used", "to", "declare", "that", "property", "is", "a", "tag", "attribute" ]
795bc9d1b81a6c664f14879edda7a7c41188e95a
https://github.com/vecnet/vecnet.openmalaria/blob/795bc9d1b81a6c664f14879edda7a7c41188e95a/vecnet/openmalaria/scenario/core.py#L14-L27
train
vecnet/vecnet.openmalaria
vecnet/openmalaria/scenario/core.py
section
def section(func): """ Decorator used to declare that the property is xml section """ def inner(self): return func(self)(self.et.find(func.__name__)) return inner
python
def section(func): """ Decorator used to declare that the property is xml section """ def inner(self): return func(self)(self.et.find(func.__name__)) return inner
[ "def", "section", "(", "func", ")", ":", "def", "inner", "(", "self", ")", ":", "return", "func", "(", "self", ")", "(", "self", ".", "et", ".", "find", "(", "func", ".", "__name__", ")", ")", "return", "inner" ]
Decorator used to declare that the property is xml section
[ "Decorator", "used", "to", "declare", "that", "the", "property", "is", "xml", "section" ]
795bc9d1b81a6c664f14879edda7a7c41188e95a
https://github.com/vecnet/vecnet.openmalaria/blob/795bc9d1b81a6c664f14879edda7a7c41188e95a/vecnet/openmalaria/scenario/core.py#L40-L46
train
vecnet/vecnet.openmalaria
vecnet/openmalaria/scenario/core.py
tag_value
def tag_value(func): """ Decorator used to declare that the property is attribute of embedded tag """ def inner(self): tag, attrib, attrib_type = func(self) tag_obj = self.et.find(tag) if tag_obj is not None: try: return attrib_type(self.et.find(tag).attrib[attrib]) except KeyError: raise AttributeError return inner
python
def tag_value(func): """ Decorator used to declare that the property is attribute of embedded tag """ def inner(self): tag, attrib, attrib_type = func(self) tag_obj = self.et.find(tag) if tag_obj is not None: try: return attrib_type(self.et.find(tag).attrib[attrib]) except KeyError: raise AttributeError return inner
[ "def", "tag_value", "(", "func", ")", ":", "def", "inner", "(", "self", ")", ":", "tag", ",", "attrib", ",", "attrib_type", "=", "func", "(", "self", ")", "tag_obj", "=", "self", ".", "et", ".", "find", "(", "tag", ")", "if", "tag_obj", "is", "not", "None", ":", "try", ":", "return", "attrib_type", "(", "self", ".", "et", ".", "find", "(", "tag", ")", ".", "attrib", "[", "attrib", "]", ")", "except", "KeyError", ":", "raise", "AttributeError", "return", "inner" ]
Decorator used to declare that the property is attribute of embedded tag
[ "Decorator", "used", "to", "declare", "that", "the", "property", "is", "attribute", "of", "embedded", "tag" ]
795bc9d1b81a6c664f14879edda7a7c41188e95a
https://github.com/vecnet/vecnet.openmalaria/blob/795bc9d1b81a6c664f14879edda7a7c41188e95a/vecnet/openmalaria/scenario/core.py#L49-L63
train
vecnet/vecnet.openmalaria
vecnet/openmalaria/scenario/core.py
tag_value_setter
def tag_value_setter(tag, attrib): """ Decorator used to declare that the setter function is an attribute of embedded tag """ def outer(func): def inner(self, value): tag_elem = self.et.find(tag) if tag_elem is None: et = ElementTree.fromstring("<{}></{}>".format(tag, tag)) self.et.append(et) tag_elem = self.et.find(tag) tag_elem.attrib[attrib] = str(value) return inner return outer
python
def tag_value_setter(tag, attrib): """ Decorator used to declare that the setter function is an attribute of embedded tag """ def outer(func): def inner(self, value): tag_elem = self.et.find(tag) if tag_elem is None: et = ElementTree.fromstring("<{}></{}>".format(tag, tag)) self.et.append(et) tag_elem = self.et.find(tag) tag_elem.attrib[attrib] = str(value) return inner return outer
[ "def", "tag_value_setter", "(", "tag", ",", "attrib", ")", ":", "def", "outer", "(", "func", ")", ":", "def", "inner", "(", "self", ",", "value", ")", ":", "tag_elem", "=", "self", ".", "et", ".", "find", "(", "tag", ")", "if", "tag_elem", "is", "None", ":", "et", "=", "ElementTree", ".", "fromstring", "(", "\"<{}></{}>\"", ".", "format", "(", "tag", ",", "tag", ")", ")", "self", ".", "et", ".", "append", "(", "et", ")", "tag_elem", "=", "self", ".", "et", ".", "find", "(", "tag", ")", "tag_elem", ".", "attrib", "[", "attrib", "]", "=", "str", "(", "value", ")", "return", "inner", "return", "outer" ]
Decorator used to declare that the setter function is an attribute of embedded tag
[ "Decorator", "used", "to", "declare", "that", "the", "setter", "function", "is", "an", "attribute", "of", "embedded", "tag" ]
795bc9d1b81a6c664f14879edda7a7c41188e95a
https://github.com/vecnet/vecnet.openmalaria/blob/795bc9d1b81a6c664f14879edda7a7c41188e95a/vecnet/openmalaria/scenario/core.py#L66-L81
train
projectshift/shift-schema
shiftschema/validators/abstract_validator.py
AbstractValidator.run
def run(self, value, model=None, context=None): """ Run validation Wraps concrete implementation to ensure custom validators return proper type of result. :param value: a value to validate :param model: parent model of the property :param context: parent model or custom context :return: shiftschema.result.Error """ res = self.validate(value, model, context) if not isinstance(res, Error): err = 'Validator "{}" result must be of type "{}", got "{}"' raise InvalidErrorType(err.format( self.__class__.__name__, Error, type(res)) ) return res
python
def run(self, value, model=None, context=None): """ Run validation Wraps concrete implementation to ensure custom validators return proper type of result. :param value: a value to validate :param model: parent model of the property :param context: parent model or custom context :return: shiftschema.result.Error """ res = self.validate(value, model, context) if not isinstance(res, Error): err = 'Validator "{}" result must be of type "{}", got "{}"' raise InvalidErrorType(err.format( self.__class__.__name__, Error, type(res)) ) return res
[ "def", "run", "(", "self", ",", "value", ",", "model", "=", "None", ",", "context", "=", "None", ")", ":", "res", "=", "self", ".", "validate", "(", "value", ",", "model", ",", "context", ")", "if", "not", "isinstance", "(", "res", ",", "Error", ")", ":", "err", "=", "'Validator \"{}\" result must be of type \"{}\", got \"{}\"'", "raise", "InvalidErrorType", "(", "err", ".", "format", "(", "self", ".", "__class__", ".", "__name__", ",", "Error", ",", "type", "(", "res", ")", ")", ")", "return", "res" ]
Run validation Wraps concrete implementation to ensure custom validators return proper type of result. :param value: a value to validate :param model: parent model of the property :param context: parent model or custom context :return: shiftschema.result.Error
[ "Run", "validation", "Wraps", "concrete", "implementation", "to", "ensure", "custom", "validators", "return", "proper", "type", "of", "result", "." ]
07787b540d3369bb37217ffbfbe629118edaf0eb
https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/validators/abstract_validator.py#L29-L49
train
alextricity25/dwell_in_you_richly
diyr/sinks/base.py
BaseSinkClass._collect_data
def _collect_data(self): """ Returns a list of all the data gathered from the engine iterable. """ all_data = [] for line in self.engine.run_engine(): logging.debug("Adding {} to all_data".format(line)) all_data.append(line.copy()) logging.debug("all_data is now {}".format(all_data)) return all_data
python
def _collect_data(self): """ Returns a list of all the data gathered from the engine iterable. """ all_data = [] for line in self.engine.run_engine(): logging.debug("Adding {} to all_data".format(line)) all_data.append(line.copy()) logging.debug("all_data is now {}".format(all_data)) return all_data
[ "def", "_collect_data", "(", "self", ")", ":", "all_data", "=", "[", "]", "for", "line", "in", "self", ".", "engine", ".", "run_engine", "(", ")", ":", "logging", ".", "debug", "(", "\"Adding {} to all_data\"", ".", "format", "(", "line", ")", ")", "all_data", ".", "append", "(", "line", ".", "copy", "(", ")", ")", "logging", ".", "debug", "(", "\"all_data is now {}\"", ".", "format", "(", "all_data", ")", ")", "return", "all_data" ]
Returns a list of all the data gathered from the engine iterable.
[ "Returns", "a", "list", "of", "all", "the", "data", "gathered", "from", "the", "engine", "iterable", "." ]
e705e1bc4fc0b8d2aa25680dfc432762b361c783
https://github.com/alextricity25/dwell_in_you_richly/blob/e705e1bc4fc0b8d2aa25680dfc432762b361c783/diyr/sinks/base.py#L23-L34
train
pmacosta/pexdoc
pexdoc/pinspect.py
_get_module_name_from_fname
def _get_module_name_from_fname(fname): """Get module name from module file name.""" fname = fname.replace(".pyc", ".py") for mobj in sys.modules.values(): if ( hasattr(mobj, "__file__") and mobj.__file__ and (mobj.__file__.replace(".pyc", ".py") == fname) ): module_name = mobj.__name__ return module_name raise RuntimeError("Module could not be found")
python
def _get_module_name_from_fname(fname): """Get module name from module file name.""" fname = fname.replace(".pyc", ".py") for mobj in sys.modules.values(): if ( hasattr(mobj, "__file__") and mobj.__file__ and (mobj.__file__.replace(".pyc", ".py") == fname) ): module_name = mobj.__name__ return module_name raise RuntimeError("Module could not be found")
[ "def", "_get_module_name_from_fname", "(", "fname", ")", ":", "fname", "=", "fname", ".", "replace", "(", "\".pyc\"", ",", "\".py\"", ")", "for", "mobj", "in", "sys", ".", "modules", ".", "values", "(", ")", ":", "if", "(", "hasattr", "(", "mobj", ",", "\"__file__\"", ")", "and", "mobj", ".", "__file__", "and", "(", "mobj", ".", "__file__", ".", "replace", "(", "\".pyc\"", ",", "\".py\"", ")", "==", "fname", ")", ")", ":", "module_name", "=", "mobj", ".", "__name__", "return", "module_name", "raise", "RuntimeError", "(", "\"Module could not be found\"", ")" ]
Get module name from module file name.
[ "Get", "module", "name", "from", "module", "file", "name", "." ]
201ac243e5781347feb75896a4231429fe6da4b1
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L40-L51
train
pmacosta/pexdoc
pexdoc/pinspect.py
get_function_args
def get_function_args(func, no_self=False, no_varargs=False): """ Return tuple of the function argument names in the order of the function signature. :param func: Function :type func: function object :param no_self: Flag that indicates whether the function argument *self*, if present, is included in the output (False) or not (True) :type no_self: boolean :param no_varargs: Flag that indicates whether keyword arguments are included in the output (True) or not (False) :type no_varargs: boolean :rtype: tuple For example: >>> import pexdoc.pinspect >>> class MyClass(object): ... def __init__(self, value, **kwargs): ... pass ... >>> pexdoc.pinspect.get_function_args(MyClass.__init__) ('self', 'value', '**kwargs') >>> pexdoc.pinspect.get_function_args( ... MyClass.__init__, no_self=True ... ) ('value', '**kwargs') >>> pexdoc.pinspect.get_function_args( ... MyClass.__init__, no_self=True, no_varargs=True ... ) ('value',) >>> pexdoc.pinspect.get_function_args( ... MyClass.__init__, no_varargs=True ... ) ('self', 'value') """ par_dict = signature(func).parameters # Mark positional and/or keyword arguments (if any) pos = lambda x: x.kind == Parameter.VAR_POSITIONAL kw = lambda x: x.kind == Parameter.VAR_KEYWORD opts = ["", "*", "**"] args = [ "{prefix}{arg}".format(prefix=opts[pos(value) + 2 * kw(value)], arg=par) for par, value in par_dict.items() ] # Filter out 'self' from parameter list (optional) self_filtered_args = ( args if not args else (args[1 if (args[0] == "self") and no_self else 0 :]) ) # Filter out positional or keyword arguments (optional) pos = lambda x: (len(x) > 1) and (x[0] == "*") and (x[1] != "*") kw = lambda x: (len(x) > 2) and (x[:2] == "**") varargs_filtered_args = [ arg for arg in self_filtered_args if (not no_varargs) or all([no_varargs, not pos(arg), not kw(arg)]) ] return tuple(varargs_filtered_args)
python
def get_function_args(func, no_self=False, no_varargs=False): """ Return tuple of the function argument names in the order of the function signature. :param func: Function :type func: function object :param no_self: Flag that indicates whether the function argument *self*, if present, is included in the output (False) or not (True) :type no_self: boolean :param no_varargs: Flag that indicates whether keyword arguments are included in the output (True) or not (False) :type no_varargs: boolean :rtype: tuple For example: >>> import pexdoc.pinspect >>> class MyClass(object): ... def __init__(self, value, **kwargs): ... pass ... >>> pexdoc.pinspect.get_function_args(MyClass.__init__) ('self', 'value', '**kwargs') >>> pexdoc.pinspect.get_function_args( ... MyClass.__init__, no_self=True ... ) ('value', '**kwargs') >>> pexdoc.pinspect.get_function_args( ... MyClass.__init__, no_self=True, no_varargs=True ... ) ('value',) >>> pexdoc.pinspect.get_function_args( ... MyClass.__init__, no_varargs=True ... ) ('self', 'value') """ par_dict = signature(func).parameters # Mark positional and/or keyword arguments (if any) pos = lambda x: x.kind == Parameter.VAR_POSITIONAL kw = lambda x: x.kind == Parameter.VAR_KEYWORD opts = ["", "*", "**"] args = [ "{prefix}{arg}".format(prefix=opts[pos(value) + 2 * kw(value)], arg=par) for par, value in par_dict.items() ] # Filter out 'self' from parameter list (optional) self_filtered_args = ( args if not args else (args[1 if (args[0] == "self") and no_self else 0 :]) ) # Filter out positional or keyword arguments (optional) pos = lambda x: (len(x) > 1) and (x[0] == "*") and (x[1] != "*") kw = lambda x: (len(x) > 2) and (x[:2] == "**") varargs_filtered_args = [ arg for arg in self_filtered_args if (not no_varargs) or all([no_varargs, not pos(arg), not kw(arg)]) ] return tuple(varargs_filtered_args)
[ "def", "get_function_args", "(", "func", ",", "no_self", "=", "False", ",", "no_varargs", "=", "False", ")", ":", "par_dict", "=", "signature", "(", "func", ")", ".", "parameters", "# Mark positional and/or keyword arguments (if any)", "pos", "=", "lambda", "x", ":", "x", ".", "kind", "==", "Parameter", ".", "VAR_POSITIONAL", "kw", "=", "lambda", "x", ":", "x", ".", "kind", "==", "Parameter", ".", "VAR_KEYWORD", "opts", "=", "[", "\"\"", ",", "\"*\"", ",", "\"**\"", "]", "args", "=", "[", "\"{prefix}{arg}\"", ".", "format", "(", "prefix", "=", "opts", "[", "pos", "(", "value", ")", "+", "2", "*", "kw", "(", "value", ")", "]", ",", "arg", "=", "par", ")", "for", "par", ",", "value", "in", "par_dict", ".", "items", "(", ")", "]", "# Filter out 'self' from parameter list (optional)", "self_filtered_args", "=", "(", "args", "if", "not", "args", "else", "(", "args", "[", "1", "if", "(", "args", "[", "0", "]", "==", "\"self\"", ")", "and", "no_self", "else", "0", ":", "]", ")", ")", "# Filter out positional or keyword arguments (optional)", "pos", "=", "lambda", "x", ":", "(", "len", "(", "x", ")", ">", "1", ")", "and", "(", "x", "[", "0", "]", "==", "\"*\"", ")", "and", "(", "x", "[", "1", "]", "!=", "\"*\"", ")", "kw", "=", "lambda", "x", ":", "(", "len", "(", "x", ")", ">", "2", ")", "and", "(", "x", "[", ":", "2", "]", "==", "\"**\"", ")", "varargs_filtered_args", "=", "[", "arg", "for", "arg", "in", "self_filtered_args", "if", "(", "not", "no_varargs", ")", "or", "all", "(", "[", "no_varargs", ",", "not", "pos", "(", "arg", ")", ",", "not", "kw", "(", "arg", ")", "]", ")", "]", "return", "tuple", "(", "varargs_filtered_args", ")" ]
Return tuple of the function argument names in the order of the function signature. :param func: Function :type func: function object :param no_self: Flag that indicates whether the function argument *self*, if present, is included in the output (False) or not (True) :type no_self: boolean :param no_varargs: Flag that indicates whether keyword arguments are included in the output (True) or not (False) :type no_varargs: boolean :rtype: tuple For example: >>> import pexdoc.pinspect >>> class MyClass(object): ... def __init__(self, value, **kwargs): ... pass ... >>> pexdoc.pinspect.get_function_args(MyClass.__init__) ('self', 'value', '**kwargs') >>> pexdoc.pinspect.get_function_args( ... MyClass.__init__, no_self=True ... ) ('value', '**kwargs') >>> pexdoc.pinspect.get_function_args( ... MyClass.__init__, no_self=True, no_varargs=True ... ) ('value',) >>> pexdoc.pinspect.get_function_args( ... MyClass.__init__, no_varargs=True ... ) ('self', 'value')
[ "Return", "tuple", "of", "the", "function", "argument", "names", "in", "the", "order", "of", "the", "function", "signature", "." ]
201ac243e5781347feb75896a4231429fe6da4b1
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L66-L126
train
pmacosta/pexdoc
pexdoc/pinspect.py
get_module_name
def get_module_name(module_obj): r""" Retrieve the module name from a module object. :param module_obj: Module object :type module_obj: object :rtype: string :raises: * RuntimeError (Argument \`module_obj\` is not valid) * RuntimeError (Module object \`*[module_name]*\` could not be found in loaded modules) For example: >>> import pexdoc.pinspect >>> pexdoc.pinspect.get_module_name(sys.modules['pexdoc.pinspect']) 'pexdoc.pinspect' """ if not is_object_module(module_obj): raise RuntimeError("Argument `module_obj` is not valid") name = module_obj.__name__ msg = "Module object `{name}` could not be found in loaded modules" if name not in sys.modules: raise RuntimeError(msg.format(name=name)) return name
python
def get_module_name(module_obj): r""" Retrieve the module name from a module object. :param module_obj: Module object :type module_obj: object :rtype: string :raises: * RuntimeError (Argument \`module_obj\` is not valid) * RuntimeError (Module object \`*[module_name]*\` could not be found in loaded modules) For example: >>> import pexdoc.pinspect >>> pexdoc.pinspect.get_module_name(sys.modules['pexdoc.pinspect']) 'pexdoc.pinspect' """ if not is_object_module(module_obj): raise RuntimeError("Argument `module_obj` is not valid") name = module_obj.__name__ msg = "Module object `{name}` could not be found in loaded modules" if name not in sys.modules: raise RuntimeError(msg.format(name=name)) return name
[ "def", "get_module_name", "(", "module_obj", ")", ":", "if", "not", "is_object_module", "(", "module_obj", ")", ":", "raise", "RuntimeError", "(", "\"Argument `module_obj` is not valid\"", ")", "name", "=", "module_obj", ".", "__name__", "msg", "=", "\"Module object `{name}` could not be found in loaded modules\"", "if", "name", "not", "in", "sys", ".", "modules", ":", "raise", "RuntimeError", "(", "msg", ".", "format", "(", "name", "=", "name", ")", ")", "return", "name" ]
r""" Retrieve the module name from a module object. :param module_obj: Module object :type module_obj: object :rtype: string :raises: * RuntimeError (Argument \`module_obj\` is not valid) * RuntimeError (Module object \`*[module_name]*\` could not be found in loaded modules) For example: >>> import pexdoc.pinspect >>> pexdoc.pinspect.get_module_name(sys.modules['pexdoc.pinspect']) 'pexdoc.pinspect'
[ "r", "Retrieve", "the", "module", "name", "from", "a", "module", "object", "." ]
201ac243e5781347feb75896a4231429fe6da4b1
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L129-L156
train
pmacosta/pexdoc
pexdoc/pinspect.py
private_props
def private_props(obj): """ Yield private properties of an object. A private property is defined as one that has a single underscore (:code:`_`) before its name :param obj: Object :type obj: object :returns: iterator """ # Get private properties but NOT magic methods props = [item for item in dir(obj)] priv_props = [_PRIVATE_PROP_REGEXP.match(item) for item in props] call_props = [callable(getattr(obj, item)) for item in props] iobj = zip(props, priv_props, call_props) for obj_name in [prop for prop, priv, call in iobj if priv and (not call)]: yield obj_name
python
def private_props(obj): """ Yield private properties of an object. A private property is defined as one that has a single underscore (:code:`_`) before its name :param obj: Object :type obj: object :returns: iterator """ # Get private properties but NOT magic methods props = [item for item in dir(obj)] priv_props = [_PRIVATE_PROP_REGEXP.match(item) for item in props] call_props = [callable(getattr(obj, item)) for item in props] iobj = zip(props, priv_props, call_props) for obj_name in [prop for prop, priv, call in iobj if priv and (not call)]: yield obj_name
[ "def", "private_props", "(", "obj", ")", ":", "# Get private properties but NOT magic methods", "props", "=", "[", "item", "for", "item", "in", "dir", "(", "obj", ")", "]", "priv_props", "=", "[", "_PRIVATE_PROP_REGEXP", ".", "match", "(", "item", ")", "for", "item", "in", "props", "]", "call_props", "=", "[", "callable", "(", "getattr", "(", "obj", ",", "item", ")", ")", "for", "item", "in", "props", "]", "iobj", "=", "zip", "(", "props", ",", "priv_props", ",", "call_props", ")", "for", "obj_name", "in", "[", "prop", "for", "prop", ",", "priv", ",", "call", "in", "iobj", "if", "priv", "and", "(", "not", "call", ")", "]", ":", "yield", "obj_name" ]
Yield private properties of an object. A private property is defined as one that has a single underscore (:code:`_`) before its name :param obj: Object :type obj: object :returns: iterator
[ "Yield", "private", "properties", "of", "an", "object", "." ]
201ac243e5781347feb75896a4231429fe6da4b1
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L183-L201
train
pmacosta/pexdoc
pexdoc/pinspect.py
Callables._check_intersection
def _check_intersection(self, other): """Check that intersection of two objects has the same information.""" # pylint: disable=C0123 props = ["_callables_db", "_reverse_callables_db", "_modules_dict"] for prop in props: self_dict = getattr(self, prop) other_dict = getattr(other, prop) keys_self = set(self_dict.keys()) keys_other = set(other_dict.keys()) for key in keys_self & keys_other: svalue = self_dict[key] ovalue = other_dict[key] same_type = type(svalue) == type(ovalue) if same_type: list_comp = isinstance(svalue, list) and any( [item not in svalue for item in ovalue] ) str_comp = isinstance(svalue, str) and svalue != ovalue dict_comp = isinstance(svalue, dict) and svalue != ovalue comp = any([list_comp, str_comp, dict_comp]) if (not same_type) or (same_type and comp): emsg = "Conflicting information between objects" raise RuntimeError(emsg)
python
def _check_intersection(self, other): """Check that intersection of two objects has the same information.""" # pylint: disable=C0123 props = ["_callables_db", "_reverse_callables_db", "_modules_dict"] for prop in props: self_dict = getattr(self, prop) other_dict = getattr(other, prop) keys_self = set(self_dict.keys()) keys_other = set(other_dict.keys()) for key in keys_self & keys_other: svalue = self_dict[key] ovalue = other_dict[key] same_type = type(svalue) == type(ovalue) if same_type: list_comp = isinstance(svalue, list) and any( [item not in svalue for item in ovalue] ) str_comp = isinstance(svalue, str) and svalue != ovalue dict_comp = isinstance(svalue, dict) and svalue != ovalue comp = any([list_comp, str_comp, dict_comp]) if (not same_type) or (same_type and comp): emsg = "Conflicting information between objects" raise RuntimeError(emsg)
[ "def", "_check_intersection", "(", "self", ",", "other", ")", ":", "# pylint: disable=C0123", "props", "=", "[", "\"_callables_db\"", ",", "\"_reverse_callables_db\"", ",", "\"_modules_dict\"", "]", "for", "prop", "in", "props", ":", "self_dict", "=", "getattr", "(", "self", ",", "prop", ")", "other_dict", "=", "getattr", "(", "other", ",", "prop", ")", "keys_self", "=", "set", "(", "self_dict", ".", "keys", "(", ")", ")", "keys_other", "=", "set", "(", "other_dict", ".", "keys", "(", ")", ")", "for", "key", "in", "keys_self", "&", "keys_other", ":", "svalue", "=", "self_dict", "[", "key", "]", "ovalue", "=", "other_dict", "[", "key", "]", "same_type", "=", "type", "(", "svalue", ")", "==", "type", "(", "ovalue", ")", "if", "same_type", ":", "list_comp", "=", "isinstance", "(", "svalue", ",", "list", ")", "and", "any", "(", "[", "item", "not", "in", "svalue", "for", "item", "in", "ovalue", "]", ")", "str_comp", "=", "isinstance", "(", "svalue", ",", "str", ")", "and", "svalue", "!=", "ovalue", "dict_comp", "=", "isinstance", "(", "svalue", ",", "dict", ")", "and", "svalue", "!=", "ovalue", "comp", "=", "any", "(", "[", "list_comp", ",", "str_comp", ",", "dict_comp", "]", ")", "if", "(", "not", "same_type", ")", "or", "(", "same_type", "and", "comp", ")", ":", "emsg", "=", "\"Conflicting information between objects\"", "raise", "RuntimeError", "(", "emsg", ")" ]
Check that intersection of two objects has the same information.
[ "Check", "that", "intersection", "of", "two", "objects", "has", "the", "same", "information", "." ]
201ac243e5781347feb75896a4231429fe6da4b1
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L484-L506
train
pmacosta/pexdoc
pexdoc/pinspect.py
Callables.get_callable_from_line
def get_callable_from_line(self, module_file, lineno): """Get the callable that the line number belongs to.""" module_name = _get_module_name_from_fname(module_file) if module_name not in self._modules_dict: self.trace([module_file]) ret = None # Sort callables by starting line number iobj = sorted(self._modules_dict[module_name], key=lambda x: x["code_id"][1]) for value in iobj: if value["code_id"][1] <= lineno <= value["last_lineno"]: ret = value["name"] elif value["code_id"][1] > lineno: break return ret if ret else module_name
python
def get_callable_from_line(self, module_file, lineno): """Get the callable that the line number belongs to.""" module_name = _get_module_name_from_fname(module_file) if module_name not in self._modules_dict: self.trace([module_file]) ret = None # Sort callables by starting line number iobj = sorted(self._modules_dict[module_name], key=lambda x: x["code_id"][1]) for value in iobj: if value["code_id"][1] <= lineno <= value["last_lineno"]: ret = value["name"] elif value["code_id"][1] > lineno: break return ret if ret else module_name
[ "def", "get_callable_from_line", "(", "self", ",", "module_file", ",", "lineno", ")", ":", "module_name", "=", "_get_module_name_from_fname", "(", "module_file", ")", "if", "module_name", "not", "in", "self", ".", "_modules_dict", ":", "self", ".", "trace", "(", "[", "module_file", "]", ")", "ret", "=", "None", "# Sort callables by starting line number", "iobj", "=", "sorted", "(", "self", ".", "_modules_dict", "[", "module_name", "]", ",", "key", "=", "lambda", "x", ":", "x", "[", "\"code_id\"", "]", "[", "1", "]", ")", "for", "value", "in", "iobj", ":", "if", "value", "[", "\"code_id\"", "]", "[", "1", "]", "<=", "lineno", "<=", "value", "[", "\"last_lineno\"", "]", ":", "ret", "=", "value", "[", "\"name\"", "]", "elif", "value", "[", "\"code_id\"", "]", "[", "1", "]", ">", "lineno", ":", "break", "return", "ret", "if", "ret", "else", "module_name" ]
Get the callable that the line number belongs to.
[ "Get", "the", "callable", "that", "the", "line", "number", "belongs", "to", "." ]
201ac243e5781347feb75896a4231429fe6da4b1
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L512-L525
train
pmacosta/pexdoc
pexdoc/pinspect.py
Callables.refresh
def refresh(self): """Re-traces modules modified since the time they were traced.""" self.trace(list(self._fnames.keys()), _refresh=True)
python
def refresh(self): """Re-traces modules modified since the time they were traced.""" self.trace(list(self._fnames.keys()), _refresh=True)
[ "def", "refresh", "(", "self", ")", ":", "self", ".", "trace", "(", "list", "(", "self", ".", "_fnames", ".", "keys", "(", ")", ")", ",", "_refresh", "=", "True", ")" ]
Re-traces modules modified since the time they were traced.
[ "Re", "-", "traces", "modules", "modified", "since", "the", "time", "they", "were", "traced", "." ]
201ac243e5781347feb75896a4231429fe6da4b1
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L578-L580
train
pmacosta/pexdoc
pexdoc/pinspect.py
Callables.save
def save(self, callables_fname): r""" Save traced modules information to a `JSON`_ file. If the file exists it is overwritten :param callables_fname: File name :type callables_fname: :ref:`FileName` :raises: RuntimeError (Argument \\`fname\\` is not valid) """ # Validate file name _validate_fname(callables_fname) # JSON keys have to be strings but the reverse callables dictionary # keys are tuples, where the first item is a file name and the # second item is the starting line of the callable within that file # (dictionary value), thus need to convert the key to a string items = self._reverse_callables_db.items() fdict = { "_callables_db": self._callables_db, "_reverse_callables_db": dict([(str(k), v) for k, v in items]), "_modules_dict": self._modules_dict, "_fnames": self._fnames, "_module_names": self._module_names, "_class_names": self._class_names, } with open(callables_fname, "w") as fobj: json.dump(fdict, fobj)
python
def save(self, callables_fname): r""" Save traced modules information to a `JSON`_ file. If the file exists it is overwritten :param callables_fname: File name :type callables_fname: :ref:`FileName` :raises: RuntimeError (Argument \\`fname\\` is not valid) """ # Validate file name _validate_fname(callables_fname) # JSON keys have to be strings but the reverse callables dictionary # keys are tuples, where the first item is a file name and the # second item is the starting line of the callable within that file # (dictionary value), thus need to convert the key to a string items = self._reverse_callables_db.items() fdict = { "_callables_db": self._callables_db, "_reverse_callables_db": dict([(str(k), v) for k, v in items]), "_modules_dict": self._modules_dict, "_fnames": self._fnames, "_module_names": self._module_names, "_class_names": self._class_names, } with open(callables_fname, "w") as fobj: json.dump(fdict, fobj)
[ "def", "save", "(", "self", ",", "callables_fname", ")", ":", "# Validate file name", "_validate_fname", "(", "callables_fname", ")", "# JSON keys have to be strings but the reverse callables dictionary", "# keys are tuples, where the first item is a file name and the", "# second item is the starting line of the callable within that file", "# (dictionary value), thus need to convert the key to a string", "items", "=", "self", ".", "_reverse_callables_db", ".", "items", "(", ")", "fdict", "=", "{", "\"_callables_db\"", ":", "self", ".", "_callables_db", ",", "\"_reverse_callables_db\"", ":", "dict", "(", "[", "(", "str", "(", "k", ")", ",", "v", ")", "for", "k", ",", "v", "in", "items", "]", ")", ",", "\"_modules_dict\"", ":", "self", ".", "_modules_dict", ",", "\"_fnames\"", ":", "self", ".", "_fnames", ",", "\"_module_names\"", ":", "self", ".", "_module_names", ",", "\"_class_names\"", ":", "self", ".", "_class_names", ",", "}", "with", "open", "(", "callables_fname", ",", "\"w\"", ")", "as", "fobj", ":", "json", ".", "dump", "(", "fdict", ",", "fobj", ")" ]
r""" Save traced modules information to a `JSON`_ file. If the file exists it is overwritten :param callables_fname: File name :type callables_fname: :ref:`FileName` :raises: RuntimeError (Argument \\`fname\\` is not valid)
[ "r", "Save", "traced", "modules", "information", "to", "a", "JSON", "_", "file", "." ]
201ac243e5781347feb75896a4231429fe6da4b1
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L582-L609
train
pmacosta/pexdoc
pexdoc/pinspect.py
_AstTreeScanner._close_callable
def _close_callable(self, node, force=False): """Record last line number of callable.""" # Only nodes that have a line number can be considered for closing # callables. Similarly, only nodes with lines greater than the one # already processed can be considered for closing callables try: lineno = node.lineno except AttributeError: return if lineno <= self._processed_line: return # [[[cog # code = """ # print(pcolor('Close callable @ line = {0}'.format(lineno), 'green')) # """ # cog.out(code) # ]]] # [[[end]]] # Extract node name for property closing. Once a property is found, # it can only be closed out by a node type that has a name name = "" try: name = ( node.name if hasattr(node, "name") else ( node.targets[0].id if hasattr(node.targets[0], "id") else node.targets[0].value.id ) ) except AttributeError: pass # Traverse backwards through call stack and close callables as needed indent = self._get_indent(node) count = -1 # [[[cog # code = """ # print( # pcolor( # ' Name {0} @ {1}, indent = {2}'.format( # name if name else 'None', lineno, indent # ), # 'yellow' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] dlist = [] while count >= -len(self._indent_stack): element_full_name = self._indent_stack[count]["full_name"] edict = self._callables_db.get(element_full_name, None) stack_indent = self._indent_stack[count]["level"] open_callable = element_full_name and (not edict["last_lineno"]) # [[[cog # code = """ # print( # pcolor( # ' Name {0}, indent, {1}, stack_indent {2}'.format( # element_full_name, indent, stack_indent # ), # 'yellow' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] if open_callable and ( force or (indent < stack_indent) or ( (indent == stack_indent) and ( (edict["type"] != "prop") or ( (edict["type"] == "prop") and (name and (name != element_full_name)) ) ) ) ): # [[[cog # code = """ # print( # pcolor( # ' Closing {0} @ {1}'.format( # element_full_name, lineno-1 # ), # 'yellow' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] edict["last_lineno"] = 
lineno - 1 dlist.append(count) if indent > stack_indent: break count -= 1 # Callables have to be removed from stack when they are closed, # otherwise if a callable is subsequently followed after a few # lines by another callable at a further indentation level (like a for # loop) the second callable would incorrectly appear within the scope # of the first callable stack = self._indent_stack stack_length = len(self._indent_stack) dlist = [item for item in dlist if stack[item]["type"] != "module"] for item in dlist: del self._indent_stack[stack_length + item]
python
def _close_callable(self, node, force=False): """Record last line number of callable.""" # Only nodes that have a line number can be considered for closing # callables. Similarly, only nodes with lines greater than the one # already processed can be considered for closing callables try: lineno = node.lineno except AttributeError: return if lineno <= self._processed_line: return # [[[cog # code = """ # print(pcolor('Close callable @ line = {0}'.format(lineno), 'green')) # """ # cog.out(code) # ]]] # [[[end]]] # Extract node name for property closing. Once a property is found, # it can only be closed out by a node type that has a name name = "" try: name = ( node.name if hasattr(node, "name") else ( node.targets[0].id if hasattr(node.targets[0], "id") else node.targets[0].value.id ) ) except AttributeError: pass # Traverse backwards through call stack and close callables as needed indent = self._get_indent(node) count = -1 # [[[cog # code = """ # print( # pcolor( # ' Name {0} @ {1}, indent = {2}'.format( # name if name else 'None', lineno, indent # ), # 'yellow' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] dlist = [] while count >= -len(self._indent_stack): element_full_name = self._indent_stack[count]["full_name"] edict = self._callables_db.get(element_full_name, None) stack_indent = self._indent_stack[count]["level"] open_callable = element_full_name and (not edict["last_lineno"]) # [[[cog # code = """ # print( # pcolor( # ' Name {0}, indent, {1}, stack_indent {2}'.format( # element_full_name, indent, stack_indent # ), # 'yellow' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] if open_callable and ( force or (indent < stack_indent) or ( (indent == stack_indent) and ( (edict["type"] != "prop") or ( (edict["type"] == "prop") and (name and (name != element_full_name)) ) ) ) ): # [[[cog # code = """ # print( # pcolor( # ' Closing {0} @ {1}'.format( # element_full_name, lineno-1 # ), # 'yellow' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] edict["last_lineno"] = 
lineno - 1 dlist.append(count) if indent > stack_indent: break count -= 1 # Callables have to be removed from stack when they are closed, # otherwise if a callable is subsequently followed after a few # lines by another callable at a further indentation level (like a for # loop) the second callable would incorrectly appear within the scope # of the first callable stack = self._indent_stack stack_length = len(self._indent_stack) dlist = [item for item in dlist if stack[item]["type"] != "module"] for item in dlist: del self._indent_stack[stack_length + item]
[ "def", "_close_callable", "(", "self", ",", "node", ",", "force", "=", "False", ")", ":", "# Only nodes that have a line number can be considered for closing", "# callables. Similarly, only nodes with lines greater than the one", "# already processed can be considered for closing callables", "try", ":", "lineno", "=", "node", ".", "lineno", "except", "AttributeError", ":", "return", "if", "lineno", "<=", "self", ".", "_processed_line", ":", "return", "# [[[cog", "# code = \"\"\"", "# print(pcolor('Close callable @ line = {0}'.format(lineno), 'green'))", "# \"\"\"", "# cog.out(code)", "# ]]]", "# [[[end]]]", "# Extract node name for property closing. Once a property is found,", "# it can only be closed out by a node type that has a name", "name", "=", "\"\"", "try", ":", "name", "=", "(", "node", ".", "name", "if", "hasattr", "(", "node", ",", "\"name\"", ")", "else", "(", "node", ".", "targets", "[", "0", "]", ".", "id", "if", "hasattr", "(", "node", ".", "targets", "[", "0", "]", ",", "\"id\"", ")", "else", "node", ".", "targets", "[", "0", "]", ".", "value", ".", "id", ")", ")", "except", "AttributeError", ":", "pass", "# Traverse backwards through call stack and close callables as needed", "indent", "=", "self", ".", "_get_indent", "(", "node", ")", "count", "=", "-", "1", "# [[[cog", "# code = \"\"\"", "# print(", "# pcolor(", "# ' Name {0} @ {1}, indent = {2}'.format(", "# name if name else 'None', lineno, indent", "# ),", "# 'yellow'", "# )", "# )", "# \"\"\"", "# cog.out(code)", "# ]]]", "# [[[end]]]", "dlist", "=", "[", "]", "while", "count", ">=", "-", "len", "(", "self", ".", "_indent_stack", ")", ":", "element_full_name", "=", "self", ".", "_indent_stack", "[", "count", "]", "[", "\"full_name\"", "]", "edict", "=", "self", ".", "_callables_db", ".", "get", "(", "element_full_name", ",", "None", ")", "stack_indent", "=", "self", ".", "_indent_stack", "[", "count", "]", "[", "\"level\"", "]", "open_callable", "=", "element_full_name", "and", "(", "not", 
"edict", "[", "\"last_lineno\"", "]", ")", "# [[[cog", "# code = \"\"\"", "# print(", "# pcolor(", "# ' Name {0}, indent, {1}, stack_indent {2}'.format(", "# element_full_name, indent, stack_indent", "# ),", "# 'yellow'", "# )", "# )", "# \"\"\"", "# cog.out(code)", "# ]]]", "# [[[end]]]", "if", "open_callable", "and", "(", "force", "or", "(", "indent", "<", "stack_indent", ")", "or", "(", "(", "indent", "==", "stack_indent", ")", "and", "(", "(", "edict", "[", "\"type\"", "]", "!=", "\"prop\"", ")", "or", "(", "(", "edict", "[", "\"type\"", "]", "==", "\"prop\"", ")", "and", "(", "name", "and", "(", "name", "!=", "element_full_name", ")", ")", ")", ")", ")", ")", ":", "# [[[cog", "# code = \"\"\"", "# print(", "# pcolor(", "# ' Closing {0} @ {1}'.format(", "# element_full_name, lineno-1", "# ),", "# 'yellow'", "# )", "# )", "# \"\"\"", "# cog.out(code)", "# ]]]", "# [[[end]]]", "edict", "[", "\"last_lineno\"", "]", "=", "lineno", "-", "1", "dlist", ".", "append", "(", "count", ")", "if", "indent", ">", "stack_indent", ":", "break", "count", "-=", "1", "# Callables have to be removed from stack when they are closed,", "# otherwise if a callable is subsequently followed after a few", "# lines by another callable at a further indentation level (like a for", "# loop) the second callable would incorrectly appear within the scope", "# of the first callable", "stack", "=", "self", ".", "_indent_stack", "stack_length", "=", "len", "(", "self", ".", "_indent_stack", ")", "dlist", "=", "[", "item", "for", "item", "in", "dlist", "if", "stack", "[", "item", "]", "[", "\"type\"", "]", "!=", "\"module\"", "]", "for", "item", "in", "dlist", ":", "del", "self", ".", "_indent_stack", "[", "stack_length", "+", "item", "]" ]
Record last line number of callable.
[ "Record", "last", "line", "number", "of", "callable", "." ]
201ac243e5781347feb75896a4231429fe6da4b1
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L791-L903
train
pmacosta/pexdoc
pexdoc/pinspect.py
_AstTreeScanner._get_indent
def _get_indent(self, node): """Get node indentation level.""" lineno = node.lineno if lineno > len(self._lines): return -1 wsindent = self._wsregexp.match(self._lines[lineno - 1]) return len(wsindent.group(1))
python
def _get_indent(self, node): """Get node indentation level.""" lineno = node.lineno if lineno > len(self._lines): return -1 wsindent = self._wsregexp.match(self._lines[lineno - 1]) return len(wsindent.group(1))
[ "def", "_get_indent", "(", "self", ",", "node", ")", ":", "lineno", "=", "node", ".", "lineno", "if", "lineno", ">", "len", "(", "self", ".", "_lines", ")", ":", "return", "-", "1", "wsindent", "=", "self", ".", "_wsregexp", ".", "match", "(", "self", ".", "_lines", "[", "lineno", "-", "1", "]", ")", "return", "len", "(", "wsindent", ".", "group", "(", "1", ")", ")" ]
Get node indentation level.
[ "Get", "node", "indentation", "level", "." ]
201ac243e5781347feb75896a4231429fe6da4b1
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L905-L911
train
pmacosta/pexdoc
pexdoc/pinspect.py
_AstTreeScanner._in_class
def _in_class(self, node): """Find if callable is function or method.""" # Move left one indentation level and check if that callable is a class indent = self._get_indent(node) for indent_dict in reversed(self._indent_stack): # pragma: no branch if (indent_dict["level"] < indent) or (indent_dict["type"] == "module"): return indent_dict["type"] == "class"
python
def _in_class(self, node): """Find if callable is function or method.""" # Move left one indentation level and check if that callable is a class indent = self._get_indent(node) for indent_dict in reversed(self._indent_stack): # pragma: no branch if (indent_dict["level"] < indent) or (indent_dict["type"] == "module"): return indent_dict["type"] == "class"
[ "def", "_in_class", "(", "self", ",", "node", ")", ":", "# Move left one indentation level and check if that callable is a class", "indent", "=", "self", ".", "_get_indent", "(", "node", ")", "for", "indent_dict", "in", "reversed", "(", "self", ".", "_indent_stack", ")", ":", "# pragma: no branch", "if", "(", "indent_dict", "[", "\"level\"", "]", "<", "indent", ")", "or", "(", "indent_dict", "[", "\"type\"", "]", "==", "\"module\"", ")", ":", "return", "indent_dict", "[", "\"type\"", "]", "==", "\"class\"" ]
Find if callable is function or method.
[ "Find", "if", "callable", "is", "function", "or", "method", "." ]
201ac243e5781347feb75896a4231429fe6da4b1
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L913-L919
train
pmacosta/pexdoc
pexdoc/pinspect.py
_AstTreeScanner._pop_indent_stack
def _pop_indent_stack(self, node, node_type=None, action=None): """Get callable full name.""" indent = self._get_indent(node) indent_stack = copy.deepcopy(self._indent_stack) # Find enclosing scope while (len(indent_stack) > 1) and ( ( (indent <= indent_stack[-1]["level"]) and (indent_stack[-1]["type"] != "module") ) or (indent_stack[-1]["type"] == "prop") ): self._close_callable(node) indent_stack.pop() # Construct new callable name name = ( ( node.targets[0].id if hasattr(node.targets[0], "id") else node.targets[0].value.id ) if node_type == "prop" else node.name ) element_full_name = ".".join( [self._module] + [ indent_dict["prefix"] for indent_dict in indent_stack if indent_dict["type"] != "module" ] + [name] ) + ("({0})".format(action) if action else "") # Add new callable entry to indentation stack self._indent_stack = indent_stack self._indent_stack.append( { "level": indent, "prefix": name, "type": node_type, "full_name": element_full_name, "lineno": node.lineno, } ) return element_full_name
python
def _pop_indent_stack(self, node, node_type=None, action=None): """Get callable full name.""" indent = self._get_indent(node) indent_stack = copy.deepcopy(self._indent_stack) # Find enclosing scope while (len(indent_stack) > 1) and ( ( (indent <= indent_stack[-1]["level"]) and (indent_stack[-1]["type"] != "module") ) or (indent_stack[-1]["type"] == "prop") ): self._close_callable(node) indent_stack.pop() # Construct new callable name name = ( ( node.targets[0].id if hasattr(node.targets[0], "id") else node.targets[0].value.id ) if node_type == "prop" else node.name ) element_full_name = ".".join( [self._module] + [ indent_dict["prefix"] for indent_dict in indent_stack if indent_dict["type"] != "module" ] + [name] ) + ("({0})".format(action) if action else "") # Add new callable entry to indentation stack self._indent_stack = indent_stack self._indent_stack.append( { "level": indent, "prefix": name, "type": node_type, "full_name": element_full_name, "lineno": node.lineno, } ) return element_full_name
[ "def", "_pop_indent_stack", "(", "self", ",", "node", ",", "node_type", "=", "None", ",", "action", "=", "None", ")", ":", "indent", "=", "self", ".", "_get_indent", "(", "node", ")", "indent_stack", "=", "copy", ".", "deepcopy", "(", "self", ".", "_indent_stack", ")", "# Find enclosing scope", "while", "(", "len", "(", "indent_stack", ")", ">", "1", ")", "and", "(", "(", "(", "indent", "<=", "indent_stack", "[", "-", "1", "]", "[", "\"level\"", "]", ")", "and", "(", "indent_stack", "[", "-", "1", "]", "[", "\"type\"", "]", "!=", "\"module\"", ")", ")", "or", "(", "indent_stack", "[", "-", "1", "]", "[", "\"type\"", "]", "==", "\"prop\"", ")", ")", ":", "self", ".", "_close_callable", "(", "node", ")", "indent_stack", ".", "pop", "(", ")", "# Construct new callable name", "name", "=", "(", "(", "node", ".", "targets", "[", "0", "]", ".", "id", "if", "hasattr", "(", "node", ".", "targets", "[", "0", "]", ",", "\"id\"", ")", "else", "node", ".", "targets", "[", "0", "]", ".", "value", ".", "id", ")", "if", "node_type", "==", "\"prop\"", "else", "node", ".", "name", ")", "element_full_name", "=", "\".\"", ".", "join", "(", "[", "self", ".", "_module", "]", "+", "[", "indent_dict", "[", "\"prefix\"", "]", "for", "indent_dict", "in", "indent_stack", "if", "indent_dict", "[", "\"type\"", "]", "!=", "\"module\"", "]", "+", "[", "name", "]", ")", "+", "(", "\"({0})\"", ".", "format", "(", "action", ")", "if", "action", "else", "\"\"", ")", "# Add new callable entry to indentation stack", "self", ".", "_indent_stack", "=", "indent_stack", "self", ".", "_indent_stack", ".", "append", "(", "{", "\"level\"", ":", "indent", ",", "\"prefix\"", ":", "name", ",", "\"type\"", ":", "node_type", ",", "\"full_name\"", ":", "element_full_name", ",", "\"lineno\"", ":", "node", ".", "lineno", ",", "}", ")", "return", "element_full_name" ]
Get callable full name.
[ "Get", "callable", "full", "name", "." ]
201ac243e5781347feb75896a4231429fe6da4b1
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L921-L965
train
pmacosta/pexdoc
pexdoc/pinspect.py
_AstTreeScanner.generic_visit
def generic_visit(self, node): """Implement generic node.""" # [[[cog # cog.out("print(pcolor('Enter generic visitor', 'magenta'))") # ]]] # [[[end]]] # A generic visitor that potentially closes callables is needed to # close enclosed callables that are not at the end of the enclosing # callable, otherwise the ending line of the enclosed callable would # be the ending line of the enclosing callable, which would be # incorrect self._close_callable(node) super(_AstTreeScanner, self).generic_visit(node)
python
def generic_visit(self, node): """Implement generic node.""" # [[[cog # cog.out("print(pcolor('Enter generic visitor', 'magenta'))") # ]]] # [[[end]]] # A generic visitor that potentially closes callables is needed to # close enclosed callables that are not at the end of the enclosing # callable, otherwise the ending line of the enclosed callable would # be the ending line of the enclosing callable, which would be # incorrect self._close_callable(node) super(_AstTreeScanner, self).generic_visit(node)
[ "def", "generic_visit", "(", "self", ",", "node", ")", ":", "# [[[cog", "# cog.out(\"print(pcolor('Enter generic visitor', 'magenta'))\")", "# ]]]", "# [[[end]]]", "# A generic visitor that potentially closes callables is needed to", "# close enclosed callables that are not at the end of the enclosing", "# callable, otherwise the ending line of the enclosed callable would", "# be the ending line of the enclosing callable, which would be", "# incorrect", "self", ".", "_close_callable", "(", "node", ")", "super", "(", "_AstTreeScanner", ",", "self", ")", ".", "generic_visit", "(", "node", ")" ]
Implement generic node.
[ "Implement", "generic", "node", "." ]
201ac243e5781347feb75896a4231429fe6da4b1
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L967-L979
train
pmacosta/pexdoc
pexdoc/pinspect.py
_AstTreeScanner.visit_Assign
def visit_Assign(self, node): """ Implement assignment walker. Parse class properties defined via the property() function """ # [[[cog # cog.out("print(pcolor('Enter assign visitor', 'magenta'))") # ]]] # [[[end]]] # ### # Class-level assignment may also be a class attribute that is not # a managed attribute, record it anyway, no harm in doing so as it # is not attached to a callable if self._in_class(node): element_full_name = self._pop_indent_stack(node, "prop") code_id = (self._fname, node.lineno) self._processed_line = node.lineno self._callables_db[element_full_name] = { "name": element_full_name, "type": "prop", "code_id": code_id, "last_lineno": None, } self._reverse_callables_db[code_id] = element_full_name # [[[cog # code = """ # print( # pcolor( # 'Visiting property {0} @ {1}'.format( # element_full_name, code_id[1] # ), # 'green' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] # Get property actions self.generic_visit(node)
python
def visit_Assign(self, node): """ Implement assignment walker. Parse class properties defined via the property() function """ # [[[cog # cog.out("print(pcolor('Enter assign visitor', 'magenta'))") # ]]] # [[[end]]] # ### # Class-level assignment may also be a class attribute that is not # a managed attribute, record it anyway, no harm in doing so as it # is not attached to a callable if self._in_class(node): element_full_name = self._pop_indent_stack(node, "prop") code_id = (self._fname, node.lineno) self._processed_line = node.lineno self._callables_db[element_full_name] = { "name": element_full_name, "type": "prop", "code_id": code_id, "last_lineno": None, } self._reverse_callables_db[code_id] = element_full_name # [[[cog # code = """ # print( # pcolor( # 'Visiting property {0} @ {1}'.format( # element_full_name, code_id[1] # ), # 'green' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] # Get property actions self.generic_visit(node)
[ "def", "visit_Assign", "(", "self", ",", "node", ")", ":", "# [[[cog", "# cog.out(\"print(pcolor('Enter assign visitor', 'magenta'))\")", "# ]]]", "# [[[end]]]", "# ###", "# Class-level assignment may also be a class attribute that is not", "# a managed attribute, record it anyway, no harm in doing so as it", "# is not attached to a callable", "if", "self", ".", "_in_class", "(", "node", ")", ":", "element_full_name", "=", "self", ".", "_pop_indent_stack", "(", "node", ",", "\"prop\"", ")", "code_id", "=", "(", "self", ".", "_fname", ",", "node", ".", "lineno", ")", "self", ".", "_processed_line", "=", "node", ".", "lineno", "self", ".", "_callables_db", "[", "element_full_name", "]", "=", "{", "\"name\"", ":", "element_full_name", ",", "\"type\"", ":", "\"prop\"", ",", "\"code_id\"", ":", "code_id", ",", "\"last_lineno\"", ":", "None", ",", "}", "self", ".", "_reverse_callables_db", "[", "code_id", "]", "=", "element_full_name", "# [[[cog", "# code = \"\"\"", "# print(", "# pcolor(", "# 'Visiting property {0} @ {1}'.format(", "# element_full_name, code_id[1]", "# ),", "# 'green'", "# )", "# )", "# \"\"\"", "# cog.out(code)", "# ]]]", "# [[[end]]]", "# Get property actions", "self", ".", "generic_visit", "(", "node", ")" ]
Implement assignment walker. Parse class properties defined via the property() function
[ "Implement", "assignment", "walker", "." ]
201ac243e5781347feb75896a4231429fe6da4b1
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L989-L1029
train
pmacosta/pexdoc
pexdoc/pinspect.py
_AstTreeScanner.visit_ClassDef
def visit_ClassDef(self, node): """Implement class walker.""" # [[[cog # cog.out("print(pcolor('Enter class visitor', 'magenta'))") # ]]] # [[[end]]] # Get class information (name, line number, etc.) element_full_name = self._pop_indent_stack(node, "class") code_id = (self._fname, node.lineno) self._processed_line = node.lineno # Add class entry to dictionaries self._class_names.append(element_full_name) self._callables_db[element_full_name] = { "name": element_full_name, "type": "class", "code_id": code_id, "last_lineno": None, } self._reverse_callables_db[code_id] = element_full_name # [[[cog # code = """ # print( # pcolor( # 'Visiting class {0} @ {1}, indent = {2}'.format( # element_full_name, code_id[1], self._get_indent(node) # ), # 'green' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] self.generic_visit(node)
python
def visit_ClassDef(self, node): """Implement class walker.""" # [[[cog # cog.out("print(pcolor('Enter class visitor', 'magenta'))") # ]]] # [[[end]]] # Get class information (name, line number, etc.) element_full_name = self._pop_indent_stack(node, "class") code_id = (self._fname, node.lineno) self._processed_line = node.lineno # Add class entry to dictionaries self._class_names.append(element_full_name) self._callables_db[element_full_name] = { "name": element_full_name, "type": "class", "code_id": code_id, "last_lineno": None, } self._reverse_callables_db[code_id] = element_full_name # [[[cog # code = """ # print( # pcolor( # 'Visiting class {0} @ {1}, indent = {2}'.format( # element_full_name, code_id[1], self._get_indent(node) # ), # 'green' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] self.generic_visit(node)
[ "def", "visit_ClassDef", "(", "self", ",", "node", ")", ":", "# [[[cog", "# cog.out(\"print(pcolor('Enter class visitor', 'magenta'))\")", "# ]]]", "# [[[end]]]", "# Get class information (name, line number, etc.)", "element_full_name", "=", "self", ".", "_pop_indent_stack", "(", "node", ",", "\"class\"", ")", "code_id", "=", "(", "self", ".", "_fname", ",", "node", ".", "lineno", ")", "self", ".", "_processed_line", "=", "node", ".", "lineno", "# Add class entry to dictionaries", "self", ".", "_class_names", ".", "append", "(", "element_full_name", ")", "self", ".", "_callables_db", "[", "element_full_name", "]", "=", "{", "\"name\"", ":", "element_full_name", ",", "\"type\"", ":", "\"class\"", ",", "\"code_id\"", ":", "code_id", ",", "\"last_lineno\"", ":", "None", ",", "}", "self", ".", "_reverse_callables_db", "[", "code_id", "]", "=", "element_full_name", "# [[[cog", "# code = \"\"\"", "# print(", "# pcolor(", "# 'Visiting class {0} @ {1}, indent = {2}'.format(", "# element_full_name, code_id[1], self._get_indent(node)", "# ),", "# 'green'", "# )", "# )", "# \"\"\"", "# cog.out(code)", "# ]]]", "# [[[end]]]", "self", ".", "generic_visit", "(", "node", ")" ]
Implement class walker.
[ "Implement", "class", "walker", "." ]
201ac243e5781347feb75896a4231429fe6da4b1
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L1031-L1064
train
rbeagrie/wrapit
wrapit/api.py
run
def run(task_creators, args, task_selectors=[]): """run doit using task_creators @param task_creators: module or dict containing task creators """ if args.reset_dep: sys.exit(DoitMain(WrapitLoader(args, task_creators)).run(['reset-dep'])) else: sys.exit(DoitMain(WrapitLoader(args, task_creators)).run(task_selectors))
python
def run(task_creators, args, task_selectors=[]): """run doit using task_creators @param task_creators: module or dict containing task creators """ if args.reset_dep: sys.exit(DoitMain(WrapitLoader(args, task_creators)).run(['reset-dep'])) else: sys.exit(DoitMain(WrapitLoader(args, task_creators)).run(task_selectors))
[ "def", "run", "(", "task_creators", ",", "args", ",", "task_selectors", "=", "[", "]", ")", ":", "if", "args", ".", "reset_dep", ":", "sys", ".", "exit", "(", "DoitMain", "(", "WrapitLoader", "(", "args", ",", "task_creators", ")", ")", ".", "run", "(", "[", "'reset-dep'", "]", ")", ")", "else", ":", "sys", ".", "exit", "(", "DoitMain", "(", "WrapitLoader", "(", "args", ",", "task_creators", ")", ")", ".", "run", "(", "task_selectors", ")", ")" ]
run doit using task_creators @param task_creators: module or dict containing task creators
[ "run", "doit", "using", "task_creators" ]
ee01c20cca0a9e51c62fb73c894227e36d9abaaa
https://github.com/rbeagrie/wrapit/blob/ee01c20cca0a9e51c62fb73c894227e36d9abaaa/wrapit/api.py#L7-L15
train
a1ezzz/wasp-general
wasp_general/task/registry.py
WTaskRegistryStorage.tasks_by_tag
def tasks_by_tag(self, registry_tag): """ Get tasks from registry by its tag :param registry_tag: any hash-able object :return: Return task (if :attr:`.WTaskRegistryStorage.__multiple_tasks_per_tag__` is not True) or \ list of tasks """ if registry_tag not in self.__registry.keys(): return None tasks = self.__registry[registry_tag] return tasks if self.__multiple_tasks_per_tag__ is True else tasks[0]
python
def tasks_by_tag(self, registry_tag): """ Get tasks from registry by its tag :param registry_tag: any hash-able object :return: Return task (if :attr:`.WTaskRegistryStorage.__multiple_tasks_per_tag__` is not True) or \ list of tasks """ if registry_tag not in self.__registry.keys(): return None tasks = self.__registry[registry_tag] return tasks if self.__multiple_tasks_per_tag__ is True else tasks[0]
[ "def", "tasks_by_tag", "(", "self", ",", "registry_tag", ")", ":", "if", "registry_tag", "not", "in", "self", ".", "__registry", ".", "keys", "(", ")", ":", "return", "None", "tasks", "=", "self", ".", "__registry", "[", "registry_tag", "]", "return", "tasks", "if", "self", ".", "__multiple_tasks_per_tag__", "is", "True", "else", "tasks", "[", "0", "]" ]
Get tasks from registry by its tag :param registry_tag: any hash-able object :return: Return task (if :attr:`.WTaskRegistryStorage.__multiple_tasks_per_tag__` is not True) or \ list of tasks
[ "Get", "tasks", "from", "registry", "by", "its", "tag" ]
1029839d33eb663f8dec76c1c46754d53c1de4a9
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/task/registry.py#L201-L211
train
a1ezzz/wasp-general
wasp_general/task/registry.py
WTaskRegistryStorage.count
def count(self): """ Registered task count :return: int """ result = 0 for tasks in self.__registry.values(): result += len(tasks) return result
python
def count(self): """ Registered task count :return: int """ result = 0 for tasks in self.__registry.values(): result += len(tasks) return result
[ "def", "count", "(", "self", ")", ":", "result", "=", "0", "for", "tasks", "in", "self", ".", "__registry", ".", "values", "(", ")", ":", "result", "+=", "len", "(", "tasks", ")", "return", "result" ]
Registered task count :return: int
[ "Registered", "task", "count" ]
1029839d33eb663f8dec76c1c46754d53c1de4a9
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/task/registry.py#L230-L238
train
a1ezzz/wasp-general
wasp_general/task/registry.py
WTaskRegistry.add
def add(cls, task_cls): """ Add task class to storage :param task_cls: task to add :return: None """ if task_cls.__registry_tag__ is None and cls.__skip_none_registry_tag__ is True: return cls.registry_storage().add(task_cls)
python
def add(cls, task_cls): """ Add task class to storage :param task_cls: task to add :return: None """ if task_cls.__registry_tag__ is None and cls.__skip_none_registry_tag__ is True: return cls.registry_storage().add(task_cls)
[ "def", "add", "(", "cls", ",", "task_cls", ")", ":", "if", "task_cls", ".", "__registry_tag__", "is", "None", "and", "cls", ".", "__skip_none_registry_tag__", "is", "True", ":", "return", "cls", ".", "registry_storage", "(", ")", ".", "add", "(", "task_cls", ")" ]
Add task class to storage :param task_cls: task to add :return: None
[ "Add", "task", "class", "to", "storage" ]
1029839d33eb663f8dec76c1c46754d53c1de4a9
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/task/registry.py#L270-L280
train
pmacosta/pexdoc
docs/support/exh_example.py
my_func
def my_func(name): """Sample function.""" # Add exception exobj = addex(TypeError, "Argument `name` is not valid") # Conditionally raise exception exobj(not isinstance(name, str)) print("My name is {0}".format(name))
python
def my_func(name): """Sample function.""" # Add exception exobj = addex(TypeError, "Argument `name` is not valid") # Conditionally raise exception exobj(not isinstance(name, str)) print("My name is {0}".format(name))
[ "def", "my_func", "(", "name", ")", ":", "# Add exception", "exobj", "=", "addex", "(", "TypeError", ",", "\"Argument `name` is not valid\"", ")", "# Conditionally raise exception", "exobj", "(", "not", "isinstance", "(", "name", ",", "str", ")", ")", "print", "(", "\"My name is {0}\"", ".", "format", "(", "name", ")", ")" ]
Sample function.
[ "Sample", "function", "." ]
201ac243e5781347feb75896a4231429fe6da4b1
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/docs/support/exh_example.py#L10-L16
train
a1ezzz/wasp-general
wasp_general/command/command.py
WCommandPrioritizedSelector.add_prioritized
def add_prioritized(self, command_obj, priority): """ Add command with the specified priority :param command_obj: command to add :param priority: command priority :return: None """ if priority not in self.__priorities.keys(): self.__priorities[priority] = [] self.__priorities[priority].append(command_obj)
python
def add_prioritized(self, command_obj, priority): """ Add command with the specified priority :param command_obj: command to add :param priority: command priority :return: None """ if priority not in self.__priorities.keys(): self.__priorities[priority] = [] self.__priorities[priority].append(command_obj)
[ "def", "add_prioritized", "(", "self", ",", "command_obj", ",", "priority", ")", ":", "if", "priority", "not", "in", "self", ".", "__priorities", ".", "keys", "(", ")", ":", "self", ".", "__priorities", "[", "priority", "]", "=", "[", "]", "self", ".", "__priorities", "[", "priority", "]", ".", "append", "(", "command_obj", ")" ]
Add command with the specified priority :param command_obj: command to add :param priority: command priority :return: None
[ "Add", "command", "with", "the", "specified", "priority" ]
1029839d33eb663f8dec76c1c46754d53c1de4a9
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/command/command.py#L205-L215
train
a1ezzz/wasp-general
wasp_general/command/command.py
WCommandSet.__track_vars
def __track_vars(self, command_result): """ Check if there are any tracked variable inside the result. And keep them for future use. :param command_result: command result tot check :return: """ command_env = command_result.environment() for var_name in self.tracked_vars(): if var_name in command_env.keys(): self.__vars[var_name] = command_env[var_name]
python
def __track_vars(self, command_result): """ Check if there are any tracked variable inside the result. And keep them for future use. :param command_result: command result tot check :return: """ command_env = command_result.environment() for var_name in self.tracked_vars(): if var_name in command_env.keys(): self.__vars[var_name] = command_env[var_name]
[ "def", "__track_vars", "(", "self", ",", "command_result", ")", ":", "command_env", "=", "command_result", ".", "environment", "(", ")", "for", "var_name", "in", "self", ".", "tracked_vars", "(", ")", ":", "if", "var_name", "in", "command_env", ".", "keys", "(", ")", ":", "self", ".", "__vars", "[", "var_name", "]", "=", "command_env", "[", "var_name", "]" ]
Check if there are any tracked variable inside the result. And keep them for future use. :param command_result: command result tot check :return:
[ "Check", "if", "there", "are", "any", "tracked", "variable", "inside", "the", "result", ".", "And", "keep", "them", "for", "future", "use", "." ]
1029839d33eb663f8dec76c1c46754d53c1de4a9
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/command/command.py#L320-L330
train